code stringlengths 2-1.05M | repo_name stringlengths 5-104 | path stringlengths 4-251 | language stringclasses 1 value | license stringclasses 15 values | size int32 2-1.05M
---|---|---|---|---|---
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import weakref
from twisted.application import internet
from twisted.application import service
from twisted.internet import defer
from twisted.python import failure
from twisted.python import log
from twisted.spread import pb
from zope.interface import implements
from buildbot import config
from buildbot import interfaces
from buildbot.process import buildrequest
from buildbot.process import slavebuilder
from buildbot.process.build import Build
from buildbot.process.properties import Properties
from buildbot.process.slavebuilder import BUILDING
from buildbot.status.builder import RETRY
from buildbot.status.buildrequest import BuildRequestStatus
from buildbot.status.progress import Expectations
def enforceChosenSlave(bldr, slavebuilder, breq):
if 'slavename' in breq.properties:
slavename = breq.properties['slavename']
if isinstance(slavename, basestring):
return slavename == slavebuilder.slave.slavename
return True
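# Illustrative usage (not part of this module): enforceChosenSlave is intended
# to be passed as a builder's canStartBuild check in master.cfg, so that a
# forced build carrying a 'slavename' property only starts on that slave.
# The BuilderConfig arguments below are assumptions made for the example.
#
#   from buildbot.config import BuilderConfig
#   from buildbot.process.builder import enforceChosenSlave
#
#   c['builders'].append(BuilderConfig(name='runtests',
#                                      slavenames=['slave1', 'slave2'],
#                                      factory=factory,
#                                      canStartBuild=enforceChosenSlave))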
class Builder(config.ReconfigurableServiceMixin,
pb.Referenceable,
service.MultiService):
# reconfigure builders before slaves
reconfig_priority = 196
def __init__(self, name, _addServices=True):
service.MultiService.__init__(self)
self.name = name
# this is created the first time we get a good build
self.expectations = None
# build/wannabuild slots: Build objects move along this sequence
self.building = []
# old_building holds active builds that were stolen from a predecessor
self.old_building = weakref.WeakKeyDictionary()
# buildslaves which have connected but which are not yet available.
# These are always in the ATTACHING state.
self.attaching_slaves = []
# buildslaves at our disposal. Each SlaveBuilder instance has a
# .state that is IDLE, PINGING, or BUILDING. "PINGING" is used when a
# Build is about to start, to make sure that they're still alive.
self.slaves = []
self.config = None
self.builder_status = None
if _addServices:
self.reclaim_svc = internet.TimerService(10 * 60,
self.reclaimAllBuilds)
self.reclaim_svc.setServiceParent(self)
# update big status every 30 minutes, working around #1980
self.updateStatusService = internet.TimerService(30 * 60,
self.updateBigStatus)
self.updateStatusService.setServiceParent(self)
def reconfigService(self, new_config):
# find this builder in the config
for builder_config in new_config.builders:
if builder_config.name == self.name:
break
else:
assert 0, "no config found for builder '%s'" % self.name
# set up a builder status object on the first reconfig
if not self.builder_status:
self.builder_status = self.master.status.builderAdded(
builder_config.name,
builder_config.builddir,
builder_config.category,
builder_config.description)
self.config = builder_config
self.builder_status.setDescription(builder_config.description)
self.builder_status.setCategory(builder_config.category)
self.builder_status.setSlavenames(self.config.slavenames)
self.builder_status.setCacheSize(new_config.caches['Builds'])
# if we have any slavebuilders attached which are no longer configured,
# drop them.
new_slavenames = set(builder_config.slavenames)
self.slaves = [s for s in self.slaves
if s.slave.slavename in new_slavenames]
return defer.succeed(None)
def stopService(self):
d = defer.maybeDeferred(lambda:
service.MultiService.stopService(self))
return d
def __repr__(self):
return "<Builder '%r' at %d>" % (self.name, id(self))
@defer.inlineCallbacks
def getOldestRequestTime(self):
"""Returns the submitted_at of the oldest unclaimed build request for
this builder, or None if there are no build requests.
@returns: datetime instance or None, via Deferred
"""
unclaimed = yield self.master.db.buildrequests.getBuildRequests(
buildername=self.name, claimed=False)
if unclaimed:
unclaimed = sorted([brd['submitted_at'] for brd in unclaimed])
defer.returnValue(unclaimed[0])
else:
defer.returnValue(None)
def reclaimAllBuilds(self):
brids = set()
for b in self.building:
brids.update([br.id for br in b.requests])
for b in self.old_building:
brids.update([br.id for br in b.requests])
if not brids:
return defer.succeed(None)
d = self.master.db.buildrequests.reclaimBuildRequests(brids)
d.addErrback(log.err, 'while re-claiming running BuildRequests')
return d
def getBuild(self, number):
for b in self.building:
if b.build_status and b.build_status.number == number:
return b
for b in self.old_building.keys():
if b.build_status and b.build_status.number == number:
return b
return None
def addLatentSlave(self, slave):
assert interfaces.ILatentBuildSlave.providedBy(slave)
for s in self.slaves:
if s == slave:
break
else:
sb = slavebuilder.LatentSlaveBuilder(slave, self)
self.builder_status.addPointEvent(
['added', 'latent', slave.slavename])
self.slaves.append(sb)
self.botmaster.maybeStartBuildsForBuilder(self.name)
def attached(self, slave, remote, commands):
"""This is invoked by the BuildSlave when the self.slavename bot
registers their builder.
@type slave: L{buildbot.buildslave.BuildSlave}
@param slave: the BuildSlave that represents the buildslave as a whole
@type remote: L{twisted.spread.pb.RemoteReference}
@param remote: a reference to the L{buildbot.slave.bot.SlaveBuilder}
@type commands: dict: string -> string, or None
@param commands: provides the slave's version of each RemoteCommand
@rtype: L{twisted.internet.defer.Deferred}
@return: a Deferred that fires (with 'self') when the slave-side
builder is fully attached and ready to accept commands.
"""
for s in self.attaching_slaves + self.slaves:
if s.slave == slave:
# already attached to them. This is fairly common, since
# attached() gets called each time we receive the builder
# list from the slave, and we ask for it each time we add or
# remove a builder. So if the slave is hosting builders
# A,B,C, and the config file changes A, we'll remove A and
# re-add it, triggering two builder-list requests, getting
# two redundant calls to attached() for B, and another two
# for C.
#
# Therefore, when we see that we're already attached, we can
# just ignore it.
return defer.succeed(self)
sb = slavebuilder.SlaveBuilder()
sb.setBuilder(self)
self.attaching_slaves.append(sb)
d = sb.attached(slave, remote, commands)
d.addCallback(self._attached)
d.addErrback(self._not_attached, slave)
return d
def _attached(self, sb):
self.builder_status.addPointEvent(['connect', sb.slave.slavename])
self.attaching_slaves.remove(sb)
self.slaves.append(sb)
self.updateBigStatus()
return self
def _not_attached(self, why, slave):
# already log.err'ed by SlaveBuilder._attachFailure
# TODO: remove from self.slaves (except that detached() should get
# run first, right?)
log.err(why, 'slave failed to attach')
self.builder_status.addPointEvent(['failed', 'connect',
slave.slavename])
# TODO: add an HTMLLogFile of the exception
def detached(self, slave):
"""This is called when the connection to the bot is lost."""
for sb in self.attaching_slaves + self.slaves:
if sb.slave == slave:
break
else:
log.msg("WEIRD: Builder.detached(%s) (%s)"
" not in attaching_slaves(%s)"
" or slaves(%s)" % (slave, slave.slavename,
self.attaching_slaves,
self.slaves))
return
if sb.state == BUILDING:
# the Build's .lostRemote method (invoked by a notifyOnDisconnect
# handler) will cause the Build to be stopped, probably right
# after the notifyOnDisconnect that invoked us finishes running.
pass
if sb in self.attaching_slaves:
self.attaching_slaves.remove(sb)
if sb in self.slaves:
self.slaves.remove(sb)
self.builder_status.addPointEvent(['disconnect', slave.slavename])
sb.detached() # inform the SlaveBuilder that their slave went away
self.updateBigStatus()
def updateBigStatus(self):
try:
# Catch exceptions here, since this is called in a LoopingCall.
if not self.builder_status:
return
if not self.slaves:
self.builder_status.setBigState("offline")
elif self.building or self.old_building:
self.builder_status.setBigState("building")
else:
self.builder_status.setBigState("idle")
except Exception:
log.err(None, "while trying to update status of builder '%s'" % (self.name,))
def getAvailableSlaves(self):
return [sb for sb in self.slaves if sb.isAvailable()]
def canStartWithSlavebuilder(self, slavebuilder):
locks = [(self.botmaster.getLockFromLockAccess(access), access)
for access in self.config.locks]
return Build.canStartWithSlavebuilder(locks, slavebuilder)
def canStartBuild(self, slavebuilder, breq):
if callable(self.config.canStartBuild):
return defer.maybeDeferred(self.config.canStartBuild, self, slavebuilder, breq)
return defer.succeed(True)
@defer.inlineCallbacks
def _startBuildFor(self, slavebuilder, buildrequests):
"""Start a build on the given slave.
@param build: the L{base.Build} to start
@param sb: the L{SlaveBuilder} which will host this build
@return: (via Deferred) boolean indicating that the build was
succesfully started.
"""
# as of the Python versions supported now, try/finally can't be used
# with a generator expression. So instead, we push cleanup functions
# into a list so that, at any point, we can abort this operation.
cleanups = []
def run_cleanups():
try:
while cleanups:
fn = cleanups.pop()
fn()
except:
log.err(failure.Failure(), "while running %r" % (run_cleanups,))
# the last cleanup we want to perform is to update the big
# status based on any other cleanup
cleanups.append(lambda: self.updateBigStatus())
build = self.config.factory.newBuild(buildrequests)
build.setBuilder(self)
log.msg("starting build %s using slave %s" % (build, slavebuilder))
# set up locks
build.setLocks(self.config.locks)
cleanups.append(lambda: slavebuilder.slave.releaseLocks())
if len(self.config.env) > 0:
build.setSlaveEnvironment(self.config.env)
# append the build to self.building
self.building.append(build)
cleanups.append(lambda: self.building.remove(build))
# update the big status accordingly
self.updateBigStatus()
try:
ready = yield slavebuilder.prepare(self.builder_status, build)
except:
log.err(failure.Failure(), 'while preparing slavebuilder:')
ready = False
# If prepare returns True then it is ready and we start a build
# If it returns false then we don't start a new build.
if not ready:
log.msg("slave %s can't build %s after all; re-queueing the "
"request" % (build, slavebuilder))
run_cleanups()
defer.returnValue(False)
return
# ping the slave to make sure they're still there. If they've
# fallen off the map (due to a NAT timeout or something), this
# will fail in a couple of minutes, depending upon the TCP
# timeout.
#
# TODO: This can unnecessarily suspend the starting of a build, in
# situations where the slave is live but is pushing lots of data to
# us in a build.
log.msg("starting build %s.. pinging the slave %s"
% (build, slavebuilder))
try:
ping_success = yield slavebuilder.ping()
except:
log.err(failure.Failure(), 'while pinging slave before build:')
ping_success = False
if not ping_success:
log.msg("slave ping failed; re-queueing the request")
run_cleanups()
defer.returnValue(False)
return
# The buildslave is ready to go. slavebuilder.buildStarted() sets its
# state to BUILDING (so we won't try to use it for any other builds).
# This gets set back to IDLE by the Build itself when it finishes.
slavebuilder.buildStarted()
cleanups.append(lambda: slavebuilder.buildFinished())
# tell the remote that it's starting a build, too
try:
yield slavebuilder.remote.callRemote("startBuild")
except:
log.err(failure.Failure(), 'while calling remote startBuild:')
run_cleanups()
defer.returnValue(False)
return
# create the BuildStatus object that goes with the Build
bs = self.builder_status.newBuild()
# record the build in the db - one row per buildrequest
try:
bids = []
for req in build.requests:
bid = yield self.master.db.builds.addBuild(req.id, bs.number)
bids.append(bid)
except:
log.err(failure.Failure(), 'while adding rows to build table:')
run_cleanups()
defer.returnValue(False)
return
# IMPORTANT: no yielding is allowed from here to the startBuild call!
# it's possible that we lost the slave remote between the ping above
# and now. If so, bail out. The build.startBuild call below transfers
# responsibility for monitoring this connection to the Build instance,
# so this check ensures we hand off a working connection.
if not slavebuilder.remote:
log.msg("slave disappeared before build could start")
run_cleanups()
defer.returnValue(False)
return
# let status know
self.master.status.build_started(req.id, self.name, bs)
# start the build. This will first set up the steps, then tell the
# BuildStatus that it has started, which will announce it to the world
# (through our BuilderStatus object, which is its parent). Finally it
# will start the actual build process. This is done with a fresh
# Deferred since _startBuildFor should not wait until the build is
# finished. This uses `maybeDeferred` to ensure that any exceptions
# raised by startBuild are treated as deferred errbacks (see
# http://trac.buildbot.net/ticket/2428).
d = defer.maybeDeferred(build.startBuild,
bs, self.expectations, slavebuilder)
d.addCallback(self.buildFinished, slavebuilder, bids)
# this shouldn't happen. if it does, the slave will be wedged
d.addErrback(log.err, 'from a running build; this is a '
'serious error - please file a bug at http://buildbot.net')
# make sure the builder's status is represented correctly
self.updateBigStatus()
defer.returnValue(True)
def setupProperties(self, props):
props.setProperty("buildername", self.name, "Builder")
if len(self.config.properties) > 0:
for propertyname in self.config.properties:
props.setProperty(propertyname,
self.config.properties[propertyname],
"Builder")
def buildFinished(self, build, sb, bids):
"""This is called when the Build has finished (either success or
failure). Any exceptions during the build are reported with
results=FAILURE, not with an errback."""
# by the time we get here, the Build has already released the slave,
# which will trigger a check for any now-possible build requests
# (maybeStartBuilds)
# mark the builds as finished, although since nothing ever reads this
# table, it's not too important that it complete successfully
d = self.master.db.builds.finishBuilds(bids)
d.addErrback(log.err, 'while marking builds as finished (ignored)')
results = build.build_status.getResults()
self.building.remove(build)
if results == RETRY:
self._resubmit_buildreqs(build).addErrback(log.err)
else:
brids = [br.id for br in build.requests]
db = self.master.db
d = db.buildrequests.completeBuildRequests(brids, results)
d.addCallback(
lambda _: self._maybeBuildsetsComplete(build.requests))
# nothing in particular to do with this deferred, so just log it if
# it fails..
d.addErrback(log.err, 'while marking build requests as completed')
if sb.slave:
sb.slave.releaseLocks()
self.updateBigStatus()
@defer.inlineCallbacks
def _maybeBuildsetsComplete(self, requests):
# inform the master that we may have completed a number of buildsets
for br in requests:
yield self.master.maybeBuildsetComplete(br.bsid)
def _resubmit_buildreqs(self, build):
brids = [br.id for br in build.requests]
return self.master.db.buildrequests.unclaimBuildRequests(brids)
def setExpectations(self, progress):
"""Mark the build as successful and update expectations for the next
build. Only call this when the build did not fail in any way that
would invalidate the time expectations generated by it. (if the
compile failed and thus terminated early, we can't use the last
build to predict how long the next one will take).
"""
if self.expectations:
self.expectations.update(progress)
else:
# the first time we get a good build, create our Expectations
# based upon its results
self.expectations = Expectations(progress)
log.msg("new expectations: %s seconds" %
self.expectations.expectedBuildTime())
# Build Creation
@defer.inlineCallbacks
def maybeStartBuild(self, slavebuilder, breqs):
# This method is called by the botmaster whenever this builder should
# start a set of buildrequests on a slave. Do not call this method
# directly - use master.botmaster.maybeStartBuildsForBuilder, or one of
# the other similar methods if more appropriate
# first, if we're not running, then don't start builds; stopService
# uses this to ensure that any ongoing maybeStartBuild invocations
# are complete before it stops.
if not self.running:
defer.returnValue(False)
return
# If the build fails from here on out (e.g., because a slave has failed),
# it will be handled outside of this function. TODO: test that!
build_started = yield self._startBuildFor(slavebuilder, breqs)
defer.returnValue(build_started)
# a few utility functions to make the maybeStartBuild a bit shorter and
# easier to read
def getMergeRequestsFn(self):
"""Helper function to determine which mergeRequests function to use
from L{_mergeRequests}, or None for no merging"""
# first, seek through builder, global, and the default
mergeRequests_fn = self.config.mergeRequests
if mergeRequests_fn is None:
mergeRequests_fn = self.master.config.mergeRequests
if mergeRequests_fn is None:
mergeRequests_fn = True
# then translate False and True properly
if mergeRequests_fn is False:
mergeRequests_fn = None
elif mergeRequests_fn is True:
mergeRequests_fn = Builder._defaultMergeRequestFn
return mergeRequests_fn
def _defaultMergeRequestFn(self, req1, req2):
return req1.canBeMergedWith(req2)
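# Illustrative only (assumed master.cfg usage): getMergeRequestsFn() above
# resolves the merge policy from the builder config, then the global config,
# then falls back to _defaultMergeRequestFn. A custom callable receives the
# builder and two build requests; the per-builder rule shown here is made up
# for the example.
#
#   def mergeRequests(builder, req1, req2):
#       if builder.name == 'nightly':
#           return False                    # never merge nightly requests
#       return req1.canBeMergedWith(req2)
#
#   c['mergeRequests'] = mergeRequests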
class BuilderControl:
implements(interfaces.IBuilderControl)
def __init__(self, builder, control):
self.original = builder
self.control = control
def submitBuildRequest(self, ss, reason, props=None):
d = ss.getSourceStampSetId(self.control.master)
def add_buildset(sourcestampsetid):
return self.control.master.addBuildset(
builderNames=[self.original.name],
sourcestampsetid=sourcestampsetid, reason=reason, properties=props)
d.addCallback(add_buildset)
def get_brs(xxx_todo_changeme):
(bsid, brids) = xxx_todo_changeme
brs = BuildRequestStatus(self.original.name,
brids[self.original.name],
self.control.master.status)
return brs
d.addCallback(get_brs)
return d
@defer.inlineCallbacks
def rebuildBuild(self, bs, reason="<rebuild, no reason given>", extraProperties=None, absolute=True):
if not bs.isFinished():
return
# Make a copy of the properties so as not to modify the original build.
properties = Properties()
# Don't include runtime-set properties in a rebuild request
properties.updateFromPropertiesNoRuntime(bs.getProperties())
if extraProperties is not None:
properties.updateFromProperties(extraProperties)
properties_dict = dict((k, (v, s)) for (k, v, s) in properties.asList())
ssList = bs.getSourceStamps(absolute=absolute)
if ssList:
sourcestampsetid = yield ssList[0].getSourceStampSetId(self.control.master)
dl = []
for ss in ssList[1:]:
# add deferred to the list
dl.append(ss.addSourceStampToDatabase(self.control.master, sourcestampsetid))
yield defer.gatherResults(dl)
bsid, brids = yield self.control.master.addBuildset(
builderNames=[self.original.name],
sourcestampsetid=sourcestampsetid,
reason=reason,
properties=properties_dict)
defer.returnValue((bsid, brids))
else:
log.msg('Cannot start rebuild, rebuild has no sourcestamps for a new build')
defer.returnValue(None)
@defer.inlineCallbacks
def getPendingBuildRequestControls(self):
master = self.original.master
brdicts = yield master.db.buildrequests.getBuildRequests(
buildername=self.original.name,
claimed=False)
# convert those into BuildRequest objects
buildrequests = []
for brdict in brdicts:
br = yield buildrequest.BuildRequest.fromBrdict(
self.control.master, brdict)
buildrequests.append(br)
# and return the corresponding control objects
defer.returnValue([buildrequest.BuildRequestControl(self.original, r)
for r in buildrequests])
def getBuild(self, number):
return self.original.getBuild(number)
def ping(self):
if not self.original.slaves:
self.original.builder_status.addPointEvent(["ping", "no slave"])
return defer.succeed(False) # interfaces.NoSlaveError
dl = []
for s in self.original.slaves:
dl.append(s.ping(self.original.builder_status))
d = defer.DeferredList(dl)
d.addCallback(self._gatherPingResults)
return d
def _gatherPingResults(self, res):
for ignored, success in res:
if not success:
return False
return True
| mitya57/debian-buildbot | buildbot/process/builder.py | Python | gpl-2.0 | 25,892 |
#! /usr/bin/env python
#
# test_errors.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
"""
Tests for error handling
"""
import unittest
import nest
import sys
class ErrorTestCase(unittest.TestCase):
"""Tests if errors are handled correctly"""
def test_Raise(self):
"""Error raising"""
nest.ResetKernel()
try:
raise nest.NESTError('test')
self.fail('an error should have been raised!') # should not be reached
except nest.NESTError:
info = sys.exc_info()[1]
if not "test" in info.__str__():
self.fail('could not pass error message to NEST!')
# another error has been thrown, this is wrong
except:
self.fail('wrong error has been thrown')
def test_StackUnderFlow(self):
"""Stack underflow"""
nest.ResetKernel()
try:
nest.sr('clear ;')
self.fail('an error should have been raised!') # should not be reached
except nest.NESTError:
info = sys.exc_info()[1]
if not "StackUnderflow" in info.__str__():
self.fail('wrong error message')
# another error has been thrown, this is wrong
except:
self.fail('wrong error has been thrown')
def test_DivisionByZero(self):
"""Division by zero"""
nest.ResetKernel()
try:
nest.sr('1 0 div')
self.fail('an error should have been raised!') # should not be reached
except nest.NESTError:
info = sys.exc_info()[1]
if not "DivisionByZero" in info.__str__():
self.fail('wrong error message')
# another error has been thrown, this is wrong
except:
self.fail('wrong error has been thrown')
def test_UnknownNode(self):
"""Unknown node"""
nest.ResetKernel()
try:
nest.Connect([99],[99])
self.fail('an error should have been raised!') # should not be reached
except nest.NESTError:
info = sys.exc_info()[1]
if not "UnknownNode" in info.__str__():
self.fail('wrong error message')
# another error has been thrown, this is wrong
except:
self.fail('wrong error has been thrown')
def test_UnknownModel(self):
"""Unknown model name"""
nest.ResetKernel()
try:
nest.Create(-1)
self.fail('an error should have been raised!') # should not be reached
except nest.NESTError:
info = sys.exc_info()[1]
if not "UnknownModelName" in info.__str__():
self.fail('wrong error message')
# another error has been thrown, this is wrong
except:
self.fail('wrong error has been thrown')
def suite():
suite = unittest.makeSuite(ErrorTestCase,'test')
return suite
if __name__ == "__main__":
runner = unittest.TextTestRunner(verbosity=2)
runner.run(suite())
| gewaltig/cython-neuron | pynest/nest/tests/test_errors.py | Python | gpl-2.0 | 3,727 |
''' Crawl the running Docker site and verify all links give a 200 OK '''
import unittest
import subprocess
# Placeholder for future python based codi/TURFF
class BasicTests(unittest.TestCase):
''' Base class for testing '''
def setUp(self):
''' Define some unique data for validation '''
pass
def tearDown(self):
''' Destroy unique data '''
pass
| todorez/crops | tests/unit/test_basic.py | Python | gpl-2.0 | 397 |
# encoding: utf-8
# Author: Zhang Huangbin <[email protected]>
import os
import glob
import web
langmaps = {
'en_US': u'English (US)',
'sq_AL': u'Albanian',
'ar_SA': u'Arabic',
'hy_AM': u'Armenian',
'az_AZ': u'Azerbaijani',
'bs_BA': u'Bosnian (Serbian Latin)',
'bg_BG': u'Bulgarian',
'ca_ES': u'Català',
'cy_GB': u'Cymraeg',
'hr_HR': u'Croatian (Hrvatski)',
'cs_CZ': u'Čeština',
'da_DK': u'Dansk',
'de_DE': u'Deutsch (Deutsch)',
'de_CH': u'Deutsch (Schweiz)',
'en_GB': u'English (GB)',
'es_ES': u'Español',
'eo': u'Esperanto',
'et_EE': u'Estonian',
'eu_ES': u'Euskara (Basque)',
'fi_FI': u'Finnish (Suomi)',
'nl_BE': u'Flemish',
'fr_FR': u'Français',
'gl_ES': u'Galego (Galician)',
'ka_GE': u'Georgian (Kartuli)',
'el_GR': u'Greek',
'he_IL': u'Hebrew',
'hi_IN': u'Hindi',
'hu_HU': u'Hungarian',
'is_IS': u'Icelandic',
'id_ID': u'Indonesian',
'ga_IE': u'Irish',
'it_IT': u'Italiano',
'ja_JP': u'Japanese (日本語)',
'ko_KR': u'Korean',
'ku': u'Kurdish (Kurmancî)',
'lv_LV': u'Latvian',
'lt_LT': u'Lithuanian',
'mk_MK': u'Macedonian',
'ms_MY': u'Malay',
'nl_NL': u'Netherlands',
'ne_NP': u'Nepali',
'nb_NO': u'Norsk (Bokmål)',
'nn_NO': u'Norsk (Nynorsk)',
'fa': u'Persian (Farsi)',
'pl_PL': u'Polski',
'pt_BR': u'Portuguese (Brazilian)',
'pt_PT': u'Portuguese (Standard)',
'ro_RO': u'Romanian',
'ru_RU': u'Русский',
'sr_CS': u'Serbian (Cyrillic)',
'si_LK': u'Sinhala',
'sk_SK': u'Slovak',
'sl_SI': u'Slovenian',
'sv_SE': u'Swedish (Svenska)',
'th_TH': u'Thai',
'tr_TR': u'Türkçe',
'uk_UA': u'Ukrainian',
'vi_VN': u'Vietnamese',
'zh_CN': u'简体中文',
'zh_TW': u'繁體中文',
}
# All available timezone names and time offsets (in minutes).
allTimezonesOffsets = {
'GMT-12:00': -720,
'GMT-11:00': -660,
'GMT-10:00': -600,
'GMT-09:30': -570,
'GMT-09:00': -540,
'GMT-08:00': -480,
'GMT-07:00': -420,
'GMT-06:00': -360,
'GMT-05:00': -300,
'GMT-04:30': -270,
'GMT-04:00': -240,
'GMT-03:30': -210,
'GMT-03:00': -180,
'GMT-02:00': -120,
'GMT-01:00': -60,
'GMT': 0,
'GMT+01:00': 60,
'GMT+02:00': 120,
'GMT+03:00': 180,
'GMT+03:30': 210,
'GMT+04:00': 240,
'GMT+04:30': 270,
'GMT+05:00': 300,
'GMT+05:30': 330,
'GMT+05:45': 345,
'GMT+06:00': 360,
'GMT+06:30': 390,
'GMT+07:00': 420,
'GMT+08:00': 480,
'GMT+08:45': 525,
'GMT+09:00': 540,
'GMT+09:30': 570,
'GMT+10:00': 600,
'GMT+10:30': 630,
'GMT+11:00': 660,
'GMT+11:30': 690,
'GMT+12:00': 720,
'GMT+12:45': 765,
'GMT+13:00': 780,
'GMT+14:00': 840,
}
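# Illustrative helper (not part of the original iRedAdmin module): shows how
# the minute offsets above map back to their 'GMT+HH:MM' labels; added only
# to document the data layout of allTimezonesOffsets.
def _example_format_timezone_offset(minutes):
    """Return the 'GMT+HH:MM' style label for an offset given in minutes."""
    if minutes == 0:
        return 'GMT'
    sign = '+' if minutes > 0 else '-'
    hours, mins = divmod(abs(minutes), 60)
    return 'GMT%s%02d:%02d' % (sign, hours, mins)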
# Get available languages.
def get_language_maps():
# Get available languages.
rootdir = os.path.abspath(os.path.dirname(__file__)) + '/../'
available_langs = [
web.safestr(os.path.basename(v))
for v in glob.glob(rootdir + 'i18n/[a-z][a-z]_[A-Z][A-Z]')
if os.path.basename(v) in langmaps]
available_langs += [
web.safestr(os.path.basename(v))
for v in glob.glob(rootdir + 'i18n/[a-z][a-z]')
if os.path.basename(v) in langmaps]
available_langs.sort()
# Get language maps.
languagemaps = {}
for i in available_langs:
if i in langmaps:
languagemaps.update({i: langmaps[i]})
return languagemaps
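# Example use (illustrative, not part of the module): list the available
# languages as (code, display name) pairs, e.g. for a preference drop-down.
#
#   maps = get_language_maps()
#   for code, name in sorted(maps.items()):
#       print code, name        # e.g. 'de_DE Deutsch (Deutsch)'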
| villaverde/iredadmin | libs/languages.py | Python | gpl-2.0 | 3,515 |
# Copyright (C) 2009, 2010 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from __future__ import absolute_import
import os
import time
from bzrlib import (
controldir,
errors,
osutils,
registry,
trace,
)
from bzrlib.i18n import gettext
from bzrlib.branch import (
Branch,
)
from bzrlib.revision import (
NULL_REVISION,
)
format_registry = registry.Registry()
def send(target_branch, revision, public_branch, remember,
format, no_bundle, no_patch, output, from_, mail_to, message, body,
to_file, strict=None):
possible_transports = []
tree, branch = controldir.ControlDir.open_containing_tree_or_branch(
from_, possible_transports=possible_transports)[:2]
# we may need to write data into branch's repository to calculate
# the data to send.
branch.lock_write()
try:
if output is None:
config_stack = branch.get_config_stack()
if mail_to is None:
mail_to = config_stack.get('submit_to')
mail_client = config_stack.get('mail_client')(config_stack)
if (not getattr(mail_client, 'supports_body', False)
and body is not None):
raise errors.BzrCommandError(gettext(
'Mail client "%s" does not support specifying body') %
mail_client.__class__.__name__)
if remember and target_branch is None:
raise errors.BzrCommandError(gettext(
'--remember requires a branch to be specified.'))
stored_target_branch = branch.get_submit_branch()
remembered_target_branch = None
if target_branch is None:
target_branch = stored_target_branch
remembered_target_branch = "submit"
else:
# Remembers if asked explicitly or no previous location is set
if remember or (
remember is None and stored_target_branch is None):
branch.set_submit_branch(target_branch)
if target_branch is None:
target_branch = branch.get_parent()
remembered_target_branch = "parent"
if target_branch is None:
raise errors.BzrCommandError(gettext('No submit branch known or'
' specified'))
if remembered_target_branch is not None:
trace.note(gettext('Using saved {0} location "{1}" to determine '
'what changes to submit.').format(
remembered_target_branch,
target_branch))
submit_branch = Branch.open(target_branch,
possible_transports=possible_transports)
possible_transports.append(submit_branch.bzrdir.root_transport)
if mail_to is None or format is None:
if mail_to is None:
mail_to = submit_branch.get_config_stack().get(
'child_submit_to')
if format is None:
formatname = submit_branch.get_child_submit_format()
try:
format = format_registry.get(formatname)
except KeyError:
raise errors.BzrCommandError(
gettext("No such send format '%s'.") % formatname)
stored_public_branch = branch.get_public_branch()
if public_branch is None:
public_branch = stored_public_branch
# Remembers if asked explicitly or no previous location is set
elif (remember
or (remember is None and stored_public_branch is None)):
branch.set_public_branch(public_branch)
if no_bundle and public_branch is None:
raise errors.BzrCommandError(gettext('No public branch specified or'
' known'))
base_revision_id = None
revision_id = None
if revision is not None:
if len(revision) > 2:
raise errors.BzrCommandError(gettext('bzr send takes '
'at most two revision identifiers'))
revision_id = revision[-1].as_revision_id(branch)
if len(revision) == 2:
base_revision_id = revision[0].as_revision_id(branch)
if revision_id is None:
if tree is not None:
tree.check_changed_or_out_of_date(
strict, 'send_strict',
more_error='Use --no-strict to force the send.',
more_warning='Uncommitted changes will not be sent.')
revision_id = branch.last_revision()
if revision_id == NULL_REVISION:
raise errors.BzrCommandError(gettext('No revisions to submit.'))
if format is None:
format = format_registry.get()
directive = format(branch, revision_id, target_branch,
public_branch, no_patch, no_bundle, message, base_revision_id,
submit_branch)
if output is None:
directive.compose_merge_request(mail_client, mail_to, body,
branch, tree)
else:
if directive.multiple_output_files:
if output == '-':
raise errors.BzrCommandError(gettext('- not supported for '
'merge directives that use more than one output file.'))
if not os.path.exists(output):
os.mkdir(output, 0755)
for (filename, lines) in directive.to_files():
path = os.path.join(output, filename)
outfile = open(path, 'wb')
try:
outfile.writelines(lines)
finally:
outfile.close()
else:
if output == '-':
outfile = to_file
else:
outfile = open(output, 'wb')
try:
outfile.writelines(directive.to_lines())
finally:
if outfile is not to_file:
outfile.close()
finally:
branch.unlock()
def _send_4(branch, revision_id, target_branch, public_branch,
no_patch, no_bundle, message,
base_revision_id, local_target_branch=None):
from bzrlib import merge_directive
return merge_directive.MergeDirective2.from_objects(
branch.repository, revision_id, time.time(),
osutils.local_time_offset(), target_branch,
public_branch=public_branch,
include_patch=not no_patch,
include_bundle=not no_bundle, message=message,
base_revision_id=base_revision_id,
local_target_branch=local_target_branch)
def _send_0_9(branch, revision_id, submit_branch, public_branch,
no_patch, no_bundle, message,
base_revision_id, local_target_branch=None):
if not no_bundle:
if not no_patch:
patch_type = 'bundle'
else:
raise errors.BzrCommandError(gettext('Format 0.9 does not'
' permit bundle with no patch'))
else:
if not no_patch:
patch_type = 'diff'
else:
patch_type = None
from bzrlib import merge_directive
return merge_directive.MergeDirective.from_objects(
branch.repository, revision_id, time.time(),
osutils.local_time_offset(), submit_branch,
public_branch=public_branch, patch_type=patch_type,
message=message, local_target_branch=local_target_branch)
format_registry.register('4',
_send_4, 'Bundle format 4, Merge Directive 2 (default)')
format_registry.register('0.9',
_send_0_9, 'Bundle format 0.9, Merge Directive 1')
format_registry.default_key = '4'
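# Illustrative only: a plugin could register an additional send format next to
# '4' and '0.9'. The callable must accept the same arguments as _send_4 and
# _send_0_9 above; the format name and function body here are hypothetical.
#
#   def _send_custom(branch, revision_id, submit_branch, public_branch,
#                    no_patch, no_bundle, message, base_revision_id,
#                    local_target_branch=None):
#       ...  # build and return a merge-directive-like object
#
#   format_registry.register('custom', _send_custom,
#                            'Hypothetical custom merge directive format')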
| Distrotech/bzr | bzrlib/send.py | Python | gpl-2.0 | 8,490 |
from tasks import func
func.delay(1, 2)
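# Purely hypothetical sketch of the tasks module imported above; the real
# tasks.py is not shown in this section and microcelery's actual decorator API
# may differ. Included only so the .delay(1, 2) call has some context.
#
#   # tasks.py (assumed)
#   from microcelery import task
#
#   @task
#   def func(a, b):
#       return a + b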
| emanuelvianna/microcelery | tests/client.py | Python | gpl-2.0 | 41 |
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Unit tests for the search engine."""
__revision__ = \
"$Id$"
from invenio.testutils import InvenioTestCase
from invenio import search_engine
from invenio.testutils import make_test_suite, run_test_suite
from invenio.config import CFG_CERN_SITE
class TestMiscUtilityFunctions(InvenioTestCase):
"""Test whatever non-data-specific utility functions are essential."""
def test_ziplist2x2(self):
"""search engine - ziplist 2 x 2"""
self.assertEqual(search_engine.ziplist([1, 2], [3, 4]), [[1, 3], [2, 4]])
def test_ziplist3x3(self):
"""search engine - ziplist 3 x 3"""
self.assertEqual(search_engine.ziplist([1, 2, 3], ['a', 'b', 'c'], [9, 8, 7]),
[[1, 'a', 9], [2, 'b', 8], [3, 'c', 7]])
class TestWashQueryParameters(InvenioTestCase):
"""Test for washing of search query parameters."""
def test_wash_pattern(self):
"""search engine - washing of query patterns"""
self.assertEqual("Ellis, J", search_engine.wash_pattern('Ellis, J'))
#self.assertEqual("ell", search_engine.wash_pattern('ell*'))
def test_wash_dates_from_tuples(self):
"""search engine - washing of date arguments from (year,month,day) tuples"""
self.assertEqual(search_engine.wash_dates(d1y=1980, d1m=1, d1d=28, d2y=2003, d2m=2, d2d=3),
('1980-01-28 00:00:00', '2003-02-03 00:00:00'))
self.assertEqual(search_engine.wash_dates(d1y=1980, d1m=0, d1d=28, d2y=2003, d2m=2, d2d=0),
('1980-01-28 00:00:00', '2003-02-31 00:00:00'))
def test_wash_dates_from_datetexts(self):
"""search engine - washing of date arguments from datetext strings"""
self.assertEqual(search_engine.wash_dates(d1="1980-01-28 01:02:03", d2="1980-01-29 12:34:56"),
('1980-01-28 01:02:03', '1980-01-29 12:34:56'))
self.assertEqual(search_engine.wash_dates(d1="1980-01-28 01:02:03"),
('1980-01-28 01:02:03', '9999-12-31 00:00:00'))
self.assertEqual(search_engine.wash_dates(d2="1980-01-29 12:34:56"),
('0000-01-01 00:00:00', '1980-01-29 12:34:56'))
def test_wash_dates_from_both(self):
"""search engine - washing of date arguments from both datetext strings and (year,month,day) tuples"""
# datetext mode takes precedence, d1* should be ignored
self.assertEqual(search_engine.wash_dates(d1="1980-01-28 01:02:03", d1y=1980, d1m=1, d1d=28),
('1980-01-28 01:02:03', '9999-12-31 00:00:00'))
# datetext mode takes precedence, d2 missing, d2* should be ignored
self.assertEqual(search_engine.wash_dates(d1="1980-01-28 01:02:03", d2y=2003, d2m=2, d2d=3),
('1980-01-28 01:02:03', '2003-02-03 00:00:00'))
class TestQueryParser(InvenioTestCase):
"""Test of search pattern (or query) parser."""
def _check(self, p, f, m, result_wanted):
"Internal checking function calling create_basic_search_units."
result_obtained = search_engine.create_basic_search_units(None, p, f, m)
assert result_obtained == result_wanted, \
'obtained %s instead of %s' % (repr(result_obtained),
repr(result_wanted))
return
def test_parsing_single_word_query(self):
"search engine - parsing single word queries"
self._check('word', '', None, [['+', 'word', '', 'w']])
def test_parsing_single_word_with_boolean_operators(self):
"search engine - parsing single word queries"
self._check('+word', '', None, [['+', 'word', '', 'w']])
self._check('-word', '', None, [['-', 'word', '', 'w']])
self._check('|word', '', None, [['|', 'word', '', 'w']])
def test_parsing_single_word_in_field(self):
"search engine - parsing single word queries in a logical field"
self._check('word', 'title', None, [['+', 'word', 'title', 'w']])
def test_parsing_single_word_in_tag(self):
"search engine - parsing single word queries in a physical tag"
self._check('word', '500', None, [['+', 'word', '500', 'a']])
def test_parsing_query_with_commas(self):
"search engine - parsing queries with commas"
self._check('word,word', 'title', None,
[['+', 'word,word', 'title', 'a']])
def test_parsing_exact_phrase_query(self):
"search engine - parsing exact phrase"
self._check('"the word"', 'title', None,
[['+', 'the word', 'title', 'a']])
def test_parsing_exact_phrase_query_unbalanced(self):
"search engine - parsing unbalanced exact phrase"
self._check('"the word', 'title', None,
[['+', '"the', 'title', 'w'],
['+', 'word', 'title', 'w']])
def test_parsing_exact_phrase_query_in_any_field(self):
"search engine - parsing exact phrase in any field"
self._check('"the word"', '', None,
[['+', 'the word', '', 'a']])
def test_parsing_partial_phrase_query(self):
"search engine - parsing partial phrase"
self._check("'the word'", 'title', None,
[['+', '%the word%', 'title', 'a']])
def test_parsing_partial_phrase_query_unbalanced(self):
"search engine - parsing unbalanced partial phrase"
self._check("'the word", 'title', None,
[['+', "'the", 'title', 'w'],
['+', "word", 'title', 'w']])
def test_parsing_partial_phrase_query_in_any_field(self):
"search engine - parsing partial phrase in any field"
self._check("'the word'", '', None,
[['+', '%the word%', '', 'a']])
def test_parsing_regexp_query(self):
"search engine - parsing regex matches"
self._check("/the word/", 'title', None,
[['+', 'the word', 'title', 'r']])
def test_parsing_regexp_query_unbalanced(self):
"search engine - parsing unbalanced regexp"
self._check("/the word", 'title', None,
[['+', '/the', 'title', 'w'],
['+', 'word', 'title', 'w']])
def test_parsing_regexp_query_in_any_field(self):
"search engine - parsing regexp searches in any field"
self._check("/the word/", '', None,
[['+', 'the word', '', 'r']])
def test_parsing_boolean_query(self):
"search engine - parsing boolean query with several words"
self._check("muon kaon ellis cern", '', None,
[['+', 'muon', '', 'w'],
['+', 'kaon', '', 'w'],
['+', 'ellis', '', 'w'],
['+', 'cern', '', 'w']])
def test_parsing_boolean_query_with_word_operators(self):
"search engine - parsing boolean query with word operators"
self._check("muon and kaon or ellis not cern", '', None,
[['+', 'muon', '', 'w'],
['+', 'kaon', '', 'w'],
['|', 'ellis', '', 'w'],
['-', 'cern', '', 'w']])
def test_parsing_boolean_query_with_symbol_operators(self):
"search engine - parsing boolean query with symbol operators"
self._check("muon +kaon |ellis -cern", '', None,
[['+', 'muon', '', 'w'],
['+', 'kaon', '', 'w'],
['|', 'ellis', '', 'w'],
['-', 'cern', '', 'w']])
def test_parsing_boolean_query_with_symbol_operators_and_spaces(self):
"search engine - parsing boolean query with operators and spaces"
self._check("muon + kaon | ellis - cern", '', None,
[['+', 'muon', '', 'w'],
['+', 'kaon', '', 'w'],
['|', 'ellis', '', 'w'],
['-', 'cern', '', 'w']])
def test_parsing_boolean_query_with_symbol_operators_and_no_spaces(self):
"search engine - parsing boolean query with operators and no spaces"
self._check("muon+kaon|ellis-cern", '', None,
[['+', 'muon+kaon|ellis-cern', '', 'w']])
def test_parsing_structured_query_existing(self):
"search engine - parsing structured query, existing index"
self._check("title:muon", '', None,
[['+', 'muon', 'title', 'w']])
if not CFG_CERN_SITE:
def test_parsing_structured_query_existing_field(self):
"search engine - parsing structured query, existing field, but no word index"
self._check("experiment:LHC", '', None,
[['+', 'LHC', 'experiment', 'a']])
def test_parsing_structured_query_nonexisting(self):
"search engine - parsing structured query, non-existing index"
self._check("foo:muon", '', None,
[['+', 'foo:muon', '', 'w']])
def test_parsing_structured_query_marc(self):
"search engine - parsing structured query, MARC-tag defined index"
self._check("245:muon", '', None,
[['+', 'muon', '245', 'a']])
def test_parsing_combined_structured_query(self):
"search engine - parsing combined structured query"
self._check("title:muon author:ellis", '', None,
[['+', 'muon', 'title', 'w'],
['+', 'ellis', 'author', 'w']])
def test_parsing_structured_regexp_query(self):
"search engine - parsing structured regexp query"
self._check("title:/(one|two)/", '', None,
[['+', '(one|two)', 'title', 'r']])
def test_parsing_structured_regexp_marc_query(self):
"search engine - parsing structured regexp MARC query"
self._check("245__a:/(one|two)/", '', None,
[['+', '(one|two)', '245__a', 'r']])
def test_parsing_structured_regexp_refersto_query(self):
"search engine - parsing structured regexp refersto query"
self._check("refersto:/(one|two)/", '', None,
[['+', '(one|two)', 'refersto', 'r']])
def test_parsing_combined_structured_query_in_a_field(self):
"search engine - parsing structured query in a field"
self._check("title:muon author:ellis", 'abstract', None,
[['+', 'muon', 'title', 'w'],
['+', 'ellis', 'author', 'w']])
def test_parsing_colons_and_spaces_well_structured(self):
"search engine - parsing query with colons and spaces, well structured"
self._check("title: muon author:ellis keyword: kaon", 'abstract', None,
[['+', 'muon', 'title', 'w'],
['+', 'ellis', 'author', 'w'],
['+', 'kaon', 'keyword', 'w']])
def test_parsing_colons_and_spaces_badly_structured(self):
"search engine - parsing query with colons and spaces, badly structured"
self._check("foo: bar", 'title', None,
[['+', 'foo', 'title', 'w'],
['+', 'bar', 'title', 'w']])
def test_parsing_colons_and_spaces_for_phrase_query(self):
"search engine - parsing query with colons and spaces, phrase query"
self._check('author: "Ellis, J"', None, None,
[['+', 'Ellis, J', 'author', 'a']])
def test_search_pattern_with_equal_sign(self):
"search engine - parsing query with equal sign"
self._check('title:"s = 630"', None, None,
[['+', 's = 630', 'title', 'a']])
TEST_SUITE = make_test_suite(TestWashQueryParameters,
TestQueryParser,
TestMiscUtilityFunctions)
if __name__ == "__main__":
run_test_suite(TEST_SUITE)
| mvesper/invenio | modules/websearch/lib/search_engine_unit_tests.py | Python | gpl-2.0 | 12,575 |
# -*- coding: iso-8859-1 -*-
# (c) 2006 Stephan Reichholf
# This Software is Free, use it where you want, when you want for whatever you want and modify it if you want but don't remove my copyright!
from Screens.Screen import Screen
from Screens.Standby import TryQuitMainloop
from Screens.MessageBox import MessageBox
from Components.ActionMap import NumberActionMap
from Components.Pixmap import Pixmap
from Components.Sources.StaticText import StaticText
from Components.MenuList import MenuList
from Plugins.Plugin import PluginDescriptor
from Components.config import config
from Tools.Directories import resolveFilename, SCOPE_PLUGINS
from enigma import eEnv
import os
SKINXML = "skin.xml"
DEFAULTSKIN = "<Default Skin>"
class SkinSelector(Screen):
# for i18n:
# _("Choose your Skin")
skinlist = []
root = os.path.join(eEnv.resolve("${datadir}"),"enigma2")
def __init__(self, session, args = None):
Screen.__init__(self, session)
self.skinlist = []
self.previewPath = ""
if os.path.exists(os.path.join(self.root, SKINXML)):
self.skinlist.append(DEFAULTSKIN)
for root, dirs, files in os.walk(self.root, followlinks=True):
for subdir in dirs:
dir = os.path.join(root,subdir)
if os.path.exists(os.path.join(dir,SKINXML)):
self.skinlist.append(subdir)
dirs = []
self["key_red"] = StaticText(_("Close"))
self["introduction"] = StaticText(_("Press OK to activate the selected skin."))
self.skinlist.sort()
self["SkinList"] = MenuList(self.skinlist)
self["Preview"] = Pixmap()
self["actions"] = NumberActionMap(["WizardActions", "InputActions", "EPGSelectActions"],
{
"ok": self.ok,
"back": self.close,
"red": self.close,
"up": self.up,
"down": self.down,
"left": self.left,
"right": self.right,
"info": self.info,
}, -1)
self.onLayoutFinish.append(self.layoutFinished)
def layoutFinished(self):
tmp = config.skin.primary_skin.value.find("/"+SKINXML)
if tmp != -1:
tmp = config.skin.primary_skin.value[:tmp]
idx = 0
for skin in self.skinlist:
if skin == tmp:
break
idx += 1
if idx < len(self.skinlist):
self["SkinList"].moveToIndex(idx)
self.loadPreview()
def up(self):
self["SkinList"].up()
self.loadPreview()
def down(self):
self["SkinList"].down()
self.loadPreview()
def left(self):
self["SkinList"].pageUp()
self.loadPreview()
def right(self):
self["SkinList"].pageDown()
self.loadPreview()
def info(self):
aboutbox = self.session.open(MessageBox,_("STB-GUI Skinselector\n\nIf you experience any problems please contact\[email protected]\n\n\xA9 2006 - Stephan Reichholf"), MessageBox.TYPE_INFO)
aboutbox.setTitle(_("About..."))
def ok(self):
if self["SkinList"].getCurrent() == DEFAULTSKIN:
self.skinfile = "."
else:
self.skinfile = self["SkinList"].getCurrent()
self.skinfile = os.path.join(self.skinfile, SKINXML)
print "Skinselector: Selected Skin: "+self.root+self.skinfile
restartbox = self.session.openWithCallback(self.restartGUI,MessageBox,_("GUI needs a restart to apply a new skin\nDo you want to restart the GUI now?"), MessageBox.TYPE_YESNO)
restartbox.setTitle(_("Restart GUI now?"))
def loadPreview(self):
if self["SkinList"].getCurrent() == DEFAULTSKIN:
pngpath = "."
else:
pngpath = self["SkinList"].getCurrent()
pngpath = os.path.join(os.path.join(self.root, pngpath), "prev.png")
if not os.path.exists(pngpath):
pngpath = resolveFilename(SCOPE_PLUGINS, "SystemPlugins/SkinSelector/noprev.png")
if self.previewPath != pngpath:
self.previewPath = pngpath
self["Preview"].instance.setPixmapFromFile(self.previewPath)
def restartGUI(self, answer):
if answer is True:
config.skin.primary_skin.value = self.skinfile
config.skin.primary_skin.save()
self.session.open(TryQuitMainloop, 3)
def SkinSelMain(session, **kwargs):
session.open(SkinSelector)
def SkinSelSetup(menuid, **kwargs):
if menuid == "ui_menu":
return [(_("Skin"), SkinSelMain, "skin_selector", None)]
else:
return []
def Plugins(**kwargs):
return PluginDescriptor(name="Skinselector", description="Select Your Skin", where = PluginDescriptor.WHERE_MENU, needsRestart = False, fnc=SkinSelSetup)
| kajgan/e2 | lib/python/Plugins/SystemPlugins/SkinSelector/plugin.py | Python | gpl-2.0 | 4,208 |
# -*- coding: utf-8 -*-
"""
/***************************************************************************
Name : Dockable MirrorMap
Description : Creates a dockable map canvas
Date : February 1, 2011
copyright : (C) 2011 by Giuseppe Sucameli (Faunalia)
email : [email protected]
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from qgis.core import *
from qgis.gui import *
import resources_rc
class DockableMirrorMapPlugin:
def __init__(self, iface):
# Save a reference to the QGIS iface
self.iface = iface
def initGui(self):
self.dockableMirrors = []
self.lastDockableMirror = 0
self.dockableAction = QAction(QIcon(":/plugins/DockableMirrorMap/icons/dockablemirrormap.png"), "Dockable MirrorMap", self.iface.mainWindow())
QObject.connect(self.dockableAction, SIGNAL("triggered()"), self.runDockableMirror)
self.aboutAction = QAction(QIcon(":/plugins/DockableMirrorMap/icons/about.png"), "About", self.iface.mainWindow())
QObject.connect(self.aboutAction, SIGNAL("triggered()"), self.about)
# Add to the plugin menu and toolbar
self.iface.addPluginToMenu("Dockable MirrorMap", self.dockableAction)
self.iface.addPluginToMenu("Dockable MirrorMap", self.aboutAction)
self.iface.addToolBarIcon(self.dockableAction)
QObject.connect(self.iface, SIGNAL("projectRead()"), self.onProjectLoaded)
QObject.connect(QgsProject.instance(), SIGNAL("writeProject(QDomDocument &)"), self.onWriteProject)
def unload(self):
QObject.disconnect(self.iface, SIGNAL("projectRead()"), self.onProjectLoaded)
QObject.disconnect(QgsProject.instance(), SIGNAL("writeProject(QDomDocument &)"), self.onWriteProject)
self.removeDockableMirrors()
# Remove the plugin
self.iface.removePluginMenu("Dockable MirrorMap",self.dockableAction)
self.iface.removePluginMenu("Dockable MirrorMap",self.aboutAction)
self.iface.removeToolBarIcon(self.dockableAction)
def about(self):
from DlgAbout import DlgAbout
DlgAbout(self.iface.mainWindow()).exec_()
def removeDockableMirrors(self):
for d in list(self.dockableMirrors):
d.close()
self.dockableMirrors = []
self.lastDockableMirror = 0
def runDockableMirror(self):
from dockableMirrorMap import DockableMirrorMap
wdg = DockableMirrorMap(self.iface.mainWindow(), self.iface)
minsize = wdg.minimumSize()
maxsize = wdg.maximumSize()
self.setupDockWidget(wdg)
self.addDockWidget(wdg)
wdg.setMinimumSize(minsize)
wdg.setMaximumSize(maxsize)
if wdg.isFloating():
wdg.move(50, 50) # move the widget to the center
def setupDockWidget(self, wdg):
othersize = QGridLayout().verticalSpacing()
if len(self.dockableMirrors) <= 0:
width = self.iface.mapCanvas().size().width()/2 - othersize
wdg.setLocation( Qt.RightDockWidgetArea )
wdg.setMinimumWidth( width )
wdg.setMaximumWidth( width )
elif len(self.dockableMirrors) == 1:
height = self.dockableMirrors[0].size().height()/2 - othersize/2
wdg.setLocation( Qt.RightDockWidgetArea )
wdg.setMinimumHeight( height )
wdg.setMaximumHeight( height )
elif len(self.dockableMirrors) == 2:
height = self.iface.mapCanvas().size().height()/2 - othersize/2
wdg.setLocation( Qt.BottomDockWidgetArea )
wdg.setMinimumHeight( height )
wdg.setMaximumHeight( height )
else:
wdg.setLocation( Qt.BottomDockWidgetArea )
wdg.setFloating( True )
def addDockWidget(self, wdg, position=None):
if position == None:
position = wdg.getLocation()
else:
wdg.setLocation( position )
mapCanvas = self.iface.mapCanvas()
oldSize = mapCanvas.size()
prevFlag = mapCanvas.renderFlag()
mapCanvas.setRenderFlag(False)
self.iface.addDockWidget(position, wdg)
wdg.setNumber( self.lastDockableMirror )
self.lastDockableMirror = self.lastDockableMirror+1
self.dockableMirrors.append( wdg )
QObject.connect(wdg, SIGNAL( "closed(PyQt_PyObject)" ), self.onCloseDockableMirror)
newSize = mapCanvas.size()
if newSize != oldSize:
# trick: update the canvas size
mapCanvas.resize(newSize.width() - 1, newSize.height())
mapCanvas.setRenderFlag(prevFlag)
mapCanvas.resize(newSize)
else:
mapCanvas.setRenderFlag(prevFlag)
def onCloseDockableMirror(self, wdg):
if self.dockableMirrors.count( wdg ) > 0:
self.dockableMirrors.remove( wdg )
if len(self.dockableMirrors) <= 0:
self.lastDockableMirror = 0
def onWriteProject(self, domproject):
if len(self.dockableMirrors) <= 0:
return
QgsProject.instance().writeEntry( "DockableMirrorMap", "/numMirrors", len(self.dockableMirrors) )
for i, dockwidget in enumerate(self.dockableMirrors):
# save position and geometry
floating = dockwidget.isFloating()
QgsProject.instance().writeEntry( "DockableMirrorMap", "/mirror%s/floating" % i, floating )
if floating:
position = "%s %s" % (dockwidget.pos().x(), dockwidget.pos().y())
else:
position = u"%s" % dockwidget.getLocation()
QgsProject.instance().writeEntry( "DockableMirrorMap", "/mirror%s/position" % i, str(position) )
size = "%s %s" % (dockwidget.size().width(), dockwidget.size().height())
QgsProject.instance().writeEntry( "DockableMirrorMap", "/mirror%s/size" % i, str(size) )
# save the layer list
layerIds = dockwidget.getMirror().getLayerSet()
QgsProject.instance().writeEntry( "DockableMirrorMap", "/mirror%s/layers" % i, layerIds )
scaleFactor = dockwidget.getMirror().scaleFactor.value()
QgsProject.instance().writeEntryDouble("DockableMirrorMap", "/mirror%s/scaleFactor" % i, scaleFactor)
def onProjectLoaded(self):
# restore mirrors?
num, ok = QgsProject.instance().readNumEntry("DockableMirrorMap", "/numMirrors")
if not ok or num <= 0:
return
# remove all mirrors
self.removeDockableMirrors()
mirror2lids = {}
# load mirrors
for i in range(num):
if num >= 2:
if i == 0:
prevFlag = self.iface.mapCanvas().renderFlag()
self.iface.mapCanvas().setRenderFlag(False)
elif i == num-1:
self.iface.mapCanvas().setRenderFlag(True)
from dockableMirrorMap import DockableMirrorMap
dockwidget = DockableMirrorMap(self.iface.mainWindow(), self.iface)
minsize = dockwidget.minimumSize()
maxsize = dockwidget.maximumSize()
# restore position
floating, ok = QgsProject.instance().readBoolEntry("DockableMirrorMap", "/mirror%s/floating" % i)
if ok:
dockwidget.setFloating( floating )
position, ok = QgsProject.instance().readEntry("DockableMirrorMap", "/mirror%s/position" % i)
if ok:
try:
if floating:
parts = position.split(" ")
if len(parts) >= 2:
dockwidget.move( int(parts[0]), int(parts[1]) )
else:
dockwidget.setLocation( int(position) )
except ValueError:
pass
# restore geometry
dockwidget.setFixedSize( dockwidget.geometry().width(), dockwidget.geometry().height() )
size, ok = QgsProject.instance().readEntry("DockableMirrorMap", "/mirror%s/size" % i)
if ok:
try:
parts = size.split(" ")
dockwidget.setFixedSize( int(parts[0]), int(parts[1]) )
except ValueError:
pass
scaleFactor, ok = QgsProject.instance().readDoubleEntry("DockableMirrorMap", "/mirror%s/scaleFactor" % i, 1.0)
if ok: dockwidget.getMirror().scaleFactor.setValue( scaleFactor )
# get layer list
layerIds, ok = QgsProject.instance().readListEntry("DockableMirrorMap", "/mirror%s/layers" % i)
if ok: dockwidget.getMirror().setLayerSet( layerIds )
self.addDockWidget( dockwidget )
dockwidget.setMinimumSize(minsize)
dockwidget.setMaximumSize(maxsize)
| alfanugraha/LUMENS-repo | processing/DockableMirrorMap/dockableMirrorMapPlugin.py | Python | gpl-2.0 | 8,306 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='StaticPage',
fields=[
('id', models.AutoField(serialize=False, auto_created=True, primary_key=True, verbose_name='ID')),
('url', models.CharField(verbose_name='URL', db_index=True, max_length=100)),
('title', models.CharField(verbose_name='title', max_length=200)),
('title_ru', models.CharField(null=True, verbose_name='title', max_length=200)),
('title_en', models.CharField(null=True, verbose_name='title', max_length=200)),
('content', models.TextField(blank=True, verbose_name='content')),
('content_ru', models.TextField(null=True, blank=True, verbose_name='content')),
('content_en', models.TextField(null=True, blank=True, verbose_name='content')),
('template_name', models.CharField(help_text="Example: 'staticpages/contact_page.html'. If this isn't provided, the system will use 'staticpages/default.html'.", verbose_name='template name', blank=True, max_length=70)),
],
options={
'verbose_name_plural': 'static pages',
'ordering': ('url',),
'verbose_name': 'static page',
},
bases=(models.Model,),
),
]
| null-none/OpenGain | default_set/staticpages/migrations/0001_initial.py | Python | gpl-2.0 | 1,523 |
from django.db import models
from django.utils import timezone
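# Note: most of the models below are unmanaged (managed = False); they appear to
# map onto tables that are kept in sync with Salesforce (e.g. via Heroku Connect),
# hence the sfid, _hc_lastop and _hc_err columns. Django is not expected to create
# or migrate these tables.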
class Hcmeta(models.Model):
hcver = models.IntegerField(blank=True, null=True)
org_id = models.CharField(max_length=50, blank=True, null=True)
details = models.TextField(blank=True, null=True)
class Meta:
managed = False
db_table = '_hcmeta'
class SfEventLog(models.Model):
table_name = models.CharField(max_length=128, blank=True, null=True)
action = models.CharField(max_length=7, blank=True, null=True)
synced_at = models.DateTimeField(blank=True, null=True)
sf_timestamp = models.DateTimeField(blank=True, null=True)
sfid = models.CharField(max_length=20, blank=True, null=True)
record = models.TextField(blank=True, null=True)
processed = models.BooleanField(null=True)
class Meta:
managed = False
db_table = '_sf_event_log'
class TriggerLog(models.Model):
txid = models.BigIntegerField(blank=True, null=True)
created_at = models.DateTimeField(blank=True, null=True)
updated_at = models.DateTimeField(blank=True, null=True)
processed_at = models.DateTimeField(blank=True, null=True)
processed_tx = models.BigIntegerField(blank=True, null=True)
state = models.CharField(max_length=8, blank=True, null=True)
action = models.CharField(max_length=7, blank=True, null=True)
table_name = models.CharField(max_length=128, blank=True, null=True)
record_id = models.IntegerField(blank=True, null=True)
sfid = models.CharField(max_length=18, blank=True, null=True)
old = models.TextField(blank=True, null=True)
values = models.TextField(blank=True, null=True)
sf_result = models.IntegerField(blank=True, null=True)
sf_message = models.TextField(blank=True, null=True)
class Meta:
managed = False
db_table = '_trigger_log'
class TriggerLogArchive(models.Model):
id = models.IntegerField(primary_key=True)
txid = models.BigIntegerField(blank=True, null=True)
created_at = models.DateTimeField(blank=True, null=True)
updated_at = models.DateTimeField(blank=True, null=True)
processed_at = models.DateTimeField(blank=True, null=True)
processed_tx = models.BigIntegerField(blank=True, null=True)
state = models.CharField(max_length=8, blank=True, null=True)
action = models.CharField(max_length=7, blank=True, null=True)
table_name = models.CharField(max_length=128, blank=True, null=True)
record_id = models.IntegerField(blank=True, null=True)
sfid = models.CharField(max_length=18, blank=True, null=True)
old = models.TextField(blank=True, null=True)
values = models.TextField(blank=True, null=True)
sf_result = models.IntegerField(blank=True, null=True)
sf_message = models.TextField(blank=True, null=True)
class Meta:
managed = False
db_table = '_trigger_log_archive'
class SiteAccount(models.Model):
jigsaw = models.CharField(max_length=20, blank=True, null=True)
shippinglongitude = models.FloatField(blank=True, null=True)
shippingstate = models.CharField(max_length=80, blank=True, null=True)
youtubeid = models.CharField(db_column='youtubeid__c', max_length=80, blank=True, null=True)
numberofemployees = models.IntegerField(blank=True, null=True)
parent = models.ForeignKey('SiteAccount', to_field='sfid', db_column='parentid',
on_delete=models.CASCADE,
max_length=18, blank=True, null=True)
recordtypeid = models.CharField(max_length=18, blank=True, null=True)
shippingpostalcode = models.CharField(max_length=20, blank=True, null=True)
billingcity = models.CharField(max_length=40, blank=True, null=True)
billinglatitude = models.FloatField(blank=True, null=True)
accountsource = models.CharField(max_length=40, blank=True, null=True)
shippingcountry = models.CharField(max_length=80, blank=True, null=True)
lastvieweddate = models.DateTimeField(blank=True, null=True)
shippinggeocodeaccuracy = models.CharField(max_length=40, blank=True, null=True)
last_el_update = models.DateTimeField(db_column='last_el_update__c', blank=True, null=True)
name = models.CharField(max_length=255, blank=True, null=True)
site_el_raised = models.FloatField(db_column='site_el_raised__c', blank=True, null=True)
lastmodifieddate = models.DateTimeField(blank=True, null=True)
phone = models.CharField(max_length=40, blank=True, null=True)
masterrecordid = models.CharField(max_length=18, blank=True, null=True)
ownerid = models.CharField(max_length=18, blank=True, null=True)
isdeleted = models.BooleanField(null=True)
site_el_goal = models.FloatField(db_column='site_el_goal__c', blank=True, null=True)
systemmodstamp = models.DateTimeField(blank=True, null=True)
el_id = models.CharField(db_column='el_id__c', max_length=80, blank=True, null=True)
lastmodifiedbyid = models.CharField(max_length=18, blank=True, null=True)
shippingstreet = models.CharField(max_length=255, blank=True, null=True)
lastactivitydate = models.DateField(blank=True, null=True)
billingpostalcode = models.CharField(max_length=20, blank=True, null=True)
billinglongitude = models.FloatField(blank=True, null=True)
twitchid = models.CharField(db_column='twitchid__c', max_length=80, blank=True, null=True)
twitterid = models.CharField(db_column='twitterid__c', max_length=80, blank=True, null=True)
createddate = models.DateTimeField(blank=True, null=True)
billingstate = models.CharField(max_length=80, blank=True, null=True)
supplies = models.TextField(db_column='supplies__c', blank=True, null=True)
jigsawcompanyid = models.CharField(max_length=20, blank=True, null=True)
shippingcity = models.CharField(max_length=40, blank=True, null=True)
shippinglatitude = models.FloatField(blank=True, null=True)
createdbyid = models.CharField(max_length=18, blank=True, null=True)
type = models.CharField(max_length=40, blank=True, null=True)
website = models.CharField(max_length=255, blank=True, null=True)
billingcountry = models.CharField(max_length=80, blank=True, null=True)
description = models.TextField(blank=True, null=True)
billinggeocodeaccuracy = models.CharField(max_length=40, blank=True, null=True)
photourl = models.CharField(max_length=255, blank=True, null=True)
lastreferenceddate = models.DateTimeField(blank=True, null=True)
sicdesc = models.CharField(max_length=80, blank=True, null=True)
industry = models.CharField(max_length=40, blank=True, null=True)
billingstreet = models.CharField(max_length=255, blank=True, null=True)
site_email = models.CharField(db_column='site_email__c', max_length=80, blank=True, null=True)
sfid = models.CharField(unique=True, max_length=18, blank=True, null=True)
field_hc_lastop = models.CharField(db_column='_hc_lastop', max_length=32, blank=True, null=True)
field_hc_err = models.TextField(db_column='_hc_err', blank=True, null=True)
site_info = models.TextField(db_column='site_info__c', blank=True, null=True)
nerd_in_chief = models.CharField(db_column='nerd_in_chief__c', max_length=18, blank=True, null=True)
mayedit = models.BooleanField(null=True)
# contacturl = models.CharField(db_column='contacturl__c', max_length=1300, blank=True, null=True)
islocked = models.BooleanField(null=True)
loot_guard = models.CharField(db_column='loot_guard__c', max_length=18, blank=True, null=True)
class Meta:
managed = False
db_table = 'account'
def has_events(self):
""" Return True if this account has upcoming events """
return Event.objects.filter(event_start_date__gte=timezone.now(), site=self).count() > 0
def upcoming(self):
return self.events.filter(event_start_date__gte=timezone.now()).order_by('event_start_date').all()
def past(self):
return self.events.filter(event_start_date__lt=timezone.now()).order_by('-event_start_date').all()
class Contact(models.Model):
lastname = models.CharField(max_length=80, blank=True, null=True)
account = models.ForeignKey(SiteAccount, to_field='sfid', db_column='accountid', on_delete=models.CASCADE,
max_length=18, blank=True, null=True)
name = models.CharField(max_length=121, blank=True, null=True)
ownerid = models.CharField(max_length=18, blank=True, null=True)
department = models.CharField(max_length=80, blank=True, null=True)
extra_life_id = models.CharField(db_column='extra_life_id__c', unique=True, max_length=20, blank=True, null=True)
fragforce_org_user = models.CharField(db_column='fragforce_org_user__c', max_length=18, blank=True, null=True)
title = models.CharField(max_length=128, blank=True, null=True)
firstname = models.CharField(max_length=40, blank=True, null=True)
sfid = models.CharField(unique=True, max_length=18, blank=True, null=True)
field_hc_lastop = models.CharField(db_column='_hc_lastop', max_length=32, blank=True, null=True)
field_hc_err = models.TextField(db_column='_hc_err', blank=True, null=True)
def donate_link(self):
if self.extra_life_id:
return "https://www.extra-life.org/index.cfm?fuseaction=donate.participant&participantID=%d" % (
int(self.extra_life_id),
)
raise ValueError("No extra life id set for %r" % self)
class Meta:
managed = False
db_table = 'contact'
class ELHistory(models.Model):
currencyisocode = models.CharField(max_length=3, blank=True, null=True)
contact = models.ForeignKey(Contact, to_field='sfid', db_column='contact__c', on_delete=models.CASCADE,
max_length=18, blank=True, null=True)
year = models.CharField(db_column='year__c', max_length=255, blank=True, null=True)
name = models.CharField(max_length=80, blank=True, null=True)
raised = models.FloatField(db_column='raised__c', blank=True, null=True)
lastmodifieddate = models.DateTimeField(blank=True, null=True)
ownerid = models.CharField(max_length=18, blank=True, null=True)
mayedit = models.BooleanField(null=True)
isdeleted = models.BooleanField(null=True)
goal = models.FloatField(db_column='goal__c', blank=True, null=True)
systemmodstamp = models.DateTimeField(blank=True, null=True)
el_id = models.CharField(db_column='el_id__c', max_length=7, blank=True, null=True)
lastmodifiedbyid = models.CharField(max_length=18, blank=True, null=True)
islocked = models.BooleanField(null=True)
createddate = models.DateTimeField(blank=True, null=True)
createdbyid = models.CharField(max_length=18, blank=True, null=True)
site = models.ForeignKey(SiteAccount, to_field='sfid', db_column='site__c', on_delete=models.CASCADE, max_length=18,
blank=True, null=True)
sfid = models.CharField(unique=True, max_length=18, blank=True, null=True)
field_hc_lastop = models.CharField(db_column='_hc_lastop', max_length=32, blank=True, null=True)
field_hc_err = models.TextField(db_column='_hc_err', blank=True, null=True)
class Meta:
managed = False
db_table = 'el_history__c'
class Event(models.Model):
lastvieweddate = models.DateTimeField(blank=True, null=True)
volunteerforce_link = models.CharField(db_column='volunteerforce_link__c', max_length=255, blank=True, null=True)
name = models.CharField(max_length=80, blank=True, null=True)
event_end_date = models.DateTimeField(db_column='event_end_date__c', blank=True, null=True)
lastmodifieddate = models.DateTimeField(blank=True, null=True)
isdeleted = models.BooleanField(null=True)
systemmodstamp = models.DateTimeField(blank=True, null=True)
lastmodifiedbyid = models.CharField(max_length=18, blank=True, null=True)
lastactivitydate = models.DateField(blank=True, null=True)
event_start_date = models.DateTimeField(db_column='event_start_date__c', blank=True, null=True)
createddate = models.DateTimeField(blank=True, null=True)
createdbyid = models.CharField(max_length=18, blank=True, null=True)
site = models.ForeignKey(SiteAccount, to_field='sfid', db_column='site__c', on_delete=models.CASCADE, max_length=18,
blank=True, null=True, related_name='events')
lastreferenceddate = models.DateTimeField(blank=True, null=True)
sfid = models.CharField(unique=True, max_length=18, blank=True, null=True)
field_hc_lastop = models.CharField(db_column='_hc_lastop', max_length=32, blank=True, null=True)
field_hc_err = models.TextField(db_column='_hc_err', blank=True, null=True)
use_secondary_address = models.BooleanField(db_column='use_secondary_address__c', null=True)
stream_recording_link = models.CharField(db_column='stream_recording_link__c', max_length=255, blank=True,
null=True)
# participant_count = models.FloatField(db_column='participant_count__c', blank=True, null=True)
# prereg_url = models.CharField(db_column='prereg_url__c', max_length=1300, blank=True, null=True)
mayedit = models.BooleanField(null=True)
# open_for_preregistration = models.BooleanField(db_column='open_for_preregistration__c', null=True)
islocked = models.BooleanField(null=True)
# signinurl = models.CharField(db_column='signinurl__c', max_length=1300, blank=True, null=True)
# event_address_lookup = models.CharField(db_column='event_address_lookup__c', max_length=1300, blank=True, null=True)
event_information = models.TextField(db_column='event_information__c', blank=True, null=True)
# open_for_registration = models.BooleanField(db_column='open_for_registration__c', null=True)
# Short description of the event
description = models.TextField(db_column='description__c', blank=True, null=True)
class Meta:
managed = False
db_table = 'fragforce_event__c'
class EventParticipant(models.Model):
contact = models.ForeignKey(Contact, to_field='sfid', db_column='contact__c', on_delete=models.CASCADE,
max_length=18, blank=True, null=True)
lastvieweddate = models.DateTimeField(blank=True, null=True)
name = models.CharField(max_length=80, blank=True, null=True)
lastmodifieddate = models.DateTimeField(blank=True, null=True)
ownerid = models.CharField(max_length=18, blank=True, null=True)
mayedit = models.BooleanField(null=True)
event = models.ForeignKey(Event, to_field='sfid', db_column='fragforce_event__c', on_delete=models.CASCADE,
max_length=18, blank=True,
null=True)
isdeleted = models.BooleanField(null=True)
participant = models.BooleanField(db_column='participant__c', null=True)
systemmodstamp = models.DateTimeField(blank=True, null=True)
lastmodifiedbyid = models.CharField(max_length=18, blank=True, null=True)
lastactivitydate = models.DateField(blank=True, null=True)
islocked = models.BooleanField(null=True)
createddate = models.DateTimeField(blank=True, null=True)
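    # note: this 'name' assignment shadows the 'name' field declared earlier in
    # this class, so only the custom 'name__c' column ends up mapped on the model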
name = models.CharField(db_column='name__c', max_length=120, blank=True, null=True)
createdbyid = models.CharField(max_length=18, blank=True, null=True)
lastreferenceddate = models.DateTimeField(blank=True, null=True)
sfid = models.CharField(unique=True, max_length=18, blank=True, null=True)
field_hc_lastop = models.CharField(db_column='_hc_lastop', max_length=32, blank=True, null=True)
field_hc_err = models.TextField(db_column='_hc_err', blank=True, null=True)
class Meta:
managed = False
db_table = 'event_participant__c'
| AevumDecessus/fragforce.org | ffsfdc/models.py | Python | gpl-2.0 | 15,743 |
# OeQ autogenerated correlation for 'Window/Wall Ratio South in Correlation to the Building Age'
import math
import numpy as np
from . import oeqCorrelation as oeq
def get(*xin):
# OeQ autogenerated correlation for 'Window to Wall Ratio in Southern Direction'
A_WIN_S_BY_AW= oeq.correlation(
const= 20818.6194135,
a= -42.6513518642,
b= 0.0327511835635,
c= -1.11718058834e-05,
d= 1.42836626434e-09,
mode= "lin")
return dict(A_WIN_S_BY_AW=A_WIN_S_BY_AW.lookup(*xin))
| UdK-VPT/Open_eQuarter | mole3/stat_corr/window_wall_ratio_south_MFH_by_building_age_correlation.py | Python | gpl-2.0 | 521 |
# Copyright (C) 2008 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from __future__ import absolute_import
import collections
from cStringIO import StringIO
from bzrlib import (
debug,
errors,
)
from bzrlib.trace import mutter
class MessageHandler(object):
"""Base class for handling messages received via the smart protocol.
As parts of a message are received, the corresponding PART_received method
will be called.
"""
def __init__(self):
self.headers = None
def headers_received(self, headers):
"""Called when message headers are received.
This default implementation just stores them in self.headers.
"""
self.headers = headers
def byte_part_received(self, byte):
"""Called when a 'byte' part is received.
Note that a 'byte' part is a message part consisting of exactly one
byte.
"""
        raise NotImplementedError(self.byte_part_received)
def bytes_part_received(self, bytes):
"""Called when a 'bytes' part is received.
A 'bytes' message part can contain any number of bytes. It should not
be confused with a 'byte' part, which is always a single byte.
"""
        raise NotImplementedError(self.bytes_part_received)
def structure_part_received(self, structure):
"""Called when a 'structure' part is received.
:param structure: some structured data, which will be some combination
of list, dict, int, and str objects.
"""
        raise NotImplementedError(self.structure_part_received)
def protocol_error(self, exception):
"""Called when there is a protocol decoding error.
The default implementation just re-raises the exception.
"""
raise
def end_received(self):
"""Called when the end of the message is received."""
# No-op by default.
pass
class ConventionalRequestHandler(MessageHandler):
"""A message handler for "conventional" requests.
"Conventional" is used in the sense described in
doc/developers/network-protocol.txt: a simple message with arguments and an
optional body.
Possible states:
* args: expecting args
* body: expecting body (terminated by receiving a post-body status)
* error: expecting post-body error
* end: expecting end of message
* nothing: finished
"""
def __init__(self, request_handler, responder):
MessageHandler.__init__(self)
self.request_handler = request_handler
self.responder = responder
self.expecting = 'args'
self._should_finish_body = False
self._response_sent = False
def protocol_error(self, exception):
if self.responder.response_sent:
# We can only send one response to a request, no matter how many
# errors happen while processing it.
return
self.responder.send_error(exception)
def byte_part_received(self, byte):
if self.expecting == 'body':
if byte == 'S':
# Success. Nothing more to come except the end of message.
self.expecting = 'end'
elif byte == 'E':
# Error. Expect an error structure.
self.expecting = 'error'
else:
raise errors.SmartProtocolError(
'Non-success status byte in request body: %r' % (byte,))
else:
raise errors.SmartProtocolError(
'Unexpected message part: byte(%r)' % (byte,))
def structure_part_received(self, structure):
if self.expecting == 'args':
self._args_received(structure)
elif self.expecting == 'error':
self._error_received(structure)
else:
raise errors.SmartProtocolError(
'Unexpected message part: structure(%r)' % (structure,))
def _args_received(self, args):
self.expecting = 'body'
self.request_handler.args_received(args)
if self.request_handler.finished_reading:
self._response_sent = True
self.responder.send_response(self.request_handler.response)
self.expecting = 'end'
def _error_received(self, error_args):
self.expecting = 'end'
self.request_handler.post_body_error_received(error_args)
def bytes_part_received(self, bytes):
if self.expecting == 'body':
self._should_finish_body = True
self.request_handler.accept_body(bytes)
else:
raise errors.SmartProtocolError(
'Unexpected message part: bytes(%r)' % (bytes,))
def end_received(self):
if self.expecting not in ['body', 'end']:
raise errors.SmartProtocolError(
'End of message received prematurely (while expecting %s)'
% (self.expecting,))
self.expecting = 'nothing'
self.request_handler.end_received()
if not self.request_handler.finished_reading:
raise errors.SmartProtocolError(
"Complete conventional request was received, but request "
"handler has not finished reading.")
if not self._response_sent:
self.responder.send_response(self.request_handler.response)
class ResponseHandler(object):
"""Abstract base class for an object that handles a smart response."""
def read_response_tuple(self, expect_body=False):
"""Reads and returns the response tuple for the current request.
:keyword expect_body: a boolean indicating if a body is expected in the
            response. Some protocol versions need this information to know
when a response is finished. If False, read_body_bytes should
*not* be called afterwards. Defaults to False.
:returns: tuple of response arguments.
"""
raise NotImplementedError(self.read_response_tuple)
def read_body_bytes(self, count=-1):
"""Read and return some bytes from the body.
:param count: if specified, read up to this many bytes. By default,
reads the entire body.
:returns: str of bytes from the response body.
"""
raise NotImplementedError(self.read_body_bytes)
def read_streamed_body(self):
"""Returns an iterable that reads and returns a series of body chunks.
"""
raise NotImplementedError(self.read_streamed_body)
def cancel_read_body(self):
"""Stop expecting a body for this response.
If expect_body was passed to read_response_tuple, this cancels that
expectation (and thus finishes reading the response, allowing a new
request to be issued). This is useful if a response turns out to be an
error rather than a normal result with a body.
"""
raise NotImplementedError(self.cancel_read_body)
class ConventionalResponseHandler(MessageHandler, ResponseHandler):
def __init__(self):
MessageHandler.__init__(self)
self.status = None
self.args = None
self._bytes_parts = collections.deque()
self._body_started = False
self._body_stream_status = None
self._body = None
self._body_error_args = None
self.finished_reading = False
def setProtoAndMediumRequest(self, protocol_decoder, medium_request):
self._protocol_decoder = protocol_decoder
self._medium_request = medium_request
def byte_part_received(self, byte):
if byte not in ['E', 'S']:
raise errors.SmartProtocolError(
'Unknown response status: %r' % (byte,))
if self._body_started:
if self._body_stream_status is not None:
raise errors.SmartProtocolError(
'Unexpected byte part received: %r' % (byte,))
self._body_stream_status = byte
else:
if self.status is not None:
raise errors.SmartProtocolError(
'Unexpected byte part received: %r' % (byte,))
self.status = byte
def bytes_part_received(self, bytes):
self._body_started = True
self._bytes_parts.append(bytes)
def structure_part_received(self, structure):
if type(structure) is not tuple:
raise errors.SmartProtocolError(
'Args structure is not a sequence: %r' % (structure,))
if not self._body_started:
if self.args is not None:
raise errors.SmartProtocolError(
'Unexpected structure received: %r (already got %r)'
% (structure, self.args))
self.args = structure
else:
if self._body_stream_status != 'E':
raise errors.SmartProtocolError(
'Unexpected structure received after body: %r'
% (structure,))
self._body_error_args = structure
def _wait_for_response_args(self):
while self.args is None and not self.finished_reading:
self._read_more()
def _wait_for_response_end(self):
while not self.finished_reading:
self._read_more()
def _read_more(self):
next_read_size = self._protocol_decoder.next_read_size()
if next_read_size == 0:
# a complete request has been read.
self.finished_reading = True
self._medium_request.finished_reading()
return
bytes = self._medium_request.read_bytes(next_read_size)
if bytes == '':
# end of file encountered reading from server
if 'hpss' in debug.debug_flags:
mutter(
'decoder state: buf[:10]=%r, state_accept=%s',
self._protocol_decoder._get_in_buffer()[:10],
self._protocol_decoder.state_accept.__name__)
raise errors.ConnectionReset(
"Unexpected end of message. "
"Please check connectivity and permissions, and report a bug "
"if problems persist.")
self._protocol_decoder.accept_bytes(bytes)
def protocol_error(self, exception):
# Whatever the error is, we're done with this request.
self.finished_reading = True
self._medium_request.finished_reading()
raise
def read_response_tuple(self, expect_body=False):
"""Read a response tuple from the wire."""
self._wait_for_response_args()
if not expect_body:
self._wait_for_response_end()
if 'hpss' in debug.debug_flags:
mutter(' result: %r', self.args)
if self.status == 'E':
self._wait_for_response_end()
_raise_smart_server_error(self.args)
return tuple(self.args)
def read_body_bytes(self, count=-1):
"""Read bytes from the body, decoding into a byte stream.
We read all bytes at once to ensure we've checked the trailer for
errors, and then feed the buffer back as read_body_bytes is called.
Like the builtin file.read in Python, a count of -1 (the default) means
read the entire body.
"""
# TODO: we don't necessarily need to buffer the full request if count
# != -1. (2008/04/30, Andrew Bennetts)
if self._body is None:
self._wait_for_response_end()
body_bytes = ''.join(self._bytes_parts)
if 'hpss' in debug.debug_flags:
mutter(' %d body bytes read', len(body_bytes))
self._body = StringIO(body_bytes)
self._bytes_parts = None
return self._body.read(count)
def read_streamed_body(self):
while not self.finished_reading:
while self._bytes_parts:
bytes_part = self._bytes_parts.popleft()
if 'hpssdetail' in debug.debug_flags:
mutter(' %d byte part read', len(bytes_part))
yield bytes_part
self._read_more()
if self._body_stream_status == 'E':
_raise_smart_server_error(self._body_error_args)
def cancel_read_body(self):
self._wait_for_response_end()
def _raise_smart_server_error(error_tuple):
"""Raise exception based on tuple received from smart server
Specific error translation is handled by bzrlib.remote._translate_error
"""
if error_tuple[0] == 'UnknownMethod':
raise errors.UnknownSmartMethod(error_tuple[1])
raise errors.ErrorFromSmartServer(error_tuple)
| Distrotech/bzr | bzrlib/smart/message.py | Python | gpl-2.0 | 13,276 |
from __future__ import absolute_import
from Plugins.Plugin import PluginDescriptor
from Components.PluginComponent import plugins
from enigma import eDBoxLCD
from .qpip import QuadPipScreen, setDecoderMode
def main(session, **kwargs):
session.open(QuadPipScreen)
def autoStart(reason, **kwargs):
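	# in enigma2 the autostart hook is called with reason=0 on startup and
	# reason=1 on shutdown; the decoder is reset to normal mode on startup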
if reason == 0:
setDecoderMode("normal")
elif reason == 1:
pass
def Plugins(**kwargs):
list = []
list.append(
PluginDescriptor(name=_("Enable Quad PIP"),
description="Quad Picture in Picture",
where=[PluginDescriptor.WHERE_EXTENSIONSMENU],
fnc=main))
list.append(
PluginDescriptor(
where=[PluginDescriptor.WHERE_AUTOSTART],
fnc=autoStart))
return list
| openatv/enigma2 | lib/python/Plugins/Extensions/QuadPip/plugin.py | Python | gpl-2.0 | 682 |
import wx
from service.fit import Fit
import gui.mainFrame
from gui import globalEvents as GE
from .calc.fitRemoveImplant import FitRemoveImplantCommand
from .calc.fitAddImplant import FitAddImplantCommand
from .calc.fitRemoveBooster import FitRemoveBoosterCommand
from .calc.fitAddBooster import FitAddBoosterCommand
from .calc.fitRemoveCargo import FitRemoveCargoCommand
from .calc.fitAddCargo import FitAddCargoCommand
from .calc.fitReplaceModule import FitReplaceModuleCommand
from .calc.fitAddFighter import FitAddFighterCommand
from .calc.fitRemoveFighter import FitRemoveFighterCommand
from .calc.fitChangeDroneVariation import FitChangeDroneVariationCommand
class GuiMetaSwapCommand(wx.Command):
def __init__(self, fitID, context, itemID, selection: list):
wx.Command.__init__(self, True, "Meta Swap")
self.mainFrame = gui.mainFrame.MainFrame.getInstance()
self.sFit = Fit.getInstance()
self.internal_history = wx.CommandProcessor()
self.fitID = fitID
self.itemID = itemID
self.context = context
self.data = []
fit = self.sFit.getFit(fitID)
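        # for each selected item, build the tuple(s) of calc commands needed to swap
        # it to the chosen variation: modules and drones are replaced/changed in place,
        # while implants, boosters, cargo and fighters are removed and re-added; the
        # tuples are run through the internal wx.CommandProcessor in Do()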
if context == 'fittingModule':
for x in selection:
self.data.append(((FitReplaceModuleCommand, fitID, fit.modules.index(x), itemID),),)
elif context == 'implantItem':
for x in selection:
idx = fit.implants.index(x)
self.data.append(((FitRemoveImplantCommand, fitID, idx), (FitAddImplantCommand, fitID, itemID)))
elif context == 'boosterItem':
for x in selection:
idx = fit.boosters.index(x)
self.data.append(((FitRemoveBoosterCommand, fitID, idx), (FitAddBoosterCommand, fitID, itemID)))
elif context == 'cargoItem':
for x in selection:
self.data.append(((FitRemoveCargoCommand, fitID, x.itemID, 1, True), (FitAddCargoCommand, fitID, itemID, x.amount)))
elif context == 'fighterItem':
for x in selection:
self.data.append(((FitRemoveFighterCommand, fitID, fit.fighters.index(x)), (FitAddFighterCommand, fitID, itemID)))
elif context == 'droneItem':
for x in selection:
self.data.append(((FitChangeDroneVariationCommand, fitID, fit.drones.index(x), itemID),),)
def Do(self):
for cmds in self.data:
for cmd in cmds:
self.internal_history.Submit(cmd[0](*cmd[1:]))
self.sFit.recalc(self.fitID)
wx.PostEvent(self.mainFrame, GE.FitChanged(fitID=self.fitID))
return True
def Undo(self):
for _ in self.internal_history.Commands:
self.internal_history.Undo()
self.sFit.recalc(self.fitID)
wx.PostEvent(self.mainFrame, GE.FitChanged(fitID=self.fitID))
return True
| blitzmann/Pyfa | gui/fitCommands/guiMetaSwap.py | Python | gpl-3.0 | 2,845 |
import logging
from atracker.models import Event
from atracker.util import create_event
from django.conf.urls import url
from django.contrib.auth import get_user_model
from django.contrib.contenttypes.models import ContentType
from tastypie.authentication import (
MultiAuthentication,
Authentication,
SessionAuthentication,
ApiKeyAuthentication,
)
from tastypie.authorization import Authorization
from tastypie.http import HttpUnauthorized
from tastypie.resources import ModelResource
from tastypie.utils import trailing_slash
log = logging.getLogger(__name__)
class EventResource(ModelResource):
class Meta:
queryset = Event.objects.all()
list_allowed_methods = ["get"]
detail_allowed_methods = ["get"]
resource_name = "atracker/event"
include_resource_uri = False
# TODO: double-check for sensitive information
fields = ["created"]
authentication = MultiAuthentication(
SessionAuthentication(), ApiKeyAuthentication(), Authentication()
)
authorization = Authorization()
always_return_data = True
filtering = {}
def dehydrate(self, bundle):
return bundle
def prepend_urls(self):
return [
url(
r"^(?P<resource_name>%s)/(?P<content_type>[\w.]+)/(?P<object_uuid>[\w.-]+)(?:/(?P<action>[\w-]+))?(?:/(?P<user_id>-?[0-9]+))?%s$"
% (self._meta.resource_name, trailing_slash()),
self.wrap_view("create_event_for_user"),
name="atracker-create-event-for-user",
)
]
    # create event on behalf of user
"""
call via curl
curl -i \
-H "Accept: application/json" \
-H "Authorization: ApiKey remote:d65b075c593f27a42c26e65be74c047e5b50d215" \
http://local.openbroadcast.org:8080/api/v1/atracker/event/alibrary.media/4faa159c-87f4-43eb-b2b7-a4de124a05e5/stream/1/?format=json
"""
def create_event_for_user(self, request, **kwargs):
self.method_check(request, allowed=["get"])
self.is_authenticated(request)
self.throttle_check(request)
object_uuid = kwargs.get("object_uuid", None)
content_type = kwargs.get("content_type", None)
orig_ct = content_type
action = kwargs.get("action", None)
user_id = kwargs.get("user_id", None)
if user_id:
user_id = int(user_id)
log.debug(
"create_event_for_user - content_type: %s - object_uuid: %s - action: %s - user_id: %s"
% (content_type, object_uuid, action, user_id)
)
if isinstance(content_type, basestring) and "." in content_type:
app, modelname = content_type.split(".")
content_type = ContentType.objects.get(
app_label=app, model__iexact=modelname
)
elif isinstance(content_type, basestring):
content_type = ContentType.objects.get(id=int(content_type))
else:
raise ValueError('content_type must a ct id or "app.modelname" string')
if user_id:
log.debug("creating event on _behalf_ of user with id: %s" % user_id)
if request.user.has_perm("atracker.track_for_user"):
user = get_user_model().objects.get(pk=user_id)
log.info("voting for user by id: %s" % user.username)
else:
log.warning(
"no permission for %s to vote in behalf of %s"
% (request.user, user_id)
)
user = None
elif request.user and request.user.is_authenticated():
user = request.user
log.info("creating event for user by request: %s" % user.username)
else:
log.debug("no authenticated user")
user = None
object = content_type.model_class().objects.get(uuid=object_uuid)
if action:
if not user:
return HttpUnauthorized("No permission to update this resource.")
create_event(user, object, None, action)
bundle = {
"object_id": object.id,
"object_uuid": object.uuid,
"ct": orig_ct,
"action": action,
}
self.log_throttled_access(request)
return self.create_response(request, bundle)
| hzlf/openbroadcast.org | website/apps/atracker/api/event.py | Python | gpl-3.0 | 4,386 |
# -*- coding: UTF-8 -*-
'''
virtualbox_const_support.py
Constants for VirtualBox.
'''
__author__ = "Karol Będkowski <[email protected]>"
__version__ = '0.3'
# virtual machine states
VM_STATE_POWEROFF = 0
VM_STATE_POWERON = 1
VM_STATE_PAUSED = 2
# virtual machine actions
VM_START_NORMAL = 1
VM_START_HEADLESS = 2
VM_PAUSE = 3
VM_POWEROFF = 4
VM_ACPI_POWEROFF = 5
VM_REBOOT = 6
VM_RESUME = 7
VM_SAVE = 8
| cjparsons74/Kupfer-cjparsons74 | kupfer/plugin/virtualbox/constants.py | Python | gpl-3.0 | 417 |
# (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import base64
import subprocess
import sys
import time
import traceback
from ansible.compat.six import iteritems, string_types, binary_type
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVariable, AnsibleConnectionFailure
from ansible.executor.task_result import TaskResult
from ansible.module_utils._text import to_bytes, to_text
from ansible.playbook.conditional import Conditional
from ansible.playbook.task import Task
from ansible.template import Templar
from ansible.utils.encrypt import key_for_hostname
from ansible.utils.listify import listify_lookup_plugin_terms
from ansible.utils.ssh_functions import check_for_controlpersist
from ansible.vars.unsafe_proxy import UnsafeProxy, wrap_var
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
__all__ = ['TaskExecutor']
class TaskExecutor:
'''
This is the main worker class for the executor pipeline, which
handles loading an action plugin to actually dispatch the task to
a given host. This class roughly corresponds to the old Runner()
class.
'''
# Modules that we optimize by squashing loop items into a single call to
# the module
SQUASH_ACTIONS = frozenset(C.DEFAULT_SQUASH_ACTIONS)
def __init__(self, host, task, job_vars, play_context, new_stdin, loader, shared_loader_obj, rslt_q):
self._host = host
self._task = task
self._job_vars = job_vars
self._play_context = play_context
self._new_stdin = new_stdin
self._loader = loader
self._shared_loader_obj = shared_loader_obj
self._connection = None
self._rslt_q = rslt_q
self._task.squash()
def run(self):
'''
The main executor entrypoint, where we determine if the specified
task requires looping and either runs the task with self._run_loop()
or self._execute(). After that, the returned results are parsed and
returned as a dict.
'''
display.debug("in run()")
try:
items = self._get_loop_items()
if items is not None:
if len(items) > 0:
item_results = self._run_loop(items)
# loop through the item results, and remember the changed/failed
# result flags based on any item there.
changed = False
failed = False
for item in item_results:
if 'changed' in item and item['changed']:
changed = True
if 'failed' in item and item['failed']:
failed = True
# create the overall result item, and set the changed/failed
# flags there to reflect the overall result of the loop
res = dict(results=item_results)
if changed:
res['changed'] = True
if failed:
res['failed'] = True
res['msg'] = 'One or more items failed'
else:
res['msg'] = 'All items completed'
else:
res = dict(changed=False, skipped=True, skipped_reason='No items in the list', results=[])
else:
display.debug("calling self._execute()")
res = self._execute()
display.debug("_execute() done")
# make sure changed is set in the result, if it's not present
if 'changed' not in res:
res['changed'] = False
def _clean_res(res):
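                # recursively unwrap UnsafeProxy values and decode bytes so the
                # returned result contains only plain text/dict/list structures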
if isinstance(res, UnsafeProxy):
return res._obj
elif isinstance(res, binary_type):
return to_text(res, errors='surrogate_or_strict')
elif isinstance(res, dict):
for k in res:
res[k] = _clean_res(res[k])
elif isinstance(res, list):
for idx,item in enumerate(res):
res[idx] = _clean_res(item)
return res
display.debug("dumping result to json")
res = _clean_res(res)
display.debug("done dumping result, returning")
return res
except AnsibleError as e:
return dict(failed=True, msg=to_text(e, nonstring='simplerepr'))
except Exception as e:
return dict(failed=True, msg='Unexpected failure during module execution.', exception=to_text(traceback.format_exc()), stdout='')
finally:
try:
self._connection.close()
except AttributeError:
pass
except Exception as e:
display.debug(u"error closing connection: %s" % to_text(e))
def _get_loop_items(self):
'''
Loads a lookup plugin to handle the with_* portion of a task (if specified),
and returns the items result.
'''
# save the play context variables to a temporary dictionary,
# so that we can modify the job vars without doing a full copy
# and later restore them to avoid modifying things too early
play_context_vars = dict()
self._play_context.update_vars(play_context_vars)
old_vars = dict()
for k in play_context_vars:
if k in self._job_vars:
old_vars[k] = self._job_vars[k]
self._job_vars[k] = play_context_vars[k]
# get search path for this task to pass to lookup plugins
self._job_vars['ansible_search_path'] = self._task.get_search_path()
templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=self._job_vars)
items = None
if self._task.loop:
if self._task.loop in self._shared_loader_obj.lookup_loader:
if self._task.loop == 'first_found':
# first_found loops are special. If the item is undefined then we want to fall through to the next value rather than failing.
loop_terms = listify_lookup_plugin_terms(terms=self._task.loop_args, templar=templar, loader=self._loader, fail_on_undefined=False, convert_bare=False)
loop_terms = [t for t in loop_terms if not templar._contains_vars(t)]
else:
loop_terms = listify_lookup_plugin_terms(terms=self._task.loop_args, templar=templar, loader=self._loader, fail_on_undefined=True, convert_bare=False)
# get lookup
mylookup = self._shared_loader_obj.lookup_loader.get(self._task.loop, loader=self._loader, templar=templar)
# give lookup task 'context' for subdir (mostly needed for first_found)
for subdir in ['template', 'var', 'file']: # TODO: move this to constants?
if subdir in self._task.action:
break
setattr(mylookup,'_subdir', subdir + 's')
# run lookup
items = mylookup.run(terms=loop_terms, variables=self._job_vars, wantlist=True)
else:
raise AnsibleError("Unexpected failure in finding the lookup named '%s' in the available lookup plugins" % self._task.loop)
# now we restore any old job variables that may have been modified,
# and delete them if they were in the play context vars but not in
# the old variables dictionary
for k in play_context_vars:
if k in old_vars:
self._job_vars[k] = old_vars[k]
else:
del self._job_vars[k]
if items:
from ansible.vars.unsafe_proxy import UnsafeProxy
for idx, item in enumerate(items):
if item is not None and not isinstance(item, UnsafeProxy):
items[idx] = UnsafeProxy(item)
# ensure basedir is always in (dwim already searches here but we need to display it)
if self._loader.get_basedir() not in self._job_vars['ansible_search_path']:
self._job_vars['ansible_search_path'].append(self._loader.get_basedir())
return items
def _run_loop(self, items):
'''
Runs the task with the loop items specified and collates the result
into an array named 'results' which is inserted into the final result
along with the item for which the loop ran.
'''
results = []
# make copies of the job vars and task so we can add the item to
# the variables and re-validate the task with the item variable
#task_vars = self._job_vars.copy()
task_vars = self._job_vars
loop_var = 'item'
label = None
loop_pause = 0
if self._task.loop_control:
# the value may be 'None', so we still need to default it back to 'item'
loop_var = self._task.loop_control.loop_var or 'item'
label = self._task.loop_control.label or ('{{' + loop_var + '}}')
loop_pause = self._task.loop_control.pause or 0
if loop_var in task_vars:
display.warning(u"The loop variable '%s' is already in use. "
u"You should set the `loop_var` value in the `loop_control` option for the task"
u" to something else to avoid variable collisions and unexpected behavior." % loop_var)
ran_once = False
items = self._squash_items(items, loop_var, task_vars)
for item in items:
task_vars[loop_var] = item
# pause between loop iterations
if loop_pause and ran_once:
time.sleep(loop_pause)
else:
ran_once = True
try:
tmp_task = self._task.copy(exclude_parent=True, exclude_tasks=True)
tmp_task._parent = self._task._parent
tmp_play_context = self._play_context.copy()
except AnsibleParserError as e:
results.append(dict(failed=True, msg=to_text(e)))
continue
# now we swap the internal task and play context with their copies,
# execute, and swap them back so we can do the next iteration cleanly
(self._task, tmp_task) = (tmp_task, self._task)
(self._play_context, tmp_play_context) = (tmp_play_context, self._play_context)
res = self._execute(variables=task_vars)
(self._task, tmp_task) = (tmp_task, self._task)
(self._play_context, tmp_play_context) = (tmp_play_context, self._play_context)
# now update the result with the item info, and append the result
# to the list of results
res[loop_var] = item
res['_ansible_item_result'] = True
if label is not None:
templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=self._job_vars)
res['_ansible_item_label'] = templar.template(label)
self._rslt_q.put(TaskResult(self._host.name, self._task._uuid, res), block=False)
results.append(res)
del task_vars[loop_var]
return results
def _squash_items(self, items, loop_var, variables):
'''
Squash items down to a comma-separated list for certain modules which support it
(typically package management modules).
'''
name = None
try:
# _task.action could contain templatable strings (via action: and
# local_action:) Template it before comparing. If we don't end up
# optimizing it here, the templatable string might use template vars
# that aren't available until later (it could even use vars from the
# with_items loop) so don't make the templated string permanent yet.
templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=variables)
task_action = self._task.action
if templar._contains_vars(task_action):
task_action = templar.template(task_action, fail_on_undefined=False)
if len(items) > 0 and task_action in self.SQUASH_ACTIONS:
if all(isinstance(o, string_types) for o in items):
final_items = []
for allowed in ['name', 'pkg', 'package']:
name = self._task.args.pop(allowed, None)
if name is not None:
break
# This gets the information to check whether the name field
# contains a template that we can squash for
template_no_item = template_with_item = None
if name:
if templar._contains_vars(name):
variables[loop_var] = '\0$'
template_no_item = templar.template(name, variables, cache=False)
variables[loop_var] = '\0@'
template_with_item = templar.template(name, variables, cache=False)
del variables[loop_var]
# Check if the user is doing some operation that doesn't take
# name/pkg or the name/pkg field doesn't have any variables
# and thus the items can't be squashed
if template_no_item != template_with_item:
for item in items:
variables[loop_var] = item
if self._task.evaluate_conditional(templar, variables):
new_item = templar.template(name, cache=False)
final_items.append(new_item)
self._task.args['name'] = final_items
# Wrap this in a list so that the calling function loop
# executes exactly once
return [final_items]
else:
# Restore the name parameter
self._task.args['name'] = name
#elif:
# Right now we only optimize single entries. In the future we
# could optimize more types:
# * lists can be squashed together
# * dicts could squash entries that match in all cases except the
# name or pkg field.
except:
# Squashing is an optimization. If it fails for any reason,
# simply use the unoptimized list of items.
# Restore the name parameter
if name is not None:
self._task.args['name'] = name
return items
def _execute(self, variables=None):
'''
The primary workhorse of the executor system, this runs the task
on the specified host (which may be the delegated_to host) and handles
the retry/until and block rescue/always execution
'''
if variables is None:
variables = self._job_vars
templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=variables)
context_validation_error = None
try:
# apply the given task's information to the connection info,
# which may override some fields already set by the play or
# the options specified on the command line
self._play_context = self._play_context.set_task_and_variable_override(task=self._task, variables=variables, templar=templar)
# fields set from the play/task may be based on variables, so we have to
# do the same kind of post validation step on it here before we use it.
self._play_context.post_validate(templar=templar)
# now that the play context is finalized, if the remote_addr is not set
# default to using the host's address field as the remote address
if not self._play_context.remote_addr:
self._play_context.remote_addr = self._host.address
# We also add "magic" variables back into the variables dict to make sure
# a certain subset of variables exist.
self._play_context.update_vars(variables)
except AnsibleError as e:
# save the error, which we'll raise later if we don't end up
# skipping this task during the conditional evaluation step
context_validation_error = e
# Evaluate the conditional (if any) for this task, which we do before running
# the final task post-validation. We do this before the post validation due to
# the fact that the conditional may specify that the task be skipped due to a
# variable not being present which would otherwise cause validation to fail
try:
if not self._task.evaluate_conditional(templar, variables):
display.debug("when evaluation failed, skipping this task")
return dict(changed=False, skipped=True, skip_reason='Conditional check failed', _ansible_no_log=self._play_context.no_log)
except AnsibleError:
            # skip conditional exception in the case of includes as the vars needed might not be available except in the included tasks or due to tags
if self._task.action not in ['include', 'include_role']:
raise
# if we ran into an error while setting up the PlayContext, raise it now
if context_validation_error is not None:
raise context_validation_error
# if this task is a TaskInclude, we just return now with a success code so the
# main thread can expand the task list for the given host
if self._task.action == 'include':
include_variables = self._task.args.copy()
include_file = include_variables.pop('_raw_params', None)
if not include_file:
return dict(failed=True, msg="No include file was specified to the include")
include_file = templar.template(include_file)
return dict(include=include_file, include_variables=include_variables)
# if this task is a IncludeRole, we just return now with a success code so the main thread can expand the task list for the given host
elif self._task.action == 'include_role':
include_variables = self._task.args.copy()
return dict(include_role=self._task, include_variables=include_variables)
# Now we do final validation on the task, which sets all fields to their final values.
self._task.post_validate(templar=templar)
if '_variable_params' in self._task.args:
variable_params = self._task.args.pop('_variable_params')
if isinstance(variable_params, dict):
display.deprecated("Using variables for task params is unsafe, especially if the variables come from an external source like facts")
variable_params.update(self._task.args)
self._task.args = variable_params
# get the connection and the handler for this execution
if not self._connection or not getattr(self._connection, 'connected', False) or self._play_context.remote_addr != self._connection._play_context.remote_addr:
self._connection = self._get_connection(variables=variables, templar=templar)
hostvars = variables.get('hostvars', None)
if hostvars:
try:
target_hostvars = hostvars.raw_get(self._host.name)
except:
# FIXME: this should catch the j2undefined error here
# specifically instead of all exceptions
target_hostvars = dict()
else:
target_hostvars = dict()
self._connection.set_host_overrides(host=self._host, hostvars=target_hostvars)
else:
# if connection is reused, its _play_context is no longer valid and needs
# to be replaced with the one templated above, in case other data changed
self._connection._play_context = self._play_context
self._handler = self._get_action_handler(connection=self._connection, templar=templar)
# And filter out any fields which were set to default(omit), and got the omit token value
omit_token = variables.get('omit')
if omit_token is not None:
self._task.args = dict((i[0], i[1]) for i in iteritems(self._task.args) if i[1] != omit_token)
# Read some values from the task, so that we can modify them if need be
if self._task.until:
retries = self._task.retries
if retries is None:
retries = 3
elif retries <= 0:
retries = 1
else:
retries += 1
else:
retries = 1
delay = self._task.delay
if delay < 0:
delay = 1
# make a copy of the job vars here, in case we need to update them
# with the registered variable value later on when testing conditions
vars_copy = variables.copy()
display.debug("starting attempt loop")
result = None
for attempt in range(1, retries + 1):
display.debug("running the handler")
try:
result = self._handler.run(task_vars=variables)
except AnsibleConnectionFailure as e:
return dict(unreachable=True, msg=to_text(e))
display.debug("handler run complete")
# preserve no log
result["_ansible_no_log"] = self._play_context.no_log
# update the local copy of vars with the registered value, if specified,
# or any facts which may have been generated by the module execution
if self._task.register:
vars_copy[self._task.register] = wrap_var(result.copy())
if self._task.async > 0:
if self._task.poll > 0 and not result.get('skipped'):
result = self._poll_async_result(result=result, templar=templar, task_vars=vars_copy)
# ensure no log is preserved
result["_ansible_no_log"] = self._play_context.no_log
# helper methods for use below in evaluating changed/failed_when
def _evaluate_changed_when_result(result):
if self._task.changed_when is not None and self._task.changed_when:
cond = Conditional(loader=self._loader)
cond.when = self._task.changed_when
result['changed'] = cond.evaluate_conditional(templar, vars_copy)
def _evaluate_failed_when_result(result):
if self._task.failed_when:
cond = Conditional(loader=self._loader)
cond.when = self._task.failed_when
failed_when_result = cond.evaluate_conditional(templar, vars_copy)
result['failed_when_result'] = result['failed'] = failed_when_result
else:
failed_when_result = False
return failed_when_result
if 'ansible_facts' in result:
vars_copy.update(result['ansible_facts'])
# set the failed property if the result has a non-zero rc. This will be
# overridden below if the failed_when property is set
if result.get('rc', 0) != 0:
result['failed'] = True
# if we didn't skip this task, use the helpers to evaluate the changed/
# failed_when properties
if 'skipped' not in result:
_evaluate_changed_when_result(result)
_evaluate_failed_when_result(result)
if retries > 1:
cond = Conditional(loader=self._loader)
cond.when = self._task.until
result['attempts'] = attempt
if cond.evaluate_conditional(templar, vars_copy):
break
else:
# no conditional check, or it failed, so sleep for the specified time
if attempt < retries:
result['_ansible_retry'] = True
result['retries'] = retries
display.debug('Retrying task, attempt %d of %d' % (attempt, retries))
self._rslt_q.put(TaskResult(self._host.name, self._task._uuid, result), block=False)
time.sleep(delay)
else:
if retries > 1:
# we ran out of attempts, so mark the result as failed
result['attempts'] = retries - 1
result['failed'] = True
# do the final update of the local variables here, for both registered
# values and any facts which may have been created
if self._task.register:
variables[self._task.register] = wrap_var(result)
if 'ansible_facts' in result:
variables.update(result['ansible_facts'])
# save the notification target in the result, if it was specified, as
# this task may be running in a loop in which case the notification
# may be item-specific, ie. "notify: service {{item}}"
if self._task.notify is not None:
result['_ansible_notify'] = self._task.notify
# add the delegated vars to the result, so we can reference them
# on the results side without having to do any further templating
# FIXME: we only want a limited set of variables here, so this is currently
# hardcoded but should be possibly fixed if we want more or if
# there is another source of truth we can use
delegated_vars = variables.get('ansible_delegated_vars', dict()).get(self._task.delegate_to, dict()).copy()
if len(delegated_vars) > 0:
result["_ansible_delegated_vars"] = dict()
for k in ('ansible_host', ):
result["_ansible_delegated_vars"][k] = delegated_vars.get(k)
# and return
display.debug("attempt loop complete, returning result")
return result
def _poll_async_result(self, result, templar, task_vars=None):
'''
Polls for the specified JID to be complete
'''
if task_vars is None:
task_vars = self._job_vars
async_jid = result.get('ansible_job_id')
if async_jid is None:
return dict(failed=True, msg="No job id was returned by the async task")
        # Create a new pseudo-task to run the async_status module, and run
# that (with a sleep for "poll" seconds between each retry) until the
# async time limit is exceeded.
async_task = Task().load(dict(action='async_status jid=%s' % async_jid))
# Because this is an async task, the action handler is async. However,
# we need the 'normal' action handler for the status check, so get it
# now via the action_loader
normal_handler = self._shared_loader_obj.action_loader.get(
'normal',
task=async_task,
connection=self._connection,
play_context=self._play_context,
loader=self._loader,
templar=templar,
shared_loader_obj=self._shared_loader_obj,
)
time_left = self._task.async
while time_left > 0:
time.sleep(self._task.poll)
try:
async_result = normal_handler.run(task_vars=task_vars)
# We do not bail out of the loop in cases where the failure
# is associated with a parsing error. The async_runner can
# have issues which result in a half-written/unparseable result
# file on disk, which manifests to the user as a timeout happening
# before it's time to timeout.
if int(async_result.get('finished', 0)) == 1 or ('failed' in async_result and async_result.get('_ansible_parsed', False)) or 'skipped' in async_result:
break
except Exception as e:
# Connections can raise exceptions during polling (eg, network bounce, reboot); these should be non-fatal.
# On an exception, call the connection's reset method if it has one (eg, drop/recreate WinRM connection; some reused connections are in a broken state)
display.vvvv("Exception during async poll, retrying... (%s)" % to_text(e))
display.debug("Async poll exception was:\n%s" % to_text(traceback.format_exc()))
try:
normal_handler._connection._reset()
except AttributeError:
pass
time_left -= self._task.poll
if int(async_result.get('finished', 0)) != 1:
if async_result.get('_ansible_parsed'):
return dict(failed=True, msg="async task did not complete within the requested time")
else:
return dict(failed=True, msg="async task produced unparseable results", async_result=async_result)
else:
return async_result
def _get_connection(self, variables, templar):
'''
Reads the connection property for the host, and returns the
correct connection object from the list of connection plugins
'''
if self._task.delegate_to is not None:
# since we're delegating, we don't want to use interpreter values
# which would have been set for the original target host
for i in list(variables.keys()):
if isinstance(i, string_types) and i.startswith('ansible_') and i.endswith('_interpreter'):
del variables[i]
# now replace the interpreter values with those that may have come
# from the delegated-to host
delegated_vars = variables.get('ansible_delegated_vars', dict()).get(self._task.delegate_to, dict())
if isinstance(delegated_vars, dict):
for i in delegated_vars:
if isinstance(i, string_types) and i.startswith("ansible_") and i.endswith("_interpreter"):
variables[i] = delegated_vars[i]
conn_type = self._play_context.connection
if conn_type == 'smart':
conn_type = 'ssh'
if sys.platform.startswith('darwin') and self._play_context.password:
# due to a current bug in sshpass on OSX, which can trigger
# a kernel panic even for non-privileged users, we revert to
                # paramiko on that OS when an SSH password is specified
conn_type = "paramiko"
else:
                # see if SSH can support ControlPersist; if not, use paramiko
if not check_for_controlpersist(self._play_context.ssh_executable):
conn_type = "paramiko"
connection = self._shared_loader_obj.connection_loader.get(conn_type, self._play_context, self._new_stdin)
if not connection:
raise AnsibleError("the connection plugin '%s' was not found" % conn_type)
if self._play_context.accelerate:
# accelerate is deprecated as of 2.1...
display.deprecated('Accelerated mode is deprecated. Consider using SSH with ControlPersist and pipelining enabled instead')
# launch the accelerated daemon here
ssh_connection = connection
handler = self._shared_loader_obj.action_loader.get(
'normal',
task=self._task,
connection=ssh_connection,
play_context=self._play_context,
loader=self._loader,
templar=templar,
shared_loader_obj=self._shared_loader_obj,
)
key = key_for_hostname(self._play_context.remote_addr)
accelerate_args = dict(
password=base64.b64encode(key.__str__()),
port=self._play_context.accelerate_port,
minutes=C.ACCELERATE_DAEMON_TIMEOUT,
ipv6=self._play_context.accelerate_ipv6,
debug=self._play_context.verbosity,
)
connection = self._shared_loader_obj.connection_loader.get('accelerate', self._play_context, self._new_stdin)
if not connection:
raise AnsibleError("the connection plugin '%s' was not found" % conn_type)
try:
connection._connect()
except AnsibleConnectionFailure:
display.debug('connection failed, fallback to accelerate')
res = handler._execute_module(module_name='accelerate', module_args=accelerate_args, task_vars=variables, delete_remote_tmp=False)
display.debug(res)
connection._connect()
return connection
def _get_action_handler(self, connection, templar):
'''
        Returns the correct action plugin to handle the requested task action
'''
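        # Prefer an action plugin matching the task's action name (async is not
        # supported for those); otherwise use the generic 'normal' handler, or
        # 'async' when the task runs asynchronously.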
if self._task.action in self._shared_loader_obj.action_loader:
if self._task.async != 0:
raise AnsibleError("async mode is not supported with the %s module" % self._task.action)
handler_name = self._task.action
elif self._task.async == 0:
handler_name = 'normal'
else:
handler_name = 'async'
handler = self._shared_loader_obj.action_loader.get(
handler_name,
task=self._task,
connection=connection,
play_context=self._play_context,
loader=self._loader,
templar=templar,
shared_loader_obj=self._shared_loader_obj,
)
if not handler:
raise AnsibleError("the handler '%s' was not found" % handler_name)
return handler
| wenottingham/ansible | lib/ansible/executor/task_executor.py | Python | gpl-3.0 | 35,230 |
# vi: ts=4 expandtab
#
# Copyright (C) 2011 Canonical Ltd.
# Copyright (C) 2012 Hewlett-Packard Development Company, L.P.
#
# Author: Scott Moser <[email protected]>
# Author: Juerg Haefliger <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3, as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
from cloudinit.settings import PER_INSTANCE
from cloudinit import util
frequency = PER_INSTANCE
# This is a tool that cloud init provides
HELPER_TOOL_TPL = '%s/cloud-init/write-ssh-key-fingerprints'
def _get_helper_tool_path(distro):
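    # Prefer the distro-specific libexec directory when the distro object
    # provides one; otherwise fall back to /usr/lib.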
try:
base_lib = distro.usr_lib_exec
except AttributeError:
base_lib = '/usr/lib'
return HELPER_TOOL_TPL % base_lib
def handle(name, cfg, cloud, log, _args):
helper_path = _get_helper_tool_path(cloud.distro)
if not os.path.exists(helper_path):
log.warn(("Unable to activate module %s,"
" helper tool not found at %s"), name, helper_path)
return
fp_blacklist = util.get_cfg_option_list(cfg,
"ssh_fp_console_blacklist", [])
key_blacklist = util.get_cfg_option_list(cfg,
"ssh_key_console_blacklist",
["ssh-dss"])
try:
cmd = [helper_path]
cmd.append(','.join(fp_blacklist))
cmd.append(','.join(key_blacklist))
(stdout, _stderr) = util.subp(cmd)
util.multi_log("%s\n" % (stdout.strip()),
stderr=False, console=True)
except Exception:
log.warn("Writing keys to the system console failed!")
raise
| prometheanfire/cloud-init | cloudinit/config/cc_keys_to_console.py | Python | gpl-3.0 | 2,179 |
# shipVelocityBonusMI
#
# Used by:
# Variations of ship: Mammoth (2 of 2)
# Ship: Hoarder
# Ship: Prowler
type = "passive"
def handler(fit, ship, context):
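    # Boost the ship's max velocity by the hull's shipBonusMI attribute,
    # scaled per level of the Minmatar Industrial skill.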
fit.ship.boostItemAttr("maxVelocity", ship.getModifiedItemAttr("shipBonusMI"), skill="Minmatar Industrial")
| Ebag333/Pyfa | eos/effects/shipvelocitybonusmi.py | Python | gpl-3.0 | 270 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting field 'Group.is_open'
db.delete_column('askbot_group', 'is_open')
def backwards(self, orm):
# Adding field 'Group.is_open'
db.add_column('askbot_group', 'is_open',
self.gf('django.db.models.fields.BooleanField')(default=False),
keep_default=False)
models = {
'askbot.activity': {
'Meta': {'object_name': 'Activity', 'db_table': "u'activity'"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'activity_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_auditted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'exercise': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Post']", 'null': 'True'}),
'receiving_users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'received_activity'", 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'recipients': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'incoming_activity'", 'symmetrical': 'False', 'through': "orm['askbot.ActivityAuditStatus']", 'to': "orm['auth.User']"}),
'summary': ('django.db.models.fields.TextField', [], {'default': "''"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.activityauditstatus': {
'Meta': {'unique_together': "(('user', 'activity'),)", 'object_name': 'ActivityAuditStatus'},
'activity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Activity']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'status': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.anonymousproblem': {
'Meta': {'object_name': 'AnonymousProblem'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'exercise': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'anonymous_problems'", 'to': "orm['askbot.Post']"}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'text': ('django.db.models.fields.TextField', [], {}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'askbot.anonymousexercise': {
'Meta': {'object_name': 'AnonymousExercise'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'is_anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'text': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'askbot.askwidget': {
'Meta': {'object_name': 'AskWidget'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'include_text_field': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'inner_style': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'outer_style': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Tag']", 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'askbot.award': {
'Meta': {'object_name': 'Award', 'db_table': "u'award'"},
'awarded_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'badge': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_badge'", 'to': "orm['askbot.BadgeData']"}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notified': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_user'", 'to': "orm['auth.User']"})
},
'askbot.badgedata': {
'Meta': {'ordering': "('slug',)", 'object_name': 'BadgeData'},
'awarded_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'awarded_to': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'badges'", 'symmetrical': 'False', 'through': "orm['askbot.Award']", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'})
},
'askbot.draftproblem': {
'Meta': {'object_name': 'DraftProblem'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'draft_problems'", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'thread': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'draft_problems'", 'to': "orm['askbot.Thread']"})
},
'askbot.draftexercise': {
'Meta': {'object_name': 'DraftExercise'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125', 'null': 'True'}),
'text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True'})
},
'askbot.emailfeedsetting': {
'Meta': {'unique_together': "(('subscriber', 'feed_type'),)", 'object_name': 'EmailFeedSetting'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'feed_type': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'frequency': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '8'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reported_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'subscriber': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'notification_subscriptions'", 'to': "orm['auth.User']"})
},
'askbot.favoriteexercise': {
'Meta': {'object_name': 'FavoriteExercise', 'db_table': "u'favorite_exercise'"},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'thread': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Thread']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_favorite_exercises'", 'to': "orm['auth.User']"})
},
'askbot.group': {
'Meta': {'object_name': 'Group', '_ormbases': ['auth.Group']},
'group_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.Group']", 'unique': 'True', 'primary_key': 'True'}),
'logo_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'}),
'moderate_email': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'openness': ('django.db.models.fields.SmallIntegerField', [], {'default': '2'}),
'preapproved_email_domains': ('django.db.models.fields.TextField', [], {'default': "''", 'null': 'True', 'blank': 'True'}),
'preapproved_emails': ('django.db.models.fields.TextField', [], {'default': "''", 'null': 'True', 'blank': 'True'})
},
'askbot.markedtag': {
'Meta': {'object_name': 'MarkedTag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reason': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_selections'", 'to': "orm['askbot.Tag']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tag_selections'", 'to': "orm['auth.User']"})
},
'askbot.post': {
'Meta': {'object_name': 'Post'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'approved': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'posts'", 'to': "orm['auth.User']"}),
'comment_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_posts'", 'null': 'True', 'to': "orm['auth.User']"}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'group_posts'", 'symmetrical': 'False', 'through': "orm['askbot.PostToGroup']", 'to': "orm['askbot.Group']"}),
'html': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_edited_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_edited_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_edited_posts'", 'null': 'True', 'to': "orm['auth.User']"}),
'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'locked_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'locked_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locked_posts'", 'null': 'True', 'to': "orm['auth.User']"}),
'offensive_flag_count': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'old_problem_id': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'old_comment_id': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'old_exercise_id': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'comments'", 'null': 'True', 'to': "orm['askbot.Post']"}),
'post_type': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'thread': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'posts'", 'null': 'True', 'blank': 'True', 'to': "orm['askbot.Thread']"}),
'vote_down_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'vote_up_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'wikified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'askbot.postflagreason': {
'Meta': {'object_name': 'PostFlagReason'},
'added_at': ('django.db.models.fields.DateTimeField', [], {}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'details': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'post_reject_reasons'", 'to': "orm['askbot.Post']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'askbot.postrevision': {
'Meta': {'ordering': "('-revision',)", 'unique_together': "(('post', 'revision'),)", 'object_name': 'PostRevision'},
'approved': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'approved_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'approved_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'postrevisions'", 'to': "orm['auth.User']"}),
'by_email': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'email_address': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'post': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'revisions'", 'null': 'True', 'to': "orm['askbot.Post']"}),
'revised_at': ('django.db.models.fields.DateTimeField', [], {}),
'revision': ('django.db.models.fields.PositiveIntegerField', [], {}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
'tagnames': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '125', 'blank': 'True'}),
'text': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '300', 'blank': 'True'})
},
'askbot.posttogroup': {
'Meta': {'unique_together': "(('post', 'group'),)", 'object_name': 'PostToGroup', 'db_table': "'askbot_post_groups'"},
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'post': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Post']"})
},
'askbot.exerciseview': {
'Meta': {'object_name': 'ExerciseView'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'exercise': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'viewed'", 'to': "orm['askbot.Post']"}),
'when': ('django.db.models.fields.DateTimeField', [], {}),
'who': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'exercise_views'", 'to': "orm['auth.User']"})
},
'askbot.exercisewidget': {
'Meta': {'object_name': 'ExerciseWidget'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order_by': ('django.db.models.fields.CharField', [], {'default': "'-added_at'", 'max_length': '18'}),
'exercise_number': ('django.db.models.fields.PositiveIntegerField', [], {'default': '7'}),
'search_query': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'style': ('django.db.models.fields.TextField', [], {'default': '"\\n@import url(\'http://fonts.googleapis.com/css?family=Yanone+Kaffeesatz:300,400,700\');\\nbody {\\n overflow: hidden;\\n}\\n\\n#container {\\n width: 200px;\\n height: 350px;\\n}\\nul {\\n list-style: none;\\n padding: 5px;\\n margin: 5px;\\n}\\nli {\\n border-bottom: #CCC 1px solid;\\n padding-bottom: 5px;\\n padding-top: 5px;\\n}\\nli:last-child {\\n border: none;\\n}\\na {\\n text-decoration: none;\\n color: #464646;\\n font-family: \'Yanone Kaffeesatz\', sans-serif;\\n font-size: 15px;\\n}\\n"', 'blank': 'True'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'askbot.replyaddress': {
'Meta': {'object_name': 'ReplyAddress'},
'address': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '25'}),
'allowed_from_email': ('django.db.models.fields.EmailField', [], {'max_length': '150'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'post': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reply_addresses'", 'null': 'True', 'to': "orm['askbot.Post']"}),
'reply_action': ('django.db.models.fields.CharField', [], {'default': "'auto_problem_or_comment'", 'max_length': '32'}),
'response_post': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'edit_addresses'", 'null': 'True', 'to': "orm['askbot.Post']"}),
'used_at': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.repute': {
'Meta': {'object_name': 'Repute', 'db_table': "u'repute'"},
'comment': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'negative': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'positive': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'exercise': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Post']", 'null': 'True', 'blank': 'True'}),
'reputation': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'reputation_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'reputed_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.tag': {
'Meta': {'ordering': "('-used_count', 'name')", 'object_name': 'Tag', 'db_table': "u'tag'"},
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_tags'", 'to': "orm['auth.User']"}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_tags'", 'null': 'True', 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'status': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'suggested_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'suggested_tags'", 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'tag_wiki': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'described_tag'", 'unique': 'True', 'null': 'True', 'to': "orm['askbot.Post']"}),
'used_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'askbot.thread': {
'Meta': {'object_name': 'Thread'},
'accepted_problem': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['askbot.Post']"}),
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'problem_accepted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'problem_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'approved': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'close_reason': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'closed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'closed_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'closed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'favorited_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'unused_favorite_threads'", 'symmetrical': 'False', 'through': "orm['askbot.FavoriteExercise']", 'to': "orm['auth.User']"}),
'favourite_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'followed_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'followed_threads'", 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'group_threads'", 'symmetrical': 'False', 'db_table': "'askbot_thread_groups'", 'to': "orm['askbot.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_activity_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_activity_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'unused_last_active_in_threads'", 'to': "orm['auth.User']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'threads'", 'symmetrical': 'False', 'to': "orm['askbot.Tag']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'view_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'askbot.vote': {
'Meta': {'unique_together': "(('user', 'voted_post'),)", 'object_name': 'Vote', 'db_table': "u'vote'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'votes'", 'to': "orm['auth.User']"}),
'vote': ('django.db.models.fields.SmallIntegerField', [], {}),
'voted_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'voted_post': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'votes'", 'to': "orm['askbot.Post']"})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'about': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'avatar_type': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '1'}),
'bronze': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'consecutive_days_visit_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'country': ('django_countries.fields.CountryField', [], {'max_length': '2', 'blank': 'True'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'display_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'email_isvalid': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'email_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'email_signature': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'email_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'gold': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'gravatar': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ignored_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'interesting_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_fake': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'new_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'exercises_per_page': ('django.db.models.fields.SmallIntegerField', [], {'default': '10'}),
'real_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'reputation': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'seen_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'show_country': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'show_marked_tags': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'silver': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'w'", 'max_length': '2'}),
'subscribed_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['askbot'] | maxwward/SCOPEBak | askbot/migrations/0141_auto__del_field_group_is_open.py | Python | gpl-3.0 | 32,000 |
# -*- coding: utf-8 -*-
from __future__ import with_statement
import os
import sys
import traceback
# monkey patch bug in python 2.6 and lower
# http://bugs.python.org/issue6122 , http://bugs.python.org/issue1236 , http://bugs.python.org/issue1731717
if sys.version_info < (2, 7) and os.name != "nt":
import errno
import subprocess
def _eintr_retry_call(func, *args):
while True:
try:
return func(*args)
except OSError, e:
if e.errno == errno.EINTR:
continue
raise
    #: Unused timeout option for older Python versions
def wait(self, timeout=0):
"""
Wait for child process to terminate. Returns returncode
attribute.
"""
if self.returncode is None:
try:
pid, sts = _eintr_retry_call(os.waitpid, self.pid, 0)
except OSError, e:
if e.errno != errno.ECHILD:
raise
#: This happens if SIGCLD is set to be ignored or waiting
                #: for child processes has otherwise been disabled for our
#: process. This child is dead, we can't get the status.
sts = 0
self._handle_exitstatus(sts)
return self.returncode
subprocess.Popen.wait = wait
try:
import send2trash
except ImportError:
pass
from module.plugins.internal.Addon import Addon, Expose, threaded
from module.plugins.internal.Plugin import exists, replace_patterns
from module.plugins.internal.Extractor import ArchiveError, CRCError, PasswordError
from module.utils import fs_encode, save_join as fs_join, uniqify
class ArchiveQueue(object):
def __init__(self, plugin, storage):
self.plugin = plugin
self.storage = storage
def get(self):
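        # The queue is persisted as a base64-encoded, space-separated list of package ids.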
try:
return [int(pid) for pid in self.plugin.retrieve("ExtractArchive:%s" % self.storage, "").decode('base64').split()]
except Exception:
return []
def set(self, value):
if isinstance(value, list):
item = str(value)[1:-1].replace(' ', '').replace(',', ' ')
else:
item = str(value).strip()
return self.plugin.store("ExtractArchive:%s" % self.storage, item.encode('base64')[:-1])
def delete(self):
return self.plugin.delete("ExtractArchive:%s" % self.storage)
def add(self, item):
queue = self.get()
if item not in queue:
return self.set(queue + [item])
else:
return True
def remove(self, item):
queue = self.get()
try:
queue.remove(item)
except ValueError:
pass
        if not queue:
return self.delete()
return self.set(queue)
class ExtractArchive(Addon):
__name__ = "ExtractArchive"
__type__ = "hook"
__version__ = "1.50"
__status__ = "testing"
__config__ = [("activated" , "bool" , "Activated" , True ),
("fullpath" , "bool" , "Extract with full paths" , True ),
("overwrite" , "bool" , "Overwrite files" , False ),
("keepbroken" , "bool" , "Try to extract broken archives" , False ),
("repair" , "bool" , "Repair broken archives (RAR required)" , False ),
("test" , "bool" , "Test archive before extracting" , False ),
("usepasswordfile", "bool" , "Use password file" , True ),
("passwordfile" , "file" , "Password file" , "passwords.txt" ),
("delete" , "bool" , "Delete archive after extraction" , True ),
("deltotrash" , "bool" , "Move to trash (recycle bin) instead delete", True ),
("subfolder" , "bool" , "Create subfolder for each package" , False ),
("destination" , "folder" , "Extract files to folder" , "" ),
("extensions" , "str" , "Extract archives ending with extension" , "7z,bz2,bzip2,gz,gzip,lha,lzh,lzma,rar,tar,taz,tbz,tbz2,tgz,xar,xz,z,zip"),
("excludefiles" , "str" , "Don't extract the following files" , "*.nfo,*.DS_Store,index.dat,thumb.db" ),
("recursive" , "bool" , "Extract archives in archives" , True ),
("waitall" , "bool" , "Run after all downloads was processed" , False ),
("renice" , "int" , "CPU priority" , 0 )]
__description__ = """Extract different kind of archives"""
__license__ = "GPLv3"
__authors__ = [("Walter Purcaro", "[email protected]"),
("Immenz" , "[email protected]" )]
NAME_REPLACEMENTS = [(r'\.part\d+\.rar$', ".part.rar")]
def init(self):
self.event_map = {'allDownloadsProcessed': "all_downloads_processed",
'packageDeleted' : "package_deleted" }
self.queue = ArchiveQueue(self, "Queue")
self.failed = ArchiveQueue(self, "Failed")
self.interval = 60
self.extracting = False
self.last_package = False
self.extractors = []
self.passwords = []
self.repair = False
def activate(self):
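        # Probe which extractor plugins (UnRar, SevenZip, UnZip) are usable on
        # this system and note whether any of them can repair archives.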
for p in ("UnRar", "SevenZip", "UnZip"):
try:
module = self.pyload.pluginManager.loadModule("internal", p)
klass = getattr(module, p)
if klass.find():
self.extractors.append(klass)
if klass.REPAIR:
self.repair = self.get_config('repair')
except OSError, e:
if e.errno == 2:
self.log_warning(_("No %s installed") % p)
else:
self.log_warning(_("Could not activate: %s") % p, e)
if self.pyload.debug:
traceback.print_exc()
except Exception, e:
self.log_warning(_("Could not activate: %s") % p, e)
if self.pyload.debug:
traceback.print_exc()
if self.extractors:
self.log_debug(*["Found %s %s" % (Extractor.__name__, Extractor.VERSION) for Extractor in self.extractors])
self.extract_queued() #: Resume unfinished extractions
else:
self.log_info(_("No Extract plugins activated"))
@threaded
def extract_queued(self, thread):
        if self.extracting: #@NOTE: doing the check here for safety (called by coreReady)
return
self.extracting = True
packages = self.queue.get()
while packages:
if self.last_package: #: Called from allDownloadsProcessed
self.last_package = False
if self.extract(packages, thread): #@NOTE: check only if all gone fine, no failed reporting for now
self.manager.dispatchEvent("all_archives_extracted")
self.manager.dispatchEvent("all_archives_processed")
else:
if self.extract(packages, thread): #@NOTE: check only if all gone fine, no failed reporting for now
pass
packages = self.queue.get() #: Check for packages added during extraction
self.extracting = False
#: Deprecated method, use `extract_package` instead
@Expose
def extractPackage(self, *args, **kwargs):
"""
See `extract_package`
"""
return self.extract_package(*args, **kwargs)
@Expose
def extract_package(self, *ids):
"""
Extract packages with given id
"""
for id in ids:
self.queue.add(id)
if not self.get_config('waitall') and not self.extracting:
self.extract_queued()
def package_deleted(self, pid):
self.queue.remove(pid)
def package_finished(self, pypack):
self.queue.add(pypack.id)
if not self.get_config('waitall') and not self.extracting:
self.extract_queued()
def all_downloads_processed(self):
self.last_package = True
if self.get_config('waitall') and not self.extracting:
self.extract_queued()
@Expose
def extract(self, ids, thread=None): #@TODO: Use pypack, not pid to improve method usability
if not ids:
return False
processed = []
extracted = []
failed = []
toList = lambda string: string.replace(' ', '').replace(',', '|').replace(';', '|').split('|')
destination = self.get_config('destination')
subfolder = self.get_config('subfolder')
fullpath = self.get_config('fullpath')
overwrite = self.get_config('overwrite')
renice = self.get_config('renice')
recursive = self.get_config('recursive')
delete = self.get_config('delete')
keepbroken = self.get_config('keepbroken')
extensions = [x.lstrip('.').lower() for x in toList(self.get_config('extensions'))]
excludefiles = toList(self.get_config('excludefiles'))
if extensions:
self.log_debug("Use for extensions: %s" % "|.".join(extensions))
#: Reload from txt file
self.reload_passwords()
download_folder = self.pyload.config.get("general", "download_folder")
#: Iterate packages -> extractors -> targets
for pid in ids:
pypack = self.pyload.files.getPackage(pid)
if not pypack:
self.queue.remove(pid)
continue
self.log_info(_("Check package: %s") % pypack.name)
#: Determine output folder
out = fs_join(download_folder, pypack.folder, destination, "") #: Force trailing slash
if subfolder:
out = fs_join(out, pypack.folder)
if not exists(out):
os.makedirs(out)
matched = False
success = True
files_ids = dict((pylink['name'], ((fs_join(download_folder, pypack.folder, pylink['name'])), pylink['id'], out)) for pylink \
in sorted(pypack.getChildren().values(), key=lambda k: k['name'])).values() #: Remove duplicates
#: Check as long there are unseen files
while files_ids:
new_files_ids = []
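                # Keep only files whose extension matches the configured list.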
if extensions:
files_ids = [(fname, fid, fout) for fname, fid, fout in files_ids \
if filter(lambda ext: fname.lower().endswith(ext), extensions)]
for Extractor in self.extractors:
targets = Extractor.get_targets(files_ids)
if targets:
self.log_debug("Targets for %s: %s" % (Extractor.__name__, targets))
matched = True
for fname, fid, fout in targets:
name = os.path.basename(fname)
if not exists(fname):
self.log_debug(name, "File not found")
continue
self.log_info(name, _("Extract to: %s") % fout)
try:
pyfile = self.pyload.files.getFile(fid)
archive = Extractor(self,
fname,
fout,
fullpath,
overwrite,
excludefiles,
renice,
delete,
keepbroken,
fid)
thread.addActive(pyfile)
archive.init()
try:
new_files = self._extract(pyfile, archive, pypack.password)
finally:
pyfile.setProgress(100)
thread.finishFile(pyfile)
except Exception, e:
self.log_error(name, e)
success = False
continue
#: Remove processed file and related multiparts from list
files_ids = [(fname, fid, fout) for fname, fid, fout in files_ids \
if fname not in archive.get_delete_files()]
self.log_debug("Extracted files: %s" % new_files)
for file in new_files:
self.set_permissions(file)
for filename in new_files:
file = fs_encode(fs_join(os.path.dirname(archive.filename), filename))
if not exists(file):
self.log_debug("New file %s does not exists" % filename)
continue
if recursive and os.path.isfile(file):
new_files_ids.append((filename, fid, os.path.dirname(filename))) #: Append as new target
self.manager.dispatchEvent("archive_extracted", pyfile, archive)
files_ids = new_files_ids #: Also check extracted files
if matched:
if success:
extracted.append(pid)
self.manager.dispatchEvent("package_extracted", pypack)
else:
failed.append(pid)
self.manager.dispatchEvent("package_extract_failed", pypack)
self.failed.add(pid)
else:
self.log_info(_("No files found to extract"))
if not matched or not success and subfolder:
try:
os.rmdir(out)
except OSError:
pass
self.queue.remove(pid)
return True if not failed else False
def _extract(self, pyfile, archive, password):
name = os.path.basename(archive.filename)
pyfile.setStatus("processing")
encrypted = False
try:
self.log_debug("Password: %s" % (password or "None provided"))
passwords = uniqify([password] + self.get_passwords(False)) if self.get_config('usepasswordfile') else [password]
for pw in passwords:
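                # Try each candidate password; the first one that verifies (or
                # repairs) the archive is remembered via add_password().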
try:
if self.get_config('test') or self.repair:
pyfile.setCustomStatus(_("archive testing"))
if pw:
self.log_debug("Testing with password: %s" % pw)
pyfile.setProgress(0)
archive.verify(pw)
pyfile.setProgress(100)
else:
archive.check(pw)
self.add_password(pw)
break
except PasswordError:
if not encrypted:
self.log_info(name, _("Password protected"))
encrypted = True
except CRCError, e:
self.log_debug(name, e)
self.log_info(name, _("CRC Error"))
if self.repair:
self.log_warning(name, _("Repairing..."))
pyfile.setCustomStatus(_("archive repairing"))
pyfile.setProgress(0)
repaired = archive.repair()
pyfile.setProgress(100)
if not repaired and not self.get_config('keepbroken'):
raise CRCError("Archive damaged")
self.add_password(pw)
break
raise CRCError("Archive damaged")
except ArchiveError, e:
raise ArchiveError(e)
pyfile.setCustomStatus(_("extracting"))
pyfile.setProgress(0)
if not encrypted or not self.get_config('usepasswordfile'):
self.log_debug("Extracting using password: %s" % (password or "None"))
archive.extract(password)
else:
for pw in filter(None, uniqify([password] + self.get_passwords(False))):
try:
self.log_debug("Extracting using password: %s" % pw)
archive.extract(pw)
self.add_password(pw)
break
except PasswordError:
self.log_debug("Password was wrong")
else:
raise PasswordError
pyfile.setProgress(100)
pyfile.setStatus("processing")
delfiles = archive.get_delete_files()
self.log_debug("Would delete: " + ", ".join(delfiles))
if self.get_config('delete'):
self.log_info(_("Deleting %s files") % len(delfiles))
deltotrash = self.get_config('deltotrash')
for f in delfiles:
file = fs_encode(f)
if not exists(file):
continue
if not deltotrash:
os.remove(file)
else:
try:
send2trash.send2trash(file)
except NameError:
self.log_warning(_("Unable to move %s to trash") % os.path.basename(f),
_("Send2Trash lib not found"))
except Exception, e:
self.log_warning(_("Unable to move %s to trash") % os.path.basename(f),
e.message)
else:
self.log_info(_("Moved %s to trash") % os.path.basename(f))
self.log_info(name, _("Extracting finished"))
extracted_files = archive.files or archive.list()
return extracted_files
except PasswordError:
self.log_error(name, _("Wrong password" if password else "No password found"))
except CRCError, e:
self.log_error(name, _("CRC mismatch"), e)
except ArchiveError, e:
self.log_error(name, _("Archive error"), e)
except Exception, e:
self.log_error(name, _("Unknown error"), e)
if self.pyload.debug:
traceback.print_exc()
self.manager.dispatchEvent("archive_extract_failed", pyfile, archive)
raise Exception(_("Extract failed"))
#: Deprecated method, use `get_passwords` instead
@Expose
def getPasswords(self, *args, **kwargs):
"""
See `get_passwords`
"""
return self.get_passwords(*args, **kwargs)
@Expose
def get_passwords(self, reload=True):
"""
List of saved passwords
"""
if reload:
self.reload_passwords()
return self.passwords
def reload_passwords(self):
try:
passwords = []
file = fs_encode(self.get_config('passwordfile'))
with open(file) as f:
for pw in f.read().splitlines():
passwords.append(pw)
except IOError, e:
self.log_error(e)
else:
self.passwords = passwords
#: Deprecated method, use `add_password` instead
@Expose
def addPassword(self, *args, **kwargs):
"""
See `add_password`
"""
return self.add_password(*args, **kwargs)
@Expose
def add_password(self, password):
"""
        Adds a password to the saved list
"""
try:
self.passwords = uniqify([password] + self.passwords)
file = fs_encode(self.get_config('passwordfile'))
with open(file, "wb") as f:
for pw in self.passwords:
f.write(pw + '\n')
except IOError, e:
self.log_error(e)
| benbox69/pyload | module/plugins/hooks/ExtractArchive.py | Python | gpl-3.0 | 21,976 |
"""CLI tests for Insights part of RH Cloud - Inventory plugin.
:Requirement: RH Cloud - Inventory
:CaseAutomation: Automated
:CaseLevel: System
:CaseComponent: RHCloud-Inventory
:Assignee: jpathan
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
import pytest
from broker.broker import VMBroker
from robottelo.constants import DISTRO_RHEL7
from robottelo.constants import DISTRO_RHEL8
from robottelo.hosts import ContentHost
@pytest.mark.tier4
@pytest.mark.parametrize('distro', [DISTRO_RHEL8, DISTRO_RHEL7])
def test_positive_connection_option(organization_ak_setup, default_sat, distro):
"""Verify that 'insights-client --test-connection' successfully tests the proxy connection via
the Satellite.
:id: 61a4a39e-b484-49f4-a6fd-46ffc7736e50
:Steps:
1. Create RHEL7 and RHEL8 VM and register to insights within org having manifest.
2. Run 'insights-client --test-connection'.
:expectedresults: 'insights-client --test-connection' should return 0.
:CaseImportance: Critical
"""
org, activation_key = organization_ak_setup
with VMBroker(nick=distro, host_classes={'host': ContentHost}) as vm:
vm.configure_rhai_client(default_sat, activation_key.name, org.label, distro)
result = vm.run('insights-client --test-connection')
assert result.status == 0, (
'insights-client --test-connection failed.\n'
f'status: {result.status}\n'
f'stdout: {result.stdout}\n'
f'stderr: {result.stderr}'
)
| lpramuk/robottelo | tests/foreman/cli/test_rhcloud_insights.py | Python | gpl-3.0 | 1,546 |
import li_boost_shared_ptr
import gc
debug = False
# simple shared_ptr usage - created in C++
class li_boost_shared_ptr_runme:
def main(self):
if (debug):
print "Started"
li_boost_shared_ptr.cvar.debug_shared = debug
# Change loop count to run for a long time to monitor memory
loopCount = 1 #5000
for i in range (0,loopCount):
self.runtest()
# Expect 1 instance - the one global variable (GlobalValue)
if (li_boost_shared_ptr.Klass.getTotal_count() != 1):
raise RuntimeError("Klass.total_count=%s" % li_boost_shared_ptr.Klass.getTotal_count())
wrapper_count = li_boost_shared_ptr.shared_ptr_wrapper_count()
if (wrapper_count != li_boost_shared_ptr.NOT_COUNTING):
# Expect 1 instance - the one global variable (GlobalSmartValue)
if (wrapper_count != 1):
raise RuntimeError("shared_ptr wrapper count=%s" % wrapper_count)
if (debug):
print "Finished"
def runtest(self):
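    # Each block below exercises a different parameter-passing signature and
    # checks both the returned value and the shared_ptr reference count.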
# simple shared_ptr usage - created in C++
k = li_boost_shared_ptr.Klass("me oh my")
val = k.getValue()
self.verifyValue("me oh my", val)
self.verifyCount(1, k)
# simple shared_ptr usage - not created in C++
k = li_boost_shared_ptr.factorycreate()
val = k.getValue()
self.verifyValue("factorycreate", val)
self.verifyCount(1, k)
# pass by shared_ptr
k = li_boost_shared_ptr.Klass("me oh my")
kret = li_boost_shared_ptr.smartpointertest(k)
val = kret.getValue()
self.verifyValue("me oh my smartpointertest", val)
self.verifyCount(2, k)
self.verifyCount(2, kret)
# pass by shared_ptr pointer
k = li_boost_shared_ptr.Klass("me oh my")
kret = li_boost_shared_ptr.smartpointerpointertest(k)
val = kret.getValue()
self.verifyValue("me oh my smartpointerpointertest", val)
self.verifyCount(2, k)
self.verifyCount(2, kret)
# pass by shared_ptr reference
k = li_boost_shared_ptr.Klass("me oh my")
kret = li_boost_shared_ptr.smartpointerreftest(k)
val = kret.getValue()
self.verifyValue("me oh my smartpointerreftest", val)
self.verifyCount(2, k)
self.verifyCount(2, kret)
# pass by shared_ptr pointer reference
k = li_boost_shared_ptr.Klass("me oh my")
kret = li_boost_shared_ptr.smartpointerpointerreftest(k)
val = kret.getValue()
self.verifyValue("me oh my smartpointerpointerreftest", val)
self.verifyCount(2, k)
self.verifyCount(2, kret)
# const pass by shared_ptr
k = li_boost_shared_ptr.Klass("me oh my")
kret = li_boost_shared_ptr.constsmartpointertest(k)
val = kret.getValue()
self.verifyValue("me oh my", val)
self.verifyCount(2, k)
self.verifyCount(2, kret)
# const pass by shared_ptr pointer
k = li_boost_shared_ptr.Klass("me oh my")
kret = li_boost_shared_ptr.constsmartpointerpointertest(k)
val = kret.getValue()
self.verifyValue("me oh my", val)
self.verifyCount(2, k)
self.verifyCount(2, kret)
# const pass by shared_ptr reference
k = li_boost_shared_ptr.Klass("me oh my")
kret = li_boost_shared_ptr.constsmartpointerreftest(k)
val = kret.getValue()
self.verifyValue("me oh my", val)
self.verifyCount(2, k)
self.verifyCount(2, kret)
# pass by value
k = li_boost_shared_ptr.Klass("me oh my")
kret = li_boost_shared_ptr.valuetest(k)
val = kret.getValue()
self.verifyValue("me oh my valuetest", val)
self.verifyCount(1, k)
self.verifyCount(1, kret)
# pass by pointer
k = li_boost_shared_ptr.Klass("me oh my")
kret = li_boost_shared_ptr.pointertest(k)
val = kret.getValue()
self.verifyValue("me oh my pointertest", val)
self.verifyCount(1, k)
self.verifyCount(1, kret)
# pass by reference
k = li_boost_shared_ptr.Klass("me oh my")
kret = li_boost_shared_ptr.reftest(k)
val = kret.getValue()
self.verifyValue("me oh my reftest", val)
self.verifyCount(1, k)
self.verifyCount(1, kret)
# pass by pointer reference
k = li_boost_shared_ptr.Klass("me oh my")
kret = li_boost_shared_ptr.pointerreftest(k)
val = kret.getValue()
self.verifyValue("me oh my pointerreftest", val)
self.verifyCount(1, k)
self.verifyCount(1, kret)
# null tests
k = None
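    # Passing None yields a null/empty shared_ptr for pointer and smart-pointer
    # parameters, but raises ValueError for pass-by-value and pass-by-reference.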
if (li_boost_shared_ptr.smartpointertest(k) != None):
raise RuntimeError("return was not null")
if (li_boost_shared_ptr.smartpointerpointertest(k) != None):
raise RuntimeError("return was not null")
if (li_boost_shared_ptr.smartpointerreftest(k) != None):
raise RuntimeError("return was not null")
if (li_boost_shared_ptr.smartpointerpointerreftest(k) != None):
raise RuntimeError("return was not null")
if (li_boost_shared_ptr.nullsmartpointerpointertest(None) != "null pointer"):
raise RuntimeError("not null smartpointer pointer")
try:
li_boost_shared_ptr.valuetest(k)
raise RuntimeError("Failed to catch null pointer")
except ValueError:
pass
if (li_boost_shared_ptr.pointertest(k) != None):
raise RuntimeError("return was not null")
try:
li_boost_shared_ptr.reftest(k)
raise RuntimeError("Failed to catch null pointer")
except ValueError:
pass
# $owner
k = li_boost_shared_ptr.pointerownertest()
val = k.getValue()
self.verifyValue("pointerownertest", val)
self.verifyCount(1, k)
k = li_boost_shared_ptr.smartpointerpointerownertest()
val = k.getValue()
self.verifyValue("smartpointerpointerownertest", val)
self.verifyCount(1, k)
# //////////////////////////////// Derived class ////////////////////////////////////////
# derived pass by shared_ptr
k = li_boost_shared_ptr.KlassDerived("me oh my")
kret = li_boost_shared_ptr.derivedsmartptrtest(k)
val = kret.getValue()
self.verifyValue("me oh my derivedsmartptrtest-Derived", val)
self.verifyCount(2, k)
self.verifyCount(2, kret)
# derived pass by shared_ptr pointer
k = li_boost_shared_ptr.KlassDerived("me oh my")
kret = li_boost_shared_ptr.derivedsmartptrpointertest(k)
val = kret.getValue()
self.verifyValue("me oh my derivedsmartptrpointertest-Derived", val)
self.verifyCount(2, k)
self.verifyCount(2, kret)
# derived pass by shared_ptr ref
k = li_boost_shared_ptr.KlassDerived("me oh my")
kret = li_boost_shared_ptr.derivedsmartptrreftest(k)
val = kret.getValue()
self.verifyValue("me oh my derivedsmartptrreftest-Derived", val)
self.verifyCount(2, k)
self.verifyCount(2, kret)
# derived pass by shared_ptr pointer ref
k = li_boost_shared_ptr.KlassDerived("me oh my")
kret = li_boost_shared_ptr.derivedsmartptrpointerreftest(k)
val = kret.getValue()
self.verifyValue("me oh my derivedsmartptrpointerreftest-Derived", val)
self.verifyCount(2, k)
self.verifyCount(2, kret)
# derived pass by pointer
k = li_boost_shared_ptr.KlassDerived("me oh my")
kret = li_boost_shared_ptr.derivedpointertest(k)
val = kret.getValue()
self.verifyValue("me oh my derivedpointertest-Derived", val)
self.verifyCount(1, k)
self.verifyCount(1, kret)
# derived pass by ref
k = li_boost_shared_ptr.KlassDerived("me oh my")
kret = li_boost_shared_ptr.derivedreftest(k)
val = kret.getValue()
self.verifyValue("me oh my derivedreftest-Derived", val)
self.verifyCount(1, k)
self.verifyCount(1, kret)
# //////////////////////////////// Derived and base class mixed ////////////////////////////////////////
# pass by shared_ptr (mixed)
k = li_boost_shared_ptr.KlassDerived("me oh my")
kret = li_boost_shared_ptr.smartpointertest(k)
val = kret.getValue()
self.verifyValue("me oh my smartpointertest-Derived", val)
self.verifyCount(2, k)
self.verifyCount(2, kret)
# pass by shared_ptr pointer (mixed)
k = li_boost_shared_ptr.KlassDerived("me oh my")
kret = li_boost_shared_ptr.smartpointerpointertest(k)
val = kret.getValue()
self.verifyValue("me oh my smartpointerpointertest-Derived", val)
self.verifyCount(2, k)
self.verifyCount(2, kret)
# pass by shared_ptr reference (mixed)
k = li_boost_shared_ptr.KlassDerived("me oh my")
kret = li_boost_shared_ptr.smartpointerreftest(k)
val = kret.getValue()
self.verifyValue("me oh my smartpointerreftest-Derived", val)
self.verifyCount(2, k)
self.verifyCount(2, kret)
# pass by shared_ptr pointer reference (mixed)
k = li_boost_shared_ptr.KlassDerived("me oh my")
kret = li_boost_shared_ptr.smartpointerpointerreftest(k)
val = kret.getValue()
self.verifyValue("me oh my smartpointerpointerreftest-Derived", val)
self.verifyCount(2, k)
self.verifyCount(2, kret)
# pass by value (mixed)
k = li_boost_shared_ptr.KlassDerived("me oh my")
kret = li_boost_shared_ptr.valuetest(k)
val = kret.getValue()
self.verifyValue("me oh my valuetest", val) # note slicing
self.verifyCount(1, k)
self.verifyCount(1, kret)
# pass by pointer (mixed)
k = li_boost_shared_ptr.KlassDerived("me oh my")
kret = li_boost_shared_ptr.pointertest(k)
val = kret.getValue()
self.verifyValue("me oh my pointertest-Derived", val)
self.verifyCount(1, k)
self.verifyCount(1, kret)
# pass by ref (mixed)
k = li_boost_shared_ptr.KlassDerived("me oh my")
kret = li_boost_shared_ptr.reftest(k)
val = kret.getValue()
self.verifyValue("me oh my reftest-Derived", val)
self.verifyCount(1, k)
self.verifyCount(1, kret)
# //////////////////////////////// Overloading tests ////////////////////////////////////////
# Base class
k = li_boost_shared_ptr.Klass("me oh my")
self.verifyValue(li_boost_shared_ptr.overload_rawbyval(k), "rawbyval")
self.verifyValue(li_boost_shared_ptr.overload_rawbyref(k), "rawbyref")
self.verifyValue(li_boost_shared_ptr.overload_rawbyptr(k), "rawbyptr")
self.verifyValue(li_boost_shared_ptr.overload_rawbyptrref(k), "rawbyptrref")
self.verifyValue(li_boost_shared_ptr.overload_smartbyval(k), "smartbyval")
self.verifyValue(li_boost_shared_ptr.overload_smartbyref(k), "smartbyref")
self.verifyValue(li_boost_shared_ptr.overload_smartbyptr(k), "smartbyptr")
self.verifyValue(li_boost_shared_ptr.overload_smartbyptrref(k), "smartbyptrref")
# Derived class
k = li_boost_shared_ptr.KlassDerived("me oh my")
self.verifyValue(li_boost_shared_ptr.overload_rawbyval(k), "rawbyval")
self.verifyValue(li_boost_shared_ptr.overload_rawbyref(k), "rawbyref")
self.verifyValue(li_boost_shared_ptr.overload_rawbyptr(k), "rawbyptr")
self.verifyValue(li_boost_shared_ptr.overload_rawbyptrref(k), "rawbyptrref")
self.verifyValue(li_boost_shared_ptr.overload_smartbyval(k), "smartbyval")
self.verifyValue(li_boost_shared_ptr.overload_smartbyref(k), "smartbyref")
self.verifyValue(li_boost_shared_ptr.overload_smartbyptr(k), "smartbyptr")
self.verifyValue(li_boost_shared_ptr.overload_smartbyptrref(k), "smartbyptrref")
# 3rd derived class
k = li_boost_shared_ptr.Klass3rdDerived("me oh my")
val = k.getValue()
self.verifyValue("me oh my-3rdDerived", val)
self.verifyCount(1, k)
val = li_boost_shared_ptr.test3rdupcast(k)
self.verifyValue("me oh my-3rdDerived", val)
self.verifyCount(1, k)
# //////////////////////////////// Member variables ////////////////////////////////////////
# smart pointer by value
m = li_boost_shared_ptr.MemberVariables()
k = li_boost_shared_ptr.Klass("smart member value")
m.SmartMemberValue = k
val = k.getValue()
self.verifyValue("smart member value", val)
self.verifyCount(2, k)
kmember = m.SmartMemberValue
val = kmember.getValue()
self.verifyValue("smart member value", val)
self.verifyCount(3, kmember)
self.verifyCount(3, k)
del m
self.verifyCount(2, kmember)
self.verifyCount(2, k)
# smart pointer by pointer
m = li_boost_shared_ptr.MemberVariables()
k = li_boost_shared_ptr.Klass("smart member pointer")
m.SmartMemberPointer = k
val = k.getValue()
self.verifyValue("smart member pointer", val)
self.verifyCount(1, k)
kmember = m.SmartMemberPointer
val = kmember.getValue()
self.verifyValue("smart member pointer", val)
self.verifyCount(2, kmember)
self.verifyCount(2, k)
del m
self.verifyCount(2, kmember)
self.verifyCount(2, k)
# smart pointer by reference
m = li_boost_shared_ptr.MemberVariables()
k = li_boost_shared_ptr.Klass("smart member reference")
m.SmartMemberReference = k
val = k.getValue()
self.verifyValue("smart member reference", val)
self.verifyCount(2, k)
kmember = m.SmartMemberReference
val = kmember.getValue()
self.verifyValue("smart member reference", val)
self.verifyCount(3, kmember)
self.verifyCount(3, k)
# The C++ reference refers to SmartMemberValue...
kmemberVal = m.SmartMemberValue
val = kmember.getValue()
self.verifyValue("smart member reference", val)
self.verifyCount(4, kmemberVal)
self.verifyCount(4, kmember)
self.verifyCount(4, k)
del m
self.verifyCount(3, kmemberVal)
self.verifyCount(3, kmember)
self.verifyCount(3, k)
# plain by value
m = li_boost_shared_ptr.MemberVariables()
k = li_boost_shared_ptr.Klass("plain member value")
m.MemberValue = k
val = k.getValue()
self.verifyValue("plain member value", val)
self.verifyCount(1, k)
kmember = m.MemberValue
val = kmember.getValue()
self.verifyValue("plain member value", val)
self.verifyCount(1, kmember)
self.verifyCount(1, k)
del m
self.verifyCount(1, kmember)
self.verifyCount(1, k)
# plain by pointer
m = li_boost_shared_ptr.MemberVariables()
k = li_boost_shared_ptr.Klass("plain member pointer")
m.MemberPointer = k
val = k.getValue()
self.verifyValue("plain member pointer", val)
self.verifyCount(1, k)
kmember = m.MemberPointer
val = kmember.getValue()
self.verifyValue("plain member pointer", val)
self.verifyCount(1, kmember)
self.verifyCount(1, k)
del m
self.verifyCount(1, kmember)
self.verifyCount(1, k)
# plain by reference
m = li_boost_shared_ptr.MemberVariables()
k = li_boost_shared_ptr.Klass("plain member reference")
m.MemberReference = k
val = k.getValue()
self.verifyValue("plain member reference", val)
self.verifyCount(1, k)
kmember = m.MemberReference
val = kmember.getValue()
self.verifyValue("plain member reference", val)
self.verifyCount(1, kmember)
self.verifyCount(1, k)
del m
self.verifyCount(1, kmember)
self.verifyCount(1, k)
# null member variables
m = li_boost_shared_ptr.MemberVariables()
# shared_ptr by value
k = m.SmartMemberValue
if (k != None):
raise RuntimeError("expected null")
m.SmartMemberValue = None
k = m.SmartMemberValue
if (k != None):
raise RuntimeError("expected null")
self.verifyCount(0, k)
# plain by value
try:
m.MemberValue = None
raise RuntimeError("Failed to catch null pointer")
except ValueError:
pass
# ////////////////////////////////// Global variables ////////////////////////////////////////
# smart pointer
kglobal = li_boost_shared_ptr.cvar.GlobalSmartValue
if (kglobal != None):
raise RuntimeError("expected null")
k = li_boost_shared_ptr.Klass("smart global value")
li_boost_shared_ptr.cvar.GlobalSmartValue = k
self.verifyCount(2, k)
kglobal = li_boost_shared_ptr.cvar.GlobalSmartValue
val = kglobal.getValue()
self.verifyValue("smart global value", val)
self.verifyCount(3, kglobal)
self.verifyCount(3, k)
self.verifyValue("smart global value", li_boost_shared_ptr.cvar.GlobalSmartValue.getValue())
li_boost_shared_ptr.cvar.GlobalSmartValue = None
# plain value
k = li_boost_shared_ptr.Klass("global value")
li_boost_shared_ptr.cvar.GlobalValue = k
self.verifyCount(1, k)
kglobal = li_boost_shared_ptr.cvar.GlobalValue
val = kglobal.getValue()
self.verifyValue("global value", val)
self.verifyCount(1, kglobal)
self.verifyCount(1, k)
self.verifyValue("global value", li_boost_shared_ptr.cvar.GlobalValue.getValue())
try:
li_boost_shared_ptr.cvar.GlobalValue = None
raise RuntimeError("Failed to catch null pointer")
except ValueError:
pass
# plain pointer
kglobal = li_boost_shared_ptr.cvar.GlobalPointer
if (kglobal != None):
raise RuntimeError("expected null")
k = li_boost_shared_ptr.Klass("global pointer")
li_boost_shared_ptr.cvar.GlobalPointer = k
self.verifyCount(1, k)
kglobal = li_boost_shared_ptr.cvar.GlobalPointer
val = kglobal.getValue()
self.verifyValue("global pointer", val)
self.verifyCount(1, kglobal)
self.verifyCount(1, k)
li_boost_shared_ptr.cvar.GlobalPointer = None
# plain reference
k = li_boost_shared_ptr.Klass("global reference")
li_boost_shared_ptr.cvar.GlobalReference = k
self.verifyCount(1, k)
kglobal = li_boost_shared_ptr.cvar.GlobalReference
val = kglobal.getValue()
self.verifyValue("global reference", val)
self.verifyCount(1, kglobal)
self.verifyCount(1, k)
try:
li_boost_shared_ptr.cvar.GlobalReference = None
raise RuntimeError("Failed to catch null pointer")
except ValueError:
pass
# ////////////////////////////////// Templates ////////////////////////////////////////
pid = li_boost_shared_ptr.PairIntDouble(10, 20.2)
if (pid.baseVal1 != 20 or pid.baseVal2 != 40.4):
raise RuntimeError("Base values wrong")
if (pid.val1 != 10 or pid.val2 != 20.2):
raise RuntimeError("Derived Values wrong")
def verifyValue(self, expected, got):
if (expected != got):
raise RuntimeError("verify value failed. Expected: ", expected, " Got: ", got)
def verifyCount(self, expected, k):
got = li_boost_shared_ptr.use_count(k)
if (expected != got):
raise RuntimeError("verify use_count failed. Expected: ", expected, " Got: ", got)
runme = li_boost_shared_ptr_runme()
runme.main()
| jrversteegh/softsailor | deps/swig-2.0.4/Examples/test-suite/python/li_boost_shared_ptr_runme.py | Python | gpl-3.0 | 18,306 |
"""
The basic module about log readers
"""
import os
import re
from ..utils.gzip2 import GzipFile
__author__ = 'chenxm'
__all__ = ["FileReader"]
class FileReader(object):
@staticmethod
def open_file(filename, mode='rb'):
""" open plain or compressed file
@return file handler
"""
parts = os.path.basename(filename).split('.')
try:
assert parts[-1] == 'gz'
fh = GzipFile(mode=mode, filename = filename)
except:
fh = open(filename, mode)
return fh
@staticmethod
def list_files(folder, regex_str=r'.', match=True):
""" find all files under 'folder' with names matching
        some regular expression
"""
assert os.path.isdir(folder)
all_files_path = []
for root, dirs, files in os.walk(folder):
for filename in files:
if match and re.match(regex_str, filename, re.IGNORECASE):
all_files_path.append(os.path.join(root, filename))
elif not match and re.search(regex_str, filename, re.IGNORECASE):
all_files_path.append(os.path.join(root, filename))
return all_files_path
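# Illustrative usage of FileReader (a sketch only; the paths and the regular
# expression below are hypothetical and not part of this module):
#   fh = FileReader.open_file('/var/log/access.log.gz')   # gzip picked by extension
#   fh2 = FileReader.open_file('/var/log/access.log')     # plain open() fallback
#   gz_files = FileReader.list_files('/var/log', regex_str=r'.+\.gz$')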
class LogEntry(object):
def __init__(self):
self.data = {}
def get(self, property):
try:
return self[property]
except KeyError:
return None
def set(self, property, value):
self[property] = value
def __getitem__(self, property):
return self.data[property]
def __setitem__(self, property, value):
self.data[property] = value
def __str__(self):
return str(self.data)
class LogReader(object):
def __init__(self, filename):
self.filename = filename
self.filehandler = FileReader.open_file(filename)
def __iter__(self):
return self
def next(self):
try:
new_line = self.filehandler.next()
return new_line
except StopIteration:
self.filehandler.close()
            raise StopIteration
| pombreda/omnipy | omnipy/reader/_reader.py | Python | gpl-3.0 | 1,755
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Bruno Calogero <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: aci_access_port_to_interface_policy_leaf_profile
short_description: Manage Fabric interface policy leaf profile interface selectors (infra:HPortS, infra:RsAccBaseGrp, infra:PortBlk)
description:
- Manage Fabric interface policy leaf profile interface selectors on Cisco ACI fabrics.
notes:
- More information about the internal APIC classes B(infra:HPortS), B(infra:RsAccBaseGrp) and B(infra:PortBlk) from
L(the APIC Management Information Model reference,https://developer.cisco.com/docs/apic-mim-ref/)
author:
- Bruno Calogero (@brunocalogero)
version_added: '2.5'
options:
leaf_interface_profile:
description:
- The name of the Fabric access policy leaf interface profile.
required: yes
aliases: [ leaf_interface_profile_name ]
access_port_selector:
description:
- The name of the Fabric access policy leaf interface profile access port selector.
required: yes
aliases: [ name, access_port_selector_name ]
description:
description:
- The description to assign to the C(access_port_selector)
leaf_port_blk:
description:
- The name of the Fabric access policy leaf interface profile access port block.
required: yes
aliases: [ leaf_port_blk_name ]
leaf_port_blk_description:
description:
- The description to assign to the C(leaf_port_blk)
required: no
from:
description:
    - The beginning (from range) of the port range block for the leaf access port block.
required: yes
aliases: [ fromPort, from_port_range ]
to:
description:
- The end (to range) of the port range block for the leaf access port block.
required: yes
aliases: [ toPort, to_port_range ]
policy_group:
description:
- The name of the fabric access policy group to be associated with the leaf interface profile interface selector.
aliases: [ policy_group_name ]
interface_type:
version_added: '2.6'
description:
    - The type of interface for the static EPG deployment.
choices: [ fex, port_channel, switch_port, vpc ]
default: switch_port
state:
description:
- Use C(present) or C(absent) for adding or removing.
- Use C(query) for listing an object or multiple objects.
choices: [ absent, present, query ]
default: present
extends_documentation_fragment: aci
'''
EXAMPLES = r'''
- name: Associate an Interface Access Port Selector to an Interface Policy Leaf Profile with a Policy Group
aci_access_port_to_interface_policy_leaf_profile:
host: apic
username: admin
password: SomeSecretPassword
leaf_interface_profile: leafintprfname
access_port_selector: accessportselectorname
leaf_port_blk: leafportblkname
from: 13
to: 16
policy_group: policygroupname
state: present
- name: Associate an interface access port selector to an Interface Policy Leaf Profile (without a policy group)
aci_access_port_to_interface_policy_leaf_profile:
host: apic
username: admin
password: SomeSecretPassword
leaf_interface_profile: leafintprfname
access_port_selector: accessportselectorname
leaf_port_blk: leafportblkname
from: 13
to: 16
state: present
- name: Remove an interface access port selector associated with an Interface Policy Leaf Profile
aci_access_port_to_interface_policy_leaf_profile:
host: apic
username: admin
password: SomeSecretPassword
leaf_interface_profile: leafintprfname
access_port_selector: accessportselectorname
state: absent
- name: Query Specific access_port_selector under given leaf_interface_profile
aci_access_port_to_interface_policy_leaf_profile:
host: apic
username: admin
password: SomeSecretPassword
leaf_interface_profile: leafintprfname
access_port_selector: accessportselectorname
state: query
'''
RETURN = r'''
current:
description: The existing configuration from the APIC after the module has finished
returned: success
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
error:
description: The error information as returned from the APIC
returned: failure
type: dict
sample:
{
"code": "122",
"text": "unknown managed object class foo"
}
raw:
description: The raw output returned by the APIC REST API (xml or json)
returned: parse error
type: string
sample: '<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1"><error code="122" text="unknown managed object class foo"/></imdata>'
sent:
description: The actual/minimal configuration pushed to the APIC
returned: info
type: list
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment"
}
}
}
previous:
description: The original configuration from the APIC before the module has started
returned: info
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
proposed:
description: The assembled configuration from the user-provided parameters
returned: info
type: dict
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"name": "production"
}
}
}
filter_string:
description: The filter string used for the request
returned: failure or debug
type: string
sample: ?rsp-prop-include=config-only
method:
description: The HTTP method used for the request to the APIC
returned: failure or debug
type: string
sample: POST
response:
description: The HTTP response from the APIC
returned: failure or debug
type: string
sample: OK (30 bytes)
status:
description: The HTTP status from the APIC
returned: failure or debug
type: int
sample: 200
url:
description: The HTTP url used for the request to the APIC
returned: failure or debug
type: string
sample: https://10.11.12.13/api/mo/uni/tn-production.json
'''
from ansible.module_utils.network.aci.aci import ACIModule, aci_argument_spec
from ansible.module_utils.basic import AnsibleModule
def main():
argument_spec = aci_argument_spec()
argument_spec.update({
'leaf_interface_profile': dict(type='str', aliases=['leaf_interface_profile_name']), # Not required for querying all objects
'access_port_selector': dict(type='str', aliases=['name', 'access_port_selector_name']), # Not required for querying all objects
        'description': dict(type='str'),
'leaf_port_blk': dict(type='str', aliases=['leaf_port_blk_name']),
'leaf_port_blk_description': dict(type='str'),
# NOTE: Keyword 'from' is a reserved word in python, so we need it as a string
'from': dict(type='str', aliases=['fromPort', 'from_port_range']),
'to': dict(type='str', aliases=['toPort', 'to_port_range']),
'policy_group': dict(type='str', aliases=['policy_group_name']),
'interface_type': dict(type='str', default='switch_port', choices=['fex', 'port_channel', 'switch_port', 'vpc']),
'state': dict(type='str', default='present', choices=['absent', 'present', 'query']),
})
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
required_if=[
['state', 'absent', ['leaf_interface_profile', 'access_port_selector']],
['state', 'present', ['leaf_interface_profile', 'access_port_selector']],
],
)
leaf_interface_profile = module.params['leaf_interface_profile']
access_port_selector = module.params['access_port_selector']
description = module.params['description']
leaf_port_blk = module.params['leaf_port_blk']
leaf_port_blk_description = module.params['leaf_port_blk_description']
from_ = module.params['from']
to_ = module.params['to']
policy_group = module.params['policy_group']
interface_type = module.params['interface_type']
state = module.params['state']
aci = ACIModule(module)
aci.construct_url(
root_class=dict(
aci_class='infraAccPortP',
aci_rn='infra/accportprof-{0}'.format(leaf_interface_profile),
filter_target='eq(infraAccPortP.name, "{0}")'.format(leaf_interface_profile),
module_object=leaf_interface_profile
),
subclass_1=dict(
aci_class='infraHPortS',
# NOTE: normal rn: hports-{name}-typ-{type}, hence here hardcoded to range for purposes of module
aci_rn='hports-{0}-typ-range'.format(access_port_selector),
filter_target='eq(infraHPortS.name, "{0}")'.format(access_port_selector),
module_object=access_port_selector,
),
child_classes=['infraPortBlk', 'infraRsAccBaseGrp']
)
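    # Map the requested interface_type to the distinguished name of the policy
    # group it binds to: switch_port and fex use access port groups (accportgrp),
    # while port_channel and vpc use bundle groups (accbundle).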
INTERFACE_TYPE_MAPPING = dict(
fex='uni/infra/funcprof/accportgrp-{0}'.format(policy_group),
port_channel='uni/infra/funcprof/accbundle-{0}'.format(policy_group),
switch_port='uni/infra/funcprof/accportgrp-{0}'.format(policy_group),
vpc='uni/infra/funcprof/accbundle-{0}'.format(policy_group),
)
aci.get_existing()
if state == 'present':
aci.payload(
aci_class='infraHPortS',
class_config=dict(
descr=description,
name=access_port_selector,
),
child_configs=[
dict(
infraPortBlk=dict(
attributes=dict(
descr=leaf_port_blk_description,
name=leaf_port_blk,
fromPort=from_,
toPort=to_,
),
),
),
dict(
infraRsAccBaseGrp=dict(
attributes=dict(
tDn=INTERFACE_TYPE_MAPPING[interface_type],
),
),
),
],
)
aci.get_diff(aci_class='infraHPortS')
aci.post_config()
elif state == 'absent':
aci.delete_config()
aci.exit_json()
if __name__ == "__main__":
main()
| lmprice/ansible | lib/ansible/modules/network/aci/aci_access_port_to_interface_policy_leaf_profile.py | Python | gpl-3.0 | 11,226 |
import locale
import sys
from matplotlib import pylab as plt
from scipy.interpolate import interp1d
import numpy
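# parse_pRDF reads a pRDF text file. The layout, as inferred from the parsing
# below, is: a "dstep <value>" line, a "nstep <value>" line, and one line per
# atom type (e.g. "C", "HC", "N") followed by its density values.
# It returns (pRDFs, dstep), where pRDFs maps atom type -> list of floats.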
def parse_pRDF(f):
pRDFs={}
for line in open(f).readlines():
words=line.split()
if words[0]=="dstep":
dstep=locale.atof(words[1])
continue
elif words[0]=="nstep":
nstep=locale.atof(words[1])
continue
else:
atom_type = words[0]
pRDF=[]
for word in words[1:]:
pRDF.append(locale.atof(word))
pRDFs[atom_type]=pRDF
return (pRDFs,dstep)
f=sys.argv[1]
(pRDFs, dstep)=parse_pRDF(f)
colors={"C":"k", "HC":"r", "N":"b", "HN":"g", "O":"m", "HO":"y", "S":"c"}
X = numpy.linspace(0, 10+dstep, int(10/dstep)+1)
scale=3
Xf = numpy.linspace(0, 10+dstep, (int(10/dstep)+1)*scale)
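# Xf is a 'scale' times finer grid meant for the cubic interpolation that is
# currently commented out below; only the coarse grid X is actually plotted.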
for atom_type, pRDF in pRDFs.iteritems():
Y=numpy.array(pRDF)
#f = interp1d(X,Y,kind='cubic')
plt.plot(X[:-1],Y[:-1],colors[atom_type], label=atom_type)
#plt.plot(Xf[:-scale],f(Xf)[:-scale],colors[atom_type], label=atom_type)
plt.hold(True)
plt.legend()
plt.xlabel("R (A)")
plt.ylabel("protons/A3")
plt.show()
| madscatt/zazzie | src_2.7/sassie/calculate/old_sascalc/hypred_library/run_sassie/plot.py | Python | gpl-3.0 | 1,043 |
"""Tests for Lib/fractions.py."""
from decimal import Decimal
from test.support import run_unittest
import math
import numbers
import operator
import fractions
import unittest
from copy import copy, deepcopy
from pickle import dumps, loads
F = fractions.Fraction
gcd = fractions.gcd
class DummyFloat(object):
"""Dummy float class for testing comparisons with Fractions"""
def __init__(self, value):
if not isinstance(value, float):
raise TypeError("DummyFloat can only be initialized from float")
self.value = value
def _richcmp(self, other, op):
if isinstance(other, numbers.Rational):
return op(F.from_float(self.value), other)
elif isinstance(other, DummyFloat):
return op(self.value, other.value)
else:
return NotImplemented
def __eq__(self, other): return self._richcmp(other, operator.eq)
def __le__(self, other): return self._richcmp(other, operator.le)
def __lt__(self, other): return self._richcmp(other, operator.lt)
def __ge__(self, other): return self._richcmp(other, operator.ge)
def __gt__(self, other): return self._richcmp(other, operator.gt)
# shouldn't be calling __float__ at all when doing comparisons
def __float__(self):
assert False, "__float__ should not be invoked for comparisons"
# same goes for subtraction
def __sub__(self, other):
assert False, "__sub__ should not be invoked for comparisons"
__rsub__ = __sub__
class DummyRational(object):
"""Test comparison of Fraction with a naive rational implementation."""
def __init__(self, num, den):
g = gcd(num, den)
self.num = num // g
self.den = den // g
def __eq__(self, other):
if isinstance(other, fractions.Fraction):
return (self.num == other._numerator and
self.den == other._denominator)
else:
return NotImplemented
def __lt__(self, other):
return(self.num * other._denominator < self.den * other._numerator)
def __gt__(self, other):
return(self.num * other._denominator > self.den * other._numerator)
def __le__(self, other):
return(self.num * other._denominator <= self.den * other._numerator)
def __ge__(self, other):
return(self.num * other._denominator >= self.den * other._numerator)
# this class is for testing comparisons; conversion to float
# should never be used for a comparison, since it loses accuracy
def __float__(self):
assert False, "__float__ should not be invoked"
class GcdTest(unittest.TestCase):
def testMisc(self):
self.assertEquals(0, gcd(0, 0))
self.assertEquals(1, gcd(1, 0))
self.assertEquals(-1, gcd(-1, 0))
self.assertEquals(1, gcd(0, 1))
self.assertEquals(-1, gcd(0, -1))
self.assertEquals(1, gcd(7, 1))
self.assertEquals(-1, gcd(7, -1))
self.assertEquals(1, gcd(-23, 15))
self.assertEquals(12, gcd(120, 84))
self.assertEquals(-12, gcd(84, -120))
def _components(r):
return (r.numerator, r.denominator)
class FractionTest(unittest.TestCase):
def assertTypedEquals(self, expected, actual):
"""Asserts that both the types and values are the same."""
self.assertEquals(type(expected), type(actual))
self.assertEquals(expected, actual)
def assertRaisesMessage(self, exc_type, message,
callable, *args, **kwargs):
"""Asserts that callable(*args, **kwargs) raises exc_type(message)."""
try:
callable(*args, **kwargs)
except exc_type as e:
self.assertEquals(message, str(e))
else:
self.fail("%s not raised" % exc_type.__name__)
def testInit(self):
self.assertEquals((0, 1), _components(F()))
self.assertEquals((7, 1), _components(F(7)))
self.assertEquals((7, 3), _components(F(F(7, 3))))
self.assertEquals((-1, 1), _components(F(-1, 1)))
self.assertEquals((-1, 1), _components(F(1, -1)))
self.assertEquals((1, 1), _components(F(-2, -2)))
self.assertEquals((1, 2), _components(F(5, 10)))
self.assertEquals((7, 15), _components(F(7, 15)))
self.assertEquals((10**23, 1), _components(F(10**23)))
self.assertEquals((3, 77), _components(F(F(3, 7), 11)))
self.assertEquals((-9, 5), _components(F(2, F(-10, 9))))
self.assertEquals((2486, 2485), _components(F(F(22, 7), F(355, 113))))
self.assertRaisesMessage(ZeroDivisionError, "Fraction(12, 0)",
F, 12, 0)
self.assertRaises(TypeError, F, 1.5)
self.assertRaises(TypeError, F, 1.5 + 3j)
self.assertRaises(TypeError, F, "3/2", 3)
self.assertRaises(TypeError, F, 3, 0j)
self.assertRaises(TypeError, F, 3, 1j)
def testFromString(self):
self.assertEquals((5, 1), _components(F("5")))
self.assertEquals((3, 2), _components(F("3/2")))
self.assertEquals((3, 2), _components(F(" \n +3/2")))
self.assertEquals((-3, 2), _components(F("-3/2 ")))
self.assertEquals((13, 2), _components(F(" 013/02 \n ")))
self.assertEquals((16, 5), _components(F(" 3.2 ")))
self.assertEquals((-16, 5), _components(F(" -3.2 ")))
self.assertEquals((-3, 1), _components(F(" -3. ")))
self.assertEquals((3, 5), _components(F(" .6 ")))
self.assertEquals((1, 3125), _components(F("32.e-5")))
self.assertEquals((1000000, 1), _components(F("1E+06")))
self.assertEquals((-12300, 1), _components(F("-1.23e4")))
self.assertEquals((0, 1), _components(F(" .0e+0\t")))
self.assertEquals((0, 1), _components(F("-0.000e0")))
self.assertRaisesMessage(
ZeroDivisionError, "Fraction(3, 0)",
F, "3/0")
self.assertRaisesMessage(
ValueError, "Invalid literal for Fraction: '3/'",
F, "3/")
self.assertRaisesMessage(
ValueError, "Invalid literal for Fraction: '/2'",
F, "/2")
self.assertRaisesMessage(
ValueError, "Invalid literal for Fraction: '3 /2'",
F, "3 /2")
self.assertRaisesMessage(
# Denominators don't need a sign.
ValueError, "Invalid literal for Fraction: '3/+2'",
F, "3/+2")
self.assertRaisesMessage(
# Imitate float's parsing.
ValueError, "Invalid literal for Fraction: '+ 3/2'",
F, "+ 3/2")
self.assertRaisesMessage(
# Avoid treating '.' as a regex special character.
ValueError, "Invalid literal for Fraction: '3a2'",
F, "3a2")
self.assertRaisesMessage(
# Don't accept combinations of decimals and rationals.
ValueError, "Invalid literal for Fraction: '3/7.2'",
F, "3/7.2")
self.assertRaisesMessage(
# Don't accept combinations of decimals and rationals.
ValueError, "Invalid literal for Fraction: '3.2/7'",
F, "3.2/7")
self.assertRaisesMessage(
# Allow 3. and .3, but not .
ValueError, "Invalid literal for Fraction: '.'",
F, ".")
def testImmutable(self):
r = F(7, 3)
r.__init__(2, 15)
self.assertEquals((7, 3), _components(r))
self.assertRaises(AttributeError, setattr, r, 'numerator', 12)
self.assertRaises(AttributeError, setattr, r, 'denominator', 6)
self.assertEquals((7, 3), _components(r))
# But if you _really_ need to:
r._numerator = 4
r._denominator = 2
self.assertEquals((4, 2), _components(r))
# Which breaks some important operations:
self.assertNotEquals(F(4, 2), r)
def testFromFloat(self):
self.assertRaises(TypeError, F.from_float, 3+4j)
self.assertEquals((10, 1), _components(F.from_float(10)))
bigint = 1234567890123456789
self.assertEquals((bigint, 1), _components(F.from_float(bigint)))
self.assertEquals((0, 1), _components(F.from_float(-0.0)))
self.assertEquals((10, 1), _components(F.from_float(10.0)))
self.assertEquals((-5, 2), _components(F.from_float(-2.5)))
self.assertEquals((99999999999999991611392, 1),
_components(F.from_float(1e23)))
self.assertEquals(float(10**23), float(F.from_float(1e23)))
self.assertEquals((3602879701896397, 1125899906842624),
_components(F.from_float(3.2)))
self.assertEquals(3.2, float(F.from_float(3.2)))
inf = 1e1000
nan = inf - inf
self.assertRaisesMessage(
TypeError, "Cannot convert inf to Fraction.",
F.from_float, inf)
self.assertRaisesMessage(
TypeError, "Cannot convert -inf to Fraction.",
F.from_float, -inf)
self.assertRaisesMessage(
TypeError, "Cannot convert nan to Fraction.",
F.from_float, nan)
def testFromDecimal(self):
self.assertRaises(TypeError, F.from_decimal, 3+4j)
self.assertEquals(F(10, 1), F.from_decimal(10))
self.assertEquals(F(0), F.from_decimal(Decimal("-0")))
self.assertEquals(F(5, 10), F.from_decimal(Decimal("0.5")))
self.assertEquals(F(5, 1000), F.from_decimal(Decimal("5e-3")))
self.assertEquals(F(5000), F.from_decimal(Decimal("5e3")))
self.assertEquals(1 - F(1, 10**30),
F.from_decimal(Decimal("0." + "9" * 30)))
self.assertRaisesMessage(
TypeError, "Cannot convert Infinity to Fraction.",
F.from_decimal, Decimal("inf"))
self.assertRaisesMessage(
TypeError, "Cannot convert -Infinity to Fraction.",
F.from_decimal, Decimal("-inf"))
self.assertRaisesMessage(
TypeError, "Cannot convert NaN to Fraction.",
F.from_decimal, Decimal("nan"))
self.assertRaisesMessage(
TypeError, "Cannot convert sNaN to Fraction.",
F.from_decimal, Decimal("snan"))
def testLimitDenominator(self):
rpi = F('3.1415926535897932')
self.assertEqual(rpi.limit_denominator(10000), F(355, 113))
self.assertEqual(-rpi.limit_denominator(10000), F(-355, 113))
self.assertEqual(rpi.limit_denominator(113), F(355, 113))
self.assertEqual(rpi.limit_denominator(112), F(333, 106))
self.assertEqual(F(201, 200).limit_denominator(100), F(1))
self.assertEqual(F(201, 200).limit_denominator(101), F(102, 101))
self.assertEqual(F(0).limit_denominator(10000), F(0))
def testConversions(self):
self.assertTypedEquals(-1, math.trunc(F(-11, 10)))
self.assertTypedEquals(-2, math.floor(F(-11, 10)))
self.assertTypedEquals(-1, math.ceil(F(-11, 10)))
self.assertTypedEquals(-1, math.ceil(F(-10, 10)))
self.assertTypedEquals(-1, int(F(-11, 10)))
self.assertTypedEquals(0, round(F(-1, 10)))
self.assertTypedEquals(0, round(F(-5, 10)))
self.assertTypedEquals(-2, round(F(-15, 10)))
self.assertTypedEquals(-1, round(F(-7, 10)))
self.assertEquals(False, bool(F(0, 1)))
self.assertEquals(True, bool(F(3, 2)))
self.assertTypedEquals(0.1, float(F(1, 10)))
# Check that __float__ isn't implemented by converting the
# numerator and denominator to float before dividing.
self.assertRaises(OverflowError, float, int('2'*400+'7'))
self.assertAlmostEquals(2.0/3,
float(F(int('2'*400+'7'), int('3'*400+'1'))))
self.assertTypedEquals(0.1+0j, complex(F(1,10)))
def testRound(self):
self.assertTypedEquals(F(-200), round(F(-150), -2))
self.assertTypedEquals(F(-200), round(F(-250), -2))
self.assertTypedEquals(F(30), round(F(26), -1))
self.assertTypedEquals(F(-2, 10), round(F(-15, 100), 1))
self.assertTypedEquals(F(-2, 10), round(F(-25, 100), 1))
def testArithmetic(self):
self.assertEquals(F(1, 2), F(1, 10) + F(2, 5))
self.assertEquals(F(-3, 10), F(1, 10) - F(2, 5))
self.assertEquals(F(1, 25), F(1, 10) * F(2, 5))
self.assertEquals(F(1, 4), F(1, 10) / F(2, 5))
self.assertTypedEquals(2, F(9, 10) // F(2, 5))
self.assertTypedEquals(10**23, F(10**23, 1) // F(1))
self.assertEquals(F(2, 3), F(-7, 3) % F(3, 2))
self.assertEquals(F(8, 27), F(2, 3) ** F(3))
self.assertEquals(F(27, 8), F(2, 3) ** F(-3))
self.assertTypedEquals(2.0, F(4) ** F(1, 2))
z = pow(F(-1), F(1, 2))
self.assertAlmostEquals(z.real, 0)
self.assertEquals(z.imag, 1)
def testMixedArithmetic(self):
self.assertTypedEquals(F(11, 10), F(1, 10) + 1)
self.assertTypedEquals(1.1, F(1, 10) + 1.0)
self.assertTypedEquals(1.1 + 0j, F(1, 10) + (1.0 + 0j))
self.assertTypedEquals(F(11, 10), 1 + F(1, 10))
self.assertTypedEquals(1.1, 1.0 + F(1, 10))
self.assertTypedEquals(1.1 + 0j, (1.0 + 0j) + F(1, 10))
self.assertTypedEquals(F(-9, 10), F(1, 10) - 1)
self.assertTypedEquals(-0.9, F(1, 10) - 1.0)
self.assertTypedEquals(-0.9 + 0j, F(1, 10) - (1.0 + 0j))
self.assertTypedEquals(F(9, 10), 1 - F(1, 10))
self.assertTypedEquals(0.9, 1.0 - F(1, 10))
self.assertTypedEquals(0.9 + 0j, (1.0 + 0j) - F(1, 10))
self.assertTypedEquals(F(1, 10), F(1, 10) * 1)
self.assertTypedEquals(0.1, F(1, 10) * 1.0)
self.assertTypedEquals(0.1 + 0j, F(1, 10) * (1.0 + 0j))
self.assertTypedEquals(F(1, 10), 1 * F(1, 10))
self.assertTypedEquals(0.1, 1.0 * F(1, 10))
self.assertTypedEquals(0.1 + 0j, (1.0 + 0j) * F(1, 10))
self.assertTypedEquals(F(1, 10), F(1, 10) / 1)
self.assertTypedEquals(0.1, F(1, 10) / 1.0)
self.assertTypedEquals(0.1 + 0j, F(1, 10) / (1.0 + 0j))
self.assertTypedEquals(F(10, 1), 1 / F(1, 10))
self.assertTypedEquals(10.0, 1.0 / F(1, 10))
self.assertTypedEquals(10.0 + 0j, (1.0 + 0j) / F(1, 10))
self.assertTypedEquals(0, F(1, 10) // 1)
self.assertTypedEquals(0, F(1, 10) // 1.0)
self.assertTypedEquals(10, 1 // F(1, 10))
self.assertTypedEquals(10**23, 10**22 // F(1, 10))
self.assertTypedEquals(10, 1.0 // F(1, 10))
self.assertTypedEquals(F(1, 10), F(1, 10) % 1)
self.assertTypedEquals(0.1, F(1, 10) % 1.0)
self.assertTypedEquals(F(0, 1), 1 % F(1, 10))
self.assertTypedEquals(0.0, 1.0 % F(1, 10))
# No need for divmod since we don't override it.
# ** has more interesting conversion rules.
self.assertTypedEquals(F(100, 1), F(1, 10) ** -2)
self.assertTypedEquals(F(100, 1), F(10, 1) ** 2)
self.assertTypedEquals(0.1, F(1, 10) ** 1.0)
self.assertTypedEquals(0.1 + 0j, F(1, 10) ** (1.0 + 0j))
self.assertTypedEquals(4 , 2 ** F(2, 1))
z = pow(-1, F(1, 2))
self.assertAlmostEquals(0, z.real)
self.assertEquals(1, z.imag)
self.assertTypedEquals(F(1, 4) , 2 ** F(-2, 1))
self.assertTypedEquals(2.0 , 4 ** F(1, 2))
self.assertTypedEquals(0.25, 2.0 ** F(-2, 1))
self.assertTypedEquals(1.0 + 0j, (1.0 + 0j) ** F(1, 10))
def testMixingWithDecimal(self):
# Decimal refuses mixed comparisons.
self.assertRaisesMessage(
TypeError,
"unsupported operand type(s) for +: 'Fraction' and 'Decimal'",
operator.add, F(3,11), Decimal('3.1415926'))
self.assertNotEquals(F(5, 2), Decimal('2.5'))
def testComparisons(self):
self.assertTrue(F(1, 2) < F(2, 3))
self.assertFalse(F(1, 2) < F(1, 2))
self.assertTrue(F(1, 2) <= F(2, 3))
self.assertTrue(F(1, 2) <= F(1, 2))
self.assertFalse(F(2, 3) <= F(1, 2))
self.assertTrue(F(1, 2) == F(1, 2))
self.assertFalse(F(1, 2) == F(1, 3))
self.assertFalse(F(1, 2) != F(1, 2))
self.assertTrue(F(1, 2) != F(1, 3))
def testComparisonsDummyRational(self):
self.assertTrue(F(1, 2) == DummyRational(1, 2))
self.assertTrue(DummyRational(1, 2) == F(1, 2))
self.assertFalse(F(1, 2) == DummyRational(3, 4))
self.assertFalse(DummyRational(3, 4) == F(1, 2))
self.assertTrue(F(1, 2) < DummyRational(3, 4))
self.assertFalse(F(1, 2) < DummyRational(1, 2))
self.assertFalse(F(1, 2) < DummyRational(1, 7))
self.assertFalse(F(1, 2) > DummyRational(3, 4))
self.assertFalse(F(1, 2) > DummyRational(1, 2))
self.assertTrue(F(1, 2) > DummyRational(1, 7))
self.assertTrue(F(1, 2) <= DummyRational(3, 4))
self.assertTrue(F(1, 2) <= DummyRational(1, 2))
self.assertFalse(F(1, 2) <= DummyRational(1, 7))
self.assertFalse(F(1, 2) >= DummyRational(3, 4))
self.assertTrue(F(1, 2) >= DummyRational(1, 2))
self.assertTrue(F(1, 2) >= DummyRational(1, 7))
self.assertTrue(DummyRational(1, 2) < F(3, 4))
self.assertFalse(DummyRational(1, 2) < F(1, 2))
self.assertFalse(DummyRational(1, 2) < F(1, 7))
self.assertFalse(DummyRational(1, 2) > F(3, 4))
self.assertFalse(DummyRational(1, 2) > F(1, 2))
self.assertTrue(DummyRational(1, 2) > F(1, 7))
self.assertTrue(DummyRational(1, 2) <= F(3, 4))
self.assertTrue(DummyRational(1, 2) <= F(1, 2))
self.assertFalse(DummyRational(1, 2) <= F(1, 7))
self.assertFalse(DummyRational(1, 2) >= F(3, 4))
self.assertTrue(DummyRational(1, 2) >= F(1, 2))
self.assertTrue(DummyRational(1, 2) >= F(1, 7))
def testComparisonsDummyFloat(self):
x = DummyFloat(1./3.)
y = F(1, 3)
self.assertTrue(x != y)
self.assertTrue(x < y or x > y)
self.assertFalse(x == y)
self.assertFalse(x <= y and x >= y)
self.assertTrue(y != x)
self.assertTrue(y < x or y > x)
self.assertFalse(y == x)
self.assertFalse(y <= x and y >= x)
def testMixedLess(self):
self.assertTrue(2 < F(5, 2))
self.assertFalse(2 < F(4, 2))
self.assertTrue(F(5, 2) < 3)
self.assertFalse(F(4, 2) < 2)
self.assertTrue(F(1, 2) < 0.6)
self.assertFalse(F(1, 2) < 0.4)
self.assertTrue(0.4 < F(1, 2))
self.assertFalse(0.5 < F(1, 2))
self.assertFalse(float('inf') < F(1, 2))
self.assertTrue(float('-inf') < F(0, 10))
self.assertFalse(float('nan') < F(-3, 7))
self.assertTrue(F(1, 2) < float('inf'))
self.assertFalse(F(17, 12) < float('-inf'))
self.assertFalse(F(144, -89) < float('nan'))
def testMixedLessEqual(self):
self.assertTrue(0.5 <= F(1, 2))
self.assertFalse(0.6 <= F(1, 2))
self.assertTrue(F(1, 2) <= 0.5)
self.assertFalse(F(1, 2) <= 0.4)
self.assertTrue(2 <= F(4, 2))
self.assertFalse(2 <= F(3, 2))
self.assertTrue(F(4, 2) <= 2)
self.assertFalse(F(5, 2) <= 2)
self.assertFalse(float('inf') <= F(1, 2))
self.assertTrue(float('-inf') <= F(0, 10))
self.assertFalse(float('nan') <= F(-3, 7))
self.assertTrue(F(1, 2) <= float('inf'))
self.assertFalse(F(17, 12) <= float('-inf'))
self.assertFalse(F(144, -89) <= float('nan'))
def testBigFloatComparisons(self):
# Because 10**23 can't be represented exactly as a float:
self.assertFalse(F(10**23) == float(10**23))
# The first test demonstrates why these are important.
self.assertFalse(1e23 < float(F(math.trunc(1e23) + 1)))
self.assertTrue(1e23 < F(math.trunc(1e23) + 1))
self.assertFalse(1e23 <= F(math.trunc(1e23) - 1))
self.assertTrue(1e23 > F(math.trunc(1e23) - 1))
self.assertFalse(1e23 >= F(math.trunc(1e23) + 1))
def testBigComplexComparisons(self):
self.assertFalse(F(10**23) == complex(10**23))
self.assertTrue(F(10**23) > complex(10**23))
self.assertFalse(F(10**23) <= complex(10**23))
def testMixedEqual(self):
self.assertTrue(0.5 == F(1, 2))
self.assertFalse(0.6 == F(1, 2))
self.assertTrue(F(1, 2) == 0.5)
self.assertFalse(F(1, 2) == 0.4)
self.assertTrue(2 == F(4, 2))
self.assertFalse(2 == F(3, 2))
self.assertTrue(F(4, 2) == 2)
self.assertFalse(F(5, 2) == 2)
self.assertFalse(F(5, 2) == float('nan'))
self.assertFalse(float('nan') == F(3, 7))
self.assertFalse(F(5, 2) == float('inf'))
self.assertFalse(float('-inf') == F(2, 5))
def testStringification(self):
self.assertEquals("Fraction(7, 3)", repr(F(7, 3)))
self.assertEquals("Fraction(6283185307, 2000000000)",
repr(F('3.1415926535')))
self.assertEquals("Fraction(-1, 100000000000000000000)",
repr(F(1, -10**20)))
self.assertEquals("7/3", str(F(7, 3)))
self.assertEquals("7", str(F(7, 1)))
def testHash(self):
self.assertEquals(hash(2.5), hash(F(5, 2)))
self.assertEquals(hash(10**50), hash(F(10**50)))
self.assertNotEquals(hash(float(10**23)), hash(F(10**23)))
def testApproximatePi(self):
# Algorithm borrowed from
# http://docs.python.org/lib/decimal-recipes.html
three = F(3)
lasts, t, s, n, na, d, da = 0, three, 3, 1, 0, 0, 24
while abs(s - lasts) > F(1, 10**9):
lasts = s
n, na = n+na, na+8
d, da = d+da, da+32
t = (t * n) / d
s += t
self.assertAlmostEquals(math.pi, s)
def testApproximateCos1(self):
# Algorithm borrowed from
# http://docs.python.org/lib/decimal-recipes.html
x = F(1)
i, lasts, s, fact, num, sign = 0, 0, F(1), 1, 1, 1
while abs(s - lasts) > F(1, 10**9):
lasts = s
i += 2
fact *= i * (i-1)
num *= x * x
sign *= -1
s += num / fact * sign
self.assertAlmostEquals(math.cos(1), s)
def test_copy_deepcopy_pickle(self):
r = F(13, 7)
self.assertEqual(r, loads(dumps(r)))
self.assertEqual(id(r), id(copy(r)))
self.assertEqual(id(r), id(deepcopy(r)))
def test_slots(self):
# Issue 4998
r = F(13, 7)
self.assertRaises(AttributeError, setattr, r, 'a', 10)
def test_main():
run_unittest(FractionTest, GcdTest)
if __name__ == '__main__':
test_main()
| mancoast/CPythonPyc_test | fail/312_test_fractions.py | Python | gpl-3.0 | 22,695 |
import _surface
import chimera
try:
import chimera.runCommand
except:
pass
from VolumePath import markerset as ms
try:
from VolumePath import Marker_Set, Link
new_marker_set=Marker_Set
except:
from VolumePath import volume_path_dialog
d= volume_path_dialog(True)
new_marker_set= d.new_marker_set
marker_sets={}
surf_sets={}
if "particle_0 geometry" not in marker_sets:
s=new_marker_set('particle_0 geometry')
marker_sets["particle_0 geometry"]=s
s= marker_sets["particle_0 geometry"]
mark=s.place_marker((3171.68, 9029.21, -1139.01), (0.7, 0.7, 0.7), 890.203)
if "particle_1 geometry" not in marker_sets:
s=new_marker_set('particle_1 geometry')
marker_sets["particle_1 geometry"]=s
s= marker_sets["particle_1 geometry"]
mark=s.place_marker((3173.32, 7641.65, -7.59267), (0.7, 0.7, 0.7), 792.956)
if "particle_2 geometry" not in marker_sets:
s=new_marker_set('particle_2 geometry')
marker_sets["particle_2 geometry"]=s
s= marker_sets["particle_2 geometry"]
mark=s.place_marker((3803.4, 8043.31, 1799.07), (0.7, 0.7, 0.7), 856.786)
if "particle_3 geometry" not in marker_sets:
s=new_marker_set('particle_3 geometry')
marker_sets["particle_3 geometry"]=s
s= marker_sets["particle_3 geometry"]
mark=s.place_marker((2666.72, 9556.45, 456.26), (0.7, 0.7, 0.7), 963.679)
if "particle_4 geometry" not in marker_sets:
s=new_marker_set('particle_4 geometry')
marker_sets["particle_4 geometry"]=s
s= marker_sets["particle_4 geometry"]
mark=s.place_marker((2401.45, 10810.3, 1711.38), (0.7, 0.7, 0.7), 761.442)
if "particle_5 geometry" not in marker_sets:
s=new_marker_set('particle_5 geometry')
marker_sets["particle_5 geometry"]=s
s= marker_sets["particle_5 geometry"]
mark=s.place_marker((3826.97, 10319.9, 3558.07), (0.7, 0.7, 0.7), 961.183)
if "particle_6 geometry" not in marker_sets:
s=new_marker_set('particle_6 geometry')
marker_sets["particle_6 geometry"]=s
s= marker_sets["particle_6 geometry"]
mark=s.place_marker((5267.4, 10161.5, 4525.44), (0.7, 0.7, 0.7), 753.151)
if "particle_7 geometry" not in marker_sets:
s=new_marker_set('particle_7 geometry')
marker_sets["particle_7 geometry"]=s
s= marker_sets["particle_7 geometry"]
mark=s.place_marker((4933.12, 10729, 4046.72), (1, 0.7, 0), 1098.07)
if "particle_8 geometry" not in marker_sets:
s=new_marker_set('particle_8 geometry')
marker_sets["particle_8 geometry"]=s
s= marker_sets["particle_8 geometry"]
mark=s.place_marker((6683.05, 9391.65, 5497.93), (0.7, 0.7, 0.7), 1010.42)
if "particle_9 geometry" not in marker_sets:
s=new_marker_set('particle_9 geometry')
marker_sets["particle_9 geometry"]=s
s= marker_sets["particle_9 geometry"]
mark=s.place_marker((8208.82, 10014.2, 5535.15), (1, 0.7, 0), 821.043)
if "particle_10 geometry" not in marker_sets:
s=new_marker_set('particle_10 geometry')
marker_sets["particle_10 geometry"]=s
s= marker_sets["particle_10 geometry"]
mark=s.place_marker((8867.56, 8468.95, 6393.47), (0.7, 0.7, 0.7), 873.876)
if "particle_11 geometry" not in marker_sets:
s=new_marker_set('particle_11 geometry')
marker_sets["particle_11 geometry"]=s
s= marker_sets["particle_11 geometry"]
mark=s.place_marker((8619.61, 7593.49, 5646.83), (0.7, 0.7, 0.7), 625.532)
if "particle_12 geometry" not in marker_sets:
s=new_marker_set('particle_12 geometry')
marker_sets["particle_12 geometry"]=s
s= marker_sets["particle_12 geometry"]
mark=s.place_marker((9013.94, 6288.27, 4803.55), (0.7, 0.7, 0.7), 880.474)
if "particle_13 geometry" not in marker_sets:
s=new_marker_set('particle_13 geometry')
marker_sets["particle_13 geometry"]=s
s= marker_sets["particle_13 geometry"]
mark=s.place_marker((8872.4, 7041.74, 3460.97), (0.7, 0.7, 0.7), 659.161)
if "particle_14 geometry" not in marker_sets:
s=new_marker_set('particle_14 geometry')
marker_sets["particle_14 geometry"]=s
s= marker_sets["particle_14 geometry"]
mark=s.place_marker((10190.2, 5903.46, 2028.9), (0.7, 0.7, 0.7), 831.745)
if "particle_15 geometry" not in marker_sets:
s=new_marker_set('particle_15 geometry')
marker_sets["particle_15 geometry"]=s
s= marker_sets["particle_15 geometry"]
mark=s.place_marker((9719.44, 3094.41, 731.581), (0.7, 0.7, 0.7), 803.065)
if "particle_16 geometry" not in marker_sets:
s=new_marker_set('particle_16 geometry')
marker_sets["particle_16 geometry"]=s
s= marker_sets["particle_16 geometry"]
mark=s.place_marker((8077.39, 2602.05, 1658.32), (0.7, 0.7, 0.7), 610.262)
if "particle_17 geometry" not in marker_sets:
s=new_marker_set('particle_17 geometry')
marker_sets["particle_17 geometry"]=s
s= marker_sets["particle_17 geometry"]
mark=s.place_marker((8868.48, 2528.27, 2892.63), (0.7, 0.7, 0.7), 741.265)
if "particle_18 geometry" not in marker_sets:
s=new_marker_set('particle_18 geometry')
marker_sets["particle_18 geometry"]=s
s= marker_sets["particle_18 geometry"]
mark=s.place_marker((9165.75, 3921.61, 3841.11), (0.7, 0.7, 0.7), 748.625)
if "particle_19 geometry" not in marker_sets:
s=new_marker_set('particle_19 geometry')
marker_sets["particle_19 geometry"]=s
s= marker_sets["particle_19 geometry"]
mark=s.place_marker((10301.5, 4486.01, 4688.54), (0.7, 0.7, 0.7), 677.181)
if "particle_20 geometry" not in marker_sets:
s=new_marker_set('particle_20 geometry')
marker_sets["particle_20 geometry"]=s
s= marker_sets["particle_20 geometry"]
mark=s.place_marker((8522.51, 5837.01, 5636.16), (0.7, 0.7, 0.7), 616.015)
if "particle_21 geometry" not in marker_sets:
s=new_marker_set('particle_21 geometry')
marker_sets["particle_21 geometry"]=s
s= marker_sets["particle_21 geometry"]
mark=s.place_marker((9493.49, 4172.69, 4953.76), (0.7, 0.7, 0.7), 653.154)
if "particle_22 geometry" not in marker_sets:
s=new_marker_set('particle_22 geometry')
marker_sets["particle_22 geometry"]=s
s= marker_sets["particle_22 geometry"]
mark=s.place_marker((9034.89, 3774.53, 5847.71), (0.7, 0.7, 0.7), 595.33)
if "particle_23 geometry" not in marker_sets:
s=new_marker_set('particle_23 geometry')
marker_sets["particle_23 geometry"]=s
s= marker_sets["particle_23 geometry"]
mark=s.place_marker((8777.35, 2523.89, 5440.65), (0.7, 0.7, 0.7), 627.901)
if "particle_24 geometry" not in marker_sets:
s=new_marker_set('particle_24 geometry')
marker_sets["particle_24 geometry"]=s
s= marker_sets["particle_24 geometry"]
mark=s.place_marker((8573.36, 2540.82, 4012.57), (0.7, 0.7, 0.7), 663.941)
if "particle_25 geometry" not in marker_sets:
s=new_marker_set('particle_25 geometry')
marker_sets["particle_25 geometry"]=s
s= marker_sets["particle_25 geometry"]
mark=s.place_marker((8998.9, 1643.92, 2683.61), (0.7, 0.7, 0.7), 663.899)
if "particle_26 geometry" not in marker_sets:
s=new_marker_set('particle_26 geometry')
marker_sets["particle_26 geometry"]=s
s= marker_sets["particle_26 geometry"]
mark=s.place_marker((8990.55, 2742.59, 3735.47), (0.7, 0.7, 0.7), 644.694)
if "particle_27 geometry" not in marker_sets:
s=new_marker_set('particle_27 geometry')
marker_sets["particle_27 geometry"]=s
s= marker_sets["particle_27 geometry"]
mark=s.place_marker((8309.5, 4779.28, 4133.88), (0.7, 0.7, 0.7), 896.802)
if "particle_28 geometry" not in marker_sets:
s=new_marker_set('particle_28 geometry')
marker_sets["particle_28 geometry"]=s
s= marker_sets["particle_28 geometry"]
mark=s.place_marker((8469.04, 4310.3, 5552.9), (0.7, 0.7, 0.7), 576.38)
if "particle_29 geometry" not in marker_sets:
s=new_marker_set('particle_29 geometry')
marker_sets["particle_29 geometry"]=s
s= marker_sets["particle_29 geometry"]
mark=s.place_marker((8405.08, 5264.39, 6396.31), (0.7, 0.7, 0.7), 635.092)
if "particle_30 geometry" not in marker_sets:
s=new_marker_set('particle_30 geometry')
marker_sets["particle_30 geometry"]=s
s= marker_sets["particle_30 geometry"]
mark=s.place_marker((8116.19, 4748.65, 6198.76), (0.7, 0.7, 0.7), 651.505)
if "particle_31 geometry" not in marker_sets:
s=new_marker_set('particle_31 geometry')
marker_sets["particle_31 geometry"]=s
s= marker_sets["particle_31 geometry"]
mark=s.place_marker((8458.16, 6381.13, 5980.73), (0.7, 0.7, 0.7), 718.042)
if "particle_32 geometry" not in marker_sets:
s=new_marker_set('particle_32 geometry')
marker_sets["particle_32 geometry"]=s
s= marker_sets["particle_32 geometry"]
mark=s.place_marker((9198.38, 4960.14, 6673.69), (0.7, 0.7, 0.7), 726.714)
if "particle_33 geometry" not in marker_sets:
s=new_marker_set('particle_33 geometry')
marker_sets["particle_33 geometry"]=s
s= marker_sets["particle_33 geometry"]
mark=s.place_marker((8369.35, 3853.39, 6088.2), (0.7, 0.7, 0.7), 673.585)
if "particle_34 geometry" not in marker_sets:
s=new_marker_set('particle_34 geometry')
marker_sets["particle_34 geometry"]=s
s= marker_sets["particle_34 geometry"]
mark=s.place_marker((8954.58, 3356.37, 5028.9), (0.7, 0.7, 0.7), 598.418)
if "particle_35 geometry" not in marker_sets:
s=new_marker_set('particle_35 geometry')
marker_sets["particle_35 geometry"]=s
s= marker_sets["particle_35 geometry"]
mark=s.place_marker((9942.46, 2604.05, 4445.91), (0.7, 0.7, 0.7), 693.382)
if "particle_36 geometry" not in marker_sets:
s=new_marker_set('particle_36 geometry')
marker_sets["particle_36 geometry"]=s
s= marker_sets["particle_36 geometry"]
mark=s.place_marker((8717.21, 4559.17, 5253.99), (0.7, 0.7, 0.7), 804.038)
if "particle_37 geometry" not in marker_sets:
s=new_marker_set('particle_37 geometry')
marker_sets["particle_37 geometry"]=s
s= marker_sets["particle_37 geometry"]
mark=s.place_marker((9086.66, 2734.02, 4935.49), (0.7, 0.7, 0.7), 816.178)
if "particle_38 geometry" not in marker_sets:
s=new_marker_set('particle_38 geometry')
marker_sets["particle_38 geometry"]=s
s= marker_sets["particle_38 geometry"]
mark=s.place_marker((8554.69, 3256.13, 5181.72), (0.7, 0.7, 0.7), 776.628)
if "particle_39 geometry" not in marker_sets:
s=new_marker_set('particle_39 geometry')
marker_sets["particle_39 geometry"]=s
s= marker_sets["particle_39 geometry"]
mark=s.place_marker((9247.67, 3166.59, 6786.78), (0.7, 0.7, 0.7), 750.656)
if "particle_40 geometry" not in marker_sets:
s=new_marker_set('particle_40 geometry')
marker_sets["particle_40 geometry"]=s
s= marker_sets["particle_40 geometry"]
mark=s.place_marker((8475.56, 4449.18, 7763.97), (0.7, 0.7, 0.7), 709.625)
if "particle_41 geometry" not in marker_sets:
s=new_marker_set('particle_41 geometry')
marker_sets["particle_41 geometry"]=s
s= marker_sets["particle_41 geometry"]
mark=s.place_marker((8596.68, 5576.75, 9330.28), (0.7, 0.7, 0.7), 927.681)
if "particle_42 geometry" not in marker_sets:
s=new_marker_set('particle_42 geometry')
marker_sets["particle_42 geometry"]=s
s= marker_sets["particle_42 geometry"]
mark=s.place_marker((8343.23, 4423.75, 11822.2), (0.7, 0.7, 0.7), 1088.21)
if "particle_43 geometry" not in marker_sets:
s=new_marker_set('particle_43 geometry')
marker_sets["particle_43 geometry"]=s
s= marker_sets["particle_43 geometry"]
mark=s.place_marker((7645.23, 6310.08, 11514.1), (0.7, 0.7, 0.7), 736.147)
if "particle_44 geometry" not in marker_sets:
s=new_marker_set('particle_44 geometry')
marker_sets["particle_44 geometry"]=s
s= marker_sets["particle_44 geometry"]
mark=s.place_marker((7651.25, 5112.44, 10427.6), (0.7, 0.7, 0.7), 861.101)
if "particle_45 geometry" not in marker_sets:
s=new_marker_set('particle_45 geometry')
marker_sets["particle_45 geometry"]=s
s= marker_sets["particle_45 geometry"]
mark=s.place_marker((7252.38, 5829.06, 8673.58), (0.7, 0.7, 0.7), 924.213)
if "particle_46 geometry" not in marker_sets:
s=new_marker_set('particle_46 geometry')
marker_sets["particle_46 geometry"]=s
s= marker_sets["particle_46 geometry"]
mark=s.place_marker((5822.24, 4545.28, 9000.62), (0.7, 0.7, 0.7), 881.828)
if "particle_47 geometry" not in marker_sets:
s=new_marker_set('particle_47 geometry')
marker_sets["particle_47 geometry"]=s
s= marker_sets["particle_47 geometry"]
mark=s.place_marker((7049.07, 3628.54, 10363), (0.7, 0.7, 0.7), 927.681)
if "particle_48 geometry" not in marker_sets:
s=new_marker_set('particle_48 geometry')
marker_sets["particle_48 geometry"]=s
s= marker_sets["particle_48 geometry"]
mark=s.place_marker((5790.73, 4510.11, 9353.05), (0.7, 0.7, 0.7), 831.576)
if "particle_49 geometry" not in marker_sets:
s=new_marker_set('particle_49 geometry')
marker_sets["particle_49 geometry"]=s
s= marker_sets["particle_49 geometry"]
mark=s.place_marker((5336.85, 5999.02, 8251.6), (0.7, 0.7, 0.7), 859.494)
if "particle_50 geometry" not in marker_sets:
s=new_marker_set('particle_50 geometry')
marker_sets["particle_50 geometry"]=s
s= marker_sets["particle_50 geometry"]
mark=s.place_marker((5443.04, 6367.25, 9673.41), (0.7, 0.7, 0.7), 704.845)
if "particle_51 geometry" not in marker_sets:
s=new_marker_set('particle_51 geometry')
marker_sets["particle_51 geometry"]=s
s= marker_sets["particle_51 geometry"]
mark=s.place_marker((5998.57, 6987.2, 8170.17), (0.7, 0.7, 0.7), 804.461)
if "particle_52 geometry" not in marker_sets:
s=new_marker_set('particle_52 geometry')
marker_sets["particle_52 geometry"]=s
s= marker_sets["particle_52 geometry"]
mark=s.place_marker((6305.8, 7375.7, 6388.49), (0.7, 0.7, 0.7), 934.111)
if "particle_53 geometry" not in marker_sets:
s=new_marker_set('particle_53 geometry')
marker_sets["particle_53 geometry"]=s
s= marker_sets["particle_53 geometry"]
mark=s.place_marker((6187.92, 8875.01, 6747.26), (0.7, 0.7, 0.7), 988.339)
if "particle_54 geometry" not in marker_sets:
s=new_marker_set('particle_54 geometry')
marker_sets["particle_54 geometry"]=s
s= marker_sets["particle_54 geometry"]
mark=s.place_marker((5813.94, 8918.86, 7379.07), (1, 0.7, 0), 803.7)
if "particle_55 geometry" not in marker_sets:
s=new_marker_set('particle_55 geometry')
marker_sets["particle_55 geometry"]=s
s= marker_sets["particle_55 geometry"]
mark=s.place_marker((5344.98, 6850.66, 7496.74), (0.7, 0.7, 0.7), 812.118)
if "particle_56 geometry" not in marker_sets:
s=new_marker_set('particle_56 geometry')
marker_sets["particle_56 geometry"]=s
s= marker_sets["particle_56 geometry"]
mark=s.place_marker((3547.24, 6649.15, 6261.57), (0.7, 0.7, 0.7), 1177.93)
if "particle_57 geometry" not in marker_sets:
s=new_marker_set('particle_57 geometry')
marker_sets["particle_57 geometry"]=s
s= marker_sets["particle_57 geometry"]
mark=s.place_marker((1739.99, 4938.35, 5834.09), (0.7, 0.7, 0.7), 1038.21)
if "particle_58 geometry" not in marker_sets:
s=new_marker_set('particle_58 geometry')
marker_sets["particle_58 geometry"]=s
s= marker_sets["particle_58 geometry"]
mark=s.place_marker((1359.58, 4478.74, 5766.47), (1, 0.7, 0), 758.016)
if "particle_59 geometry" not in marker_sets:
s=new_marker_set('particle_59 geometry')
marker_sets["particle_59 geometry"]=s
s= marker_sets["particle_59 geometry"]
mark=s.place_marker((962.469, 5161.48, 5521.67), (0.7, 0.7, 0.7), 824.046)
if "particle_60 geometry" not in marker_sets:
s=new_marker_set('particle_60 geometry')
marker_sets["particle_60 geometry"]=s
s= marker_sets["particle_60 geometry"]
mark=s.place_marker((1623.11, 5203.75, 5999.69), (0.7, 0.7, 0.7), 793.379)
if "particle_61 geometry" not in marker_sets:
s=new_marker_set('particle_61 geometry')
marker_sets["particle_61 geometry"]=s
s= marker_sets["particle_61 geometry"]
mark=s.place_marker((908.239, 5438.28, 6510.08), (0.7, 0.7, 0.7), 1011.56)
if "particle_62 geometry" not in marker_sets:
s=new_marker_set('particle_62 geometry')
marker_sets["particle_62 geometry"]=s
s= marker_sets["particle_62 geometry"]
mark=s.place_marker((2722.36, 5848.41, 6088.2), (0.7, 0.7, 0.7), 1097.01)
if "particle_63 geometry" not in marker_sets:
s=new_marker_set('particle_63 geometry')
marker_sets["particle_63 geometry"]=s
s= marker_sets["particle_63 geometry"]
mark=s.place_marker((844.664, 6076.2, 6217.11), (0.7, 0.7, 0.7), 851.626)
if "particle_64 geometry" not in marker_sets:
s=new_marker_set('particle_64 geometry')
marker_sets["particle_64 geometry"]=s
s= marker_sets["particle_64 geometry"]
mark=s.place_marker((-1152.26, 5619.77, 6199.34), (0.7, 0.7, 0.7), 869.434)
if "particle_65 geometry" not in marker_sets:
s=new_marker_set('particle_65 geometry')
marker_sets["particle_65 geometry"]=s
s= marker_sets["particle_65 geometry"]
mark=s.place_marker((-307.851, 5766.97, 7789.24), (0.7, 0.7, 0.7), 818.463)
if "particle_66 geometry" not in marker_sets:
s=new_marker_set('particle_66 geometry')
marker_sets["particle_66 geometry"]=s
s= marker_sets["particle_66 geometry"]
mark=s.place_marker((-820.689, 4185.61, 7920.82), (0.7, 0.7, 0.7), 759.539)
if "particle_67 geometry" not in marker_sets:
s=new_marker_set('particle_67 geometry')
marker_sets["particle_67 geometry"]=s
s= marker_sets["particle_67 geometry"]
mark=s.place_marker((1117.85, 5548.01, 7239.77), (0.7, 0.7, 0.7), 1088.59)
if "particle_68 geometry" not in marker_sets:
s=new_marker_set('particle_68 geometry')
marker_sets["particle_68 geometry"]=s
s= marker_sets["particle_68 geometry"]
mark=s.place_marker((-696.022, 5054.33, 6438.05), (0.7, 0.7, 0.7), 822.312)
if "particle_69 geometry" not in marker_sets:
s=new_marker_set('particle_69 geometry')
marker_sets["particle_69 geometry"]=s
s= marker_sets["particle_69 geometry"]
mark=s.place_marker((-1176.59, 3758.02, 7301.85), (0.7, 0.7, 0.7), 749.81)
if "particle_70 geometry" not in marker_sets:
s=new_marker_set('particle_70 geometry')
marker_sets["particle_70 geometry"]=s
s= marker_sets["particle_70 geometry"]
mark=s.place_marker((185.745, 3054.8, 7200.01), (0.7, 0.7, 0.7), 764.488)
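# finally, register every surface model collected in surf_sets with the running Chimera session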
for k in surf_sets.keys():
chimera.openModels.add([surf_sets[k]])
| batxes/4Cin | SHH_WT_models/SHH_WT_models_final_output_0.1_-0.1_11000/SHH_WT_models12702.py | Python | gpl-3.0 | 17,583 |
"""User preferences for KlustaViewa."""
# -----------------------------------------------------------------------------
# Imports
# -----------------------------------------------------------------------------
import logging
import numpy as np
# -----------------------------------------------------------------------------
# Logging
# -----------------------------------------------------------------------------
# Console logging level, can be DEBUG, INFO or WARNING.
loglevel = logging.INFO
# Level of the logging file. DEBUG, INFO or WARNING, or just None to disable.
loglevel_file = logging.INFO
# -----------------------------------------------------------------------------
# Main window
# -----------------------------------------------------------------------------
# Should the software ask the user to save upon closing?
prompt_save_on_exit = True
delay_timer = .05
delay_buffer = .1
# -----------------------------------------------------------------------------
# Similarity matrix
# -----------------------------------------------------------------------------
similarity_measure = 'gaussian' # or 'kl' for KL divergence
# -----------------------------------------------------------------------------
# Waveform view
# -----------------------------------------------------------------------------
# Approximate maximum number of spikes per cluster to show. Should be
# about 100 for low-end graphics cards, 1000 for high-end ones.
waveforms_nspikes_max_expected = 100
# The minimum number of spikes per cluster to display.
waveforms_nspikes_per_cluster_min = 10
# -----------------------------------------------------------------------------
# Feature view
# -----------------------------------------------------------------------------
# Opacity value of the background spikes.
feature_background_alpha = .25
# Opacity value of the spikes in the selected clusters.
feature_selected_alpha = .75
# Number of spikes to show in the background.
features_nspikes_background_max = 10000
# Maximum number of spikes per cluster to show.
features_nspikes_per_cluster_max = 1000
# Unit of the spike time in the feature view. Can be 'samples' or 'second'.
features_info_time_unit = 'second'
# -----------------------------------------------------------------------------
# Correlograms view
# -----------------------------------------------------------------------------
# Maximum number of clusters to show in the correlograms view.
correlograms_max_nclusters = 20
correlograms_nexcerpts = 100
correlograms_excerpt_size = 20000
# -----------------------------------------------------------------------------
# IPython import path
# -----------------------------------------------------------------------------
# Paths where all .py files are loaded in IPython view.
# "~" corresponds to the user home path, C:\Users\Username\ on Windows,
# /home/username/ on Linux, etc.
ipython_import_paths = ['~/.kwiklib/code']
# -----------------------------------------------------------------------------
# Unit tests
# -----------------------------------------------------------------------------
# Delay between two successive automatic operations in unit tests for views.
test_operator_delay = .1
# Whether to automatically close the views during unit testing.
test_auto_close = True
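# -----------------------------------------------------------------------------
# Example usage (illustrative sketch, not part of the original settings file)
# -----------------------------------------------------------------------------
# Settings here are plain module-level names, so callers typically read them
# with getattr() and fall back to a default when a name is absent.  The name
# 'some_future_option' below is hypothetical and used only for illustration.
if __name__ == '__main__':
    import sys
    _prefs = sys.modules[__name__]
    print(getattr(_prefs, 'loglevel', logging.WARNING))         # defined above
    print(getattr(_prefs, 'some_future_option', 'fallback'))    # missing -> 'fallback'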
| DavidTingley/ephys-processing-pipeline | installation/klustaviewa-0.3.0/kwiklib/utils/preferences_default.py | Python | gpl-3.0 | 3,402 |
# -*- coding: utf-8 -*-
import re
import lxml.html
from billy.scrape.committees import CommitteeScraper, Committee
from openstates.utils import LXMLMixin
class NYCommitteeScraper(CommitteeScraper, LXMLMixin):
jurisdiction = "ny"
latest_only = True
def _parse_name(self, name):
"""
Split a committee membership string into name and role.
>>> parse_name('Felix Ortiz')
('Felix Ortiz', 'member')
>>> parse_name('Felix Ortiz (Chair)')
('Felix Ortiz', 'chair')
>>> parse_name('Hon. Felix Ortiz, Co-Chair')
('Felix Ortiz', 'co-chair')
>>> parse_name('Owen H.\\r\\nJohnson (Vice Chairperson)')
('Owen H. Johnson', 'vice chairperson')
"""
name = re.sub(r'^(Hon\.|Assemblyman|Assemblywoman)\s+', '', name)
name = re.sub(r'\s+', ' ', name)
roles = ['Chairwoman', 'Chairperson', 'Chair', 'Secretary',
'Treasurer', 'Parliamentarian', 'Chaplain']
match = re.match(
r'([^(]+),? \(?((Co|Vice)?-?\s*(%s))\)?' % '|'.join(roles),
name)
role = 'member'
if match:
name = match.group(1).strip(' ,')
role = match.group(2).lower()
name = name.replace('Sen.', '').replace('Rep.', '').strip()
return (name, role)
def scrape(self, chamber, term):
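# dispatch to scrape_lower_chamber() or scrape_upper_chamber() based on the requested chamber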
getattr(self, 'scrape_' + chamber + '_chamber')()
def scrape_lower_chamber(self, only_names=None):
url = 'http://assembly.state.ny.us/comm/'
page = self.lxmlize(url)
committees = []
link_nodes = self.get_nodes(
page,
'//a[contains(@href, "sec=mem")]')
for link_node in link_nodes:
committee_name_text = self.get_node(
link_node,
'../strong/text()')
if committee_name_text is not None:
committee_name = committee_name_text.strip()
assert committee_name
if 'Caucus' in committee_name:
continue
committees.append(committee_name)
url = link_node.attrib['href']
committee = self.scrape_lower_committee(committee_name, url)
self.save_committee(committee)
return committees
def scrape_lower_committee(self, name, url):
page = self.lxmlize(url)
committee = Committee('lower', name)
committee.add_source(url)
seen = set()
member_links = self.get_nodes(
page,
'//div[@class="commlinks"]//a[contains(@href, "mem")]')
for member_link in member_links:
member_name = None
member_role = None
member_text = member_link.text
if member_text is not None:
member = member_text.strip()
member = re.sub(r'\s+', ' ', member)
member_name, member_role = self._parse_name(member)
if member_name is None:
continue
# Figure out if this person is the chair.
role_type = self.get_node(
member_link,
'../../preceding-sibling::div[1]/text()')
if role_type in (['Chair'], ['Co-Chair']):
member_role = 'chair'
else:
member_role = 'member'
if member_name not in seen:
committee.add_member(member_name, member_role)
seen.add(member_name)
return committee
def scrape_upper_chamber(self):
url = 'http://www.nysenate.gov/senators-committees'
page = self.lxmlize(url)
committees = []
committee_nodes = self.get_nodes(
page,
'//div[@id="c-committees-container"][1]//'
'a[@class="c-committee-link"]')
for committee_node in committee_nodes:
name_text = self.get_node(
committee_node,
'./h4[@class="c-committee-title"][1]/text()')
if name_text is not None:
name = name_text.strip()
assert name
committees.append(name)
# Retrieve committee information.
committee_url = committee_node.attrib['href']
committee = self.scrape_upper_committee(name,
committee_url)
self.save_committee(committee)
return committees
def scrape_upper_committee(self, committee_name, url):
page = self.lxmlize(url)
committee = Committee('upper', committee_name)
committee.add_source(url)
# Committee member attributes.
member_name = None
member_role = None
# Attempt to record the committee chair.
committee_chair = self.get_node(
page,
'//div[@class="nys-senator" and div[@class="nys-senator--info"'
' and p[@class="nys-senator--title" and'
' normalize-space(text())="Chair"]]]')
if committee_chair is not None:
info_node = self.get_node(
committee_chair,
'div[@class="nys-senator--info" and p[@class='
'"nys-senator--title" and contains(text(), "Chair")]]')
if info_node is not None:
# Attempt to retrieve committee chair's name.
member_name_text = self.get_node(
info_node,
'./h4[@class="nys-senator--name"][1]/a[1]/text()')
if member_name_text is not None:
member_name = member_name_text.strip()
else:
warning = ('Could not find the name of the chair for the'
' {} committee')
self.logger.warning(warning.format(committee_name))
# Attempt to retrieve committee chair's role (explicitly).
member_role_text = self.get_node(
info_node,
'./p[@class="nys-senator--title" and contains(text(), '
'"Chair")][1]/text()')
if member_role_text is not None:
member_role = member_role_text.strip()
else:
# This seems like a silly case, but could still be useful
# to check for.
warning = ('Could not find the role of the chair for the'
' {} committee')
self.logger.warning(warning.format(committee_name))
if member_name is not None and member_role is not None:
committee.add_member(member_name, member_role)
else:
warning = ('Could not find information for the chair of the'
' {} committee.')
self.logger.warning(warning.format(committee_name))
else:
warning = 'Missing chairperson for the {} committee.'
self.logger.warning(warning.format(committee_name))
# Get list of regular committee members.
member_nodes = self.get_nodes(
page,
'//div[contains(concat(" ", @class, " "), '
'" c-senators-container ")]//div[@class="view-content"]/'
' div/a')
# Attempt to record each committee member.
for member_node in member_nodes:
member_name = None
member_name_text = self.get_node(
member_node,
'.//div[@class="nys-senator--info"][1]/h4[@class='
'"nys-senator--name"][1]/text()')
if member_name_text is not None:
member_name = member_name_text.strip()
if member_name is not None:
committee.add_member(member_name, 'member')
else:
warning = ('Could not find the name of a member in the {}'
' committee')
self.logger.warning(warning.format(committee_name))
return committee
| cliftonmcintosh/openstates | openstates/ny/committees.py | Python | gpl-3.0 | 8,055 |
# coding=utf-8
HOSTNAME = 'localhost'
DATABASE = 'r'
USERNAME = 'web'
PASSWORD = 'web'
DB_URI = 'mysql://{}:{}@{}/{}'.format(
USERNAME, PASSWORD, HOSTNAME, DATABASE)
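# Illustrative check (added as an example, not part of the original module):
# with the values above, DB_URI evaluates to 'mysql://web:web@localhost/r'.
if __name__ == '__main__':
    print(DB_URI)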
| dongweiming/web_develop | chapter3/section4/consts.py | Python | gpl-3.0 | 170 |
#!/usr/bin/env python
# -*- coding: iso-8859-1 -*-
###########################################################################
# #
# ESPResSo++ Benchmark Python script for a polymer melt #
# #
###########################################################################
import sys
import time
import espresso
import mpi4py.MPI as MPI
import logging
from espresso import Real3D, Int3D
from espresso.tools import lammps, gromacs
from espresso.tools import decomp, timers, replicate
# simulation parameters (nvt = False is nve)
steps = 1000
rc = 1.12
skin = 0.3
nvt = True
timestep = 0.01
######################################################################
### IT SHOULD BE UNNECESSARY TO MAKE MODIFICATIONS BELOW THIS LINE ###
######################################################################
sys.stdout.write('Setting up simulation ...\n')
bonds, angles, x, y, z, Lx, Ly, Lz = lammps.read('espressopp_polymer_melt.start')
bonds, angles, x, y, z, Lx, Ly, Lz = replicate(bonds, angles, x, y, z, Lx, Ly, Lz, xdim=1, ydim=1, zdim=1)
num_particles = len(x)
density = num_particles / (Lx * Ly * Lz)
size = (Lx, Ly, Lz)
system = espresso.System()
system.rng = espresso.esutil.RNG()
system.bc = espresso.bc.OrthorhombicBC(system.rng, size)
system.skin = skin
comm = MPI.COMM_WORLD
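# derive the MPI node grid from the number of ranks, and the cell grid from box size, cutoff and skin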
nodeGrid = espresso.tools.decomp.nodeGrid(comm.size)
cellGrid = espresso.tools.decomp.cellGrid(size,nodeGrid,rc,skin)
system.storage = espresso.storage.DomainDecomposition(system, nodeGrid, cellGrid)
# add particles to the system and then decompose
# do this in chunks of 1000 particles to speed it up
props = ['id', 'type', 'mass', 'pos']
new_particles = []
for i in range(num_particles):
part = [i + 1, 0, 1.0, Real3D(x[i], y[i], z[i])]
new_particles.append(part)
if i % 1000 == 0:
system.storage.addParticles(new_particles, *props)
system.storage.decompose()
new_particles = []
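# after the loop, flush any particles left over from the last partial chunk and decompose once more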
system.storage.addParticles(new_particles, *props)
system.storage.decompose()
# Lennard-Jones with Verlet list
vl = espresso.VerletList(system, cutoff = rc + system.skin)
potLJ = espresso.interaction.LennardJones(1.0, 1.0, cutoff = rc, shift = False)
interLJ = espresso.interaction.VerletListLennardJones(vl)
interLJ.setPotential(type1 = 0, type2 = 0, potential = potLJ)
system.addInteraction(interLJ)
# FENE bonds
fpl = espresso.FixedPairList(system.storage)
fpl.addBonds(bonds)
potFENE = espresso.interaction.FENE(K=30.0, r0=0.0, rMax=1.5)
interFENE = espresso.interaction.FixedPairListFENE(system, fpl, potFENE)
system.addInteraction(interFENE)
# Cosine with FixedTriple list
ftl = espresso.FixedTripleList(system.storage)
ftl.addTriples(angles)
potCosine = espresso.interaction.Cosine(K=1.5, theta0=3.1415926)
interCosine = espresso.interaction.FixedTripleListCosine(system, ftl, potCosine)
#interCosine.setPotential(type1 = 0, type2 = 0, potential = potCosine)
system.addInteraction(interCosine)
# integrator
integrator = espresso.integrator.VelocityVerlet(system)
integrator.dt = timestep
if(nvt):
langevin = espresso.integrator.LangevinThermostat(system)
langevin.gamma = 1.0
langevin.temperature = 1.0
integrator.addExtension(langevin)
# print simulation parameters
print ''
print 'number of particles =', num_particles
print 'density = %.4f' % (density)
print 'rc =', rc
print 'dt =', integrator.dt
print 'skin =', system.skin
print 'nvt =', nvt
print 'steps =', steps
print 'NodeGrid = %s' % (nodeGrid)
print 'CellGrid = %s' % (cellGrid)
print ''
# analysis
# configurations = espresso.analysis.Configurations(system)
# configurations.gather()
temperature = espresso.analysis.Temperature(system)
pressure = espresso.analysis.Pressure(system)
pressureTensor = espresso.analysis.PressureTensor(system)
fmt = '%5d %8.4f %10.5f %8.5f %12.3f %12.3f %12.3f %12.3f %12.3f\n'
T = temperature.compute()
P = pressure.compute()
Pij = pressureTensor.compute()
Ek = 0.5 * T * (3 * num_particles)
Ep = interLJ.computeEnergy()
Eb = interFENE.computeEnergy()
Ea = interCosine.computeEnergy()
Etotal = Ek + Ep + Eb + Ea
sys.stdout.write(' step T P Pxy etotal ekinetic epair ebond eangle\n')
sys.stdout.write(fmt % (0, T, P, Pij[3], Etotal, Ek, Ep, Eb, Ea))
start_time = time.clock()
integrator.run(steps)
end_time = time.clock()
T = temperature.compute()
P = pressure.compute()
Pij = pressureTensor.compute()
Ek = 0.5 * T * (3 * num_particles)
Ep = interLJ.computeEnergy()
Eb = interFENE.computeEnergy()
Ea = interCosine.computeEnergy()
Etotal = Ek + Ep + Eb + Ea
sys.stdout.write(fmt % (steps, T, P, Pij[3], Etotal, Ek, Ep, Eb, Ea))
sys.stdout.write('\n')
# print timings and neighbor list information
timers.show(integrator.getTimers(), precision=2)
sys.stdout.write('Total # of neighbors = %d\n' % vl.totalSize())
sys.stdout.write('Ave neighs/atom = %.1f\n' % (vl.totalSize() / float(num_particles)))
sys.stdout.write('Neighbor list builds = %d\n' % vl.builds)
sys.stdout.write('Integration steps = %d\n' % integrator.step)
sys.stdout.write('CPUs = %i CPU time per CPU = %.1f\n' % (comm.size,end_time - start_time))
| acfogarty/espressopp | bench/polymer_melt/espressopp/espressopp_polymer_melt.py | Python | gpl-3.0 | 5,227 |
"""
Tests for serial.py.
"""
import cPickle
from cStringIO import StringIO
import gzip
import shutil
import tempfile
import unittest
from rdkit import Chem
from rdkit.Chem import AllChem
from vs_utils.utils.rdkit_utils import conformers, serial
class TestMolIO(unittest.TestCase):
"""
Base test class for molecule I/O.
"""
def setUp(self):
"""
Write SDF and SMILES molecules to temporary files.
"""
self.temp_dir = tempfile.mkdtemp()
# aspirin
self.aspirin = self._get_mol_from_smiles('CC(=O)OC1=CC=CC=C1C(=O)O',
'aspirin')
self.aspirin_h = Chem.AddHs(self.aspirin)
self.aspirin_sodium = self._get_mol_from_smiles(
'CC(=O)OC1=CC=CC=C1C(=O)[O-].[Na+]', 'aspirin sodium')
# levalbuterol (chiral)
self.levalbuterol = self._get_mol_from_smiles(
'CC(C)(C)NC[C@@H](C1=CC(=C(C=C1)O)CO)O', 'levalbuterol')
self.levalbuterol_hcl = self._get_mol_from_smiles(
'CC(C)(C)NC[C@@H](C1=CC(=C(C=C1)O)CO)O.Cl',
'levalbuterol hydrochloride')
self.ref_mols = [self.aspirin, self.levalbuterol]
self.reader = serial.MolReader(compute_2d_coords=False)
def _get_mol_from_smiles(self, smiles, name=None):
"""
Construct a molecule from a SMILES string.
Molecules loaded from SMILES strings have zero conformers, but
molecules loaded from SDF blocks are treated as 3D and have one
conformer even if coordinates are not set. This method dumps the
molecule to SDF and loads it again to obtain a molecule with one
conformer.
Parameters
----------
smiles : str
SMILES string.
name : str, optional
Molecule name.
"""
mol = Chem.MolFromSmiles(smiles)
if name is not None:
mol.SetProp('_Name', name)
AllChem.Compute2DCoords(mol) # required to preserve stereo
sdf = Chem.MolToMolBlock(mol, includeStereo=True)
mol_with_conf = Chem.MolFromMolBlock(sdf)
return mol_with_conf
def tearDown(self):
"""
Clean up temporary files.
"""
shutil.rmtree(self.temp_dir)
def test_guess_mol_format(self):
"""
Test MolIO.guess_mol_format.
"""
mol_formats = {
'pkl': ['test.pkl', 'test.pkl.gz', 'test.test.pkl',
'test.test.pkl.gz'],
'sdf': ['test.sdf', 'test.sdf.gz', 'test.test.sdf',
'test.test.sdf.gz'],
'smi': ['test.smi', 'test.smi.gz', 'test.can', 'test.can.gz',
'test.ism', 'test.ism.gz', 'test.test.smi',
'test.test.smi.gz']
}
for mol_format in mol_formats.keys():
for filename in mol_formats[mol_format]:
assert self.reader.guess_mol_format(filename) == mol_format
def test_close_context(self):
"""
Make sure MolIO closes files it opened.
"""
_, filename = tempfile.mkstemp(suffix='.sdf', dir=self.temp_dir)
self.reader.open(filename)
self.reader.close()
assert self.reader.f.closed
# also test the context manager
with self.reader.open(filename):
pass
assert self.reader.f.closed
def test_not_close_other(self):
"""
Make sure MolIO doesn't close files it didn't open.
"""
_, filename = tempfile.mkstemp(suffix='.sdf', dir=self.temp_dir)
with open(filename) as f:
reader = serial.MolReader(f, mol_format='sdf')
reader.close()
assert not f.closed
# also test the context manager
with open(filename) as g:
with serial.MolReader(g, mol_format='sdf'):
pass
assert not g.closed
class TestMolReader(TestMolIO):
"""
Test MolReader.
"""
def test_read_sdf(self):
"""
Read an SDF file.
"""
_, filename = tempfile.mkstemp(suffix='.sdf', dir=self.temp_dir)
with open(filename, 'wb') as f:
f.write(Chem.MolToMolBlock(self.aspirin))
self.reader.open(filename)
mols = self.reader.get_mols()
assert mols.next().ToBinary() == self.aspirin.ToBinary()
def test_read_sdf_gz(self):
"""
Read a compressed SDF file.
"""
_, filename = tempfile.mkstemp(suffix='.sdf.gz', dir=self.temp_dir)
with gzip.open(filename, 'wb') as f:
f.write(Chem.MolToMolBlock(self.aspirin))
self.reader.open(filename)
mols = self.reader.get_mols()
assert mols.next().ToBinary() == self.aspirin.ToBinary()
def test_read_smi(self):
"""
Read a SMILES file.
"""
self.aspirin.RemoveAllConformers() # SMILES are read without confs
_, filename = tempfile.mkstemp(suffix='.smi', dir=self.temp_dir)
with open(filename, 'wb') as f:
f.write(Chem.MolToSmiles(self.aspirin))
self.reader.open(filename)
mols = self.reader.get_mols()
assert mols.next().ToBinary() == self.aspirin.ToBinary()
def test_read_smi_title(self):
"""
Read a SMILES file with molecule titles.
"""
self.aspirin.RemoveAllConformers() # SMILES are read without confs
_, filename = tempfile.mkstemp(suffix='.smi', dir=self.temp_dir)
with open(filename, 'wb') as f:
f.write('{}\t{}'.format(Chem.MolToSmiles(self.aspirin), 'aspirin'))
self.reader.open(filename)
mols = self.reader.get_mols()
mol = mols.next()
assert mol.ToBinary() == self.aspirin.ToBinary()
assert mol.GetProp('_Name') == self.aspirin.GetProp('_Name')
def test_read_smi_gz(self):
"""
Read a compressed SMILES file.
"""
self.aspirin.RemoveAllConformers() # SMILES are read without confs
_, filename = tempfile.mkstemp(suffix='.smi.gz', dir=self.temp_dir)
with gzip.open(filename, 'wb') as f:
f.write(Chem.MolToSmiles(self.aspirin))
self.reader.open(filename)
mols = self.reader.get_mols()
assert mols.next().ToBinary() == self.aspirin.ToBinary()
def test_read_pickle(self):
"""
Read from a pickle.
"""
_, filename = tempfile.mkstemp(suffix='.pkl', dir=self.temp_dir)
with open(filename, 'wb') as f:
cPickle.dump([self.aspirin], f, cPickle.HIGHEST_PROTOCOL)
self.reader.open(filename)
mols = self.reader.get_mols()
assert mols.next().ToBinary() == self.aspirin.ToBinary()
def test_read_pickle_gz(self):
"""
Read from a compressed pickle.
"""
_, filename = tempfile.mkstemp(suffix='.pkl.gz', dir=self.temp_dir)
with gzip.open(filename, 'wb') as f:
cPickle.dump([self.aspirin], f, cPickle.HIGHEST_PROTOCOL)
self.reader.open(filename)
mols = self.reader.get_mols()
assert mols.next().ToBinary() == self.aspirin.ToBinary()
def test_read_file_like(self):
"""
Read from a file-like object.
"""
_, filename = tempfile.mkstemp(suffix='.sdf', dir=self.temp_dir)
with open(filename, 'wb') as f:
f.write(Chem.MolToMolBlock(self.aspirin))
with open(filename) as f:
reader = serial.MolReader(f, mol_format='sdf')
mols = reader.get_mols()
assert mols.next().ToBinary() == self.aspirin.ToBinary()
def test_read_compressed_file_like(self):
"""
Read from a file-like object using gzip.
"""
_, filename = tempfile.mkstemp(suffix='.sdf.gz', dir=self.temp_dir)
with gzip.open(filename, 'wb') as f:
f.write(Chem.MolToMolBlock(self.aspirin))
with gzip.open(filename) as f:
reader = serial.MolReader(f, mol_format='sdf')
mols = reader.get_mols()
assert mols.next().ToBinary() == self.aspirin.ToBinary()
def test_read_multiple_sdf(self):
"""
Read a multiple-molecule SDF file.
"""
_, filename = tempfile.mkstemp(suffix='.sdf', dir=self.temp_dir)
with open(filename, 'wb') as f:
for mol in self.ref_mols:
sdf = Chem.MolToMolBlock(mol)
f.write(sdf)
f.write('$$$$\n') # add molecule delimiter
self.reader.open(filename)
mols = self.reader.get_mols()
mols = list(mols)
assert len(mols) == 2
for i in xrange(len(mols)):
assert mols[i].ToBinary() == self.ref_mols[i].ToBinary()
def test_read_multiple_smiles(self):
"""
Read a multiple-molecule SMILES file.
"""
ref_mols = []
for mol in self.ref_mols:
mol = Chem.MolFromSmiles(Chem.MolToSmiles(mol))
ref_mols.append(mol)
_, filename = tempfile.mkstemp(suffix='.smi', dir=self.temp_dir)
with open(filename, 'wb') as f:
for mol in self.ref_mols:
smiles = Chem.MolToSmiles(mol)
name = mol.GetProp('_Name')
f.write('{}\t{}\n'.format(smiles, name))
self.reader.open(filename)
mols = self.reader.get_mols()
mols = list(mols)
assert len(mols) == 2
for i in xrange(len(mols)):
assert mols[i].ToBinary() == ref_mols[i].ToBinary()
def test_read_multiconformer(self):
"""
Read a multiconformer SDF file.
"""
# generate conformers
engine = conformers.ConformerGenerator(max_conformers=3,
pool_multiplier=1)
ref_mol = engine.generate_conformers(self.aspirin)
assert ref_mol.GetNumConformers() > 1
# write to disk
_, filename = tempfile.mkstemp(suffix='.sdf', dir=self.temp_dir)
with open(filename, 'wb') as f:
for conf in ref_mol.GetConformers():
f.write(Chem.MolToMolBlock(ref_mol, confId=conf.GetId()))
f.write('$$$$\n') # add molecule delimiter
# compare
self.reader.open(filename)
mols = self.reader.get_mols()
mols = list(mols)
assert len(mols) == 1
# FIXME get ToBinary test to work
# assert mols[0].ToBinary() == ref_mol.ToBinary()
assert Chem.MolToMolBlock(mols[0]) == Chem.MolToMolBlock(ref_mol)
def test_read_multiple_multiconformer(self):
"""
Read a multiconformer SDF file containing multiple molecules.
"""
# generate conformers
ref_mols = []
engine = conformers.ConformerGenerator(max_conformers=3,
pool_multiplier=1)
for mol in self.ref_mols:
expanded = engine.generate_conformers(mol)
assert expanded.GetNumConformers() > 1
ref_mols.append(expanded)
# write to disk
_, filename = tempfile.mkstemp(suffix='.sdf', dir=self.temp_dir)
with open(filename, 'wb') as f:
for mol in ref_mols:
for conf in mol.GetConformers():
f.write(Chem.MolToMolBlock(mol, includeStereo=1,
confId=conf.GetId()))
f.write('$$$$\n') # add molecule delimiter
# compare
self.reader.open(filename)
mols = self.reader.get_mols()
mols = list(mols)
assert len(mols) == 2
for mol, ref_mol in zip(mols, ref_mols):
# FIXME get ToBinary test to work
# assert mol.ToBinary() == ref_mol.ToBinary()
assert Chem.MolToMolBlock(
mol, includeStereo=1) == Chem.MolToMolBlock(ref_mol,
includeStereo=1)
def test_are_same_molecule(self):
"""
Test MolReader.are_same_molecule.
"""
assert self.reader.are_same_molecule(self.aspirin, self.aspirin)
assert not self.reader.are_same_molecule(self.aspirin,
self.levalbuterol)
def test_no_remove_hydrogens(self):
"""
Test hydrogen retention.
"""
_, filename = tempfile.mkstemp(suffix='.sdf', dir=self.temp_dir)
with open(filename, 'wb') as f:
f.write(Chem.MolToMolBlock(self.aspirin_h))
reader = serial.MolReader(remove_hydrogens=False, remove_salts=False)
reader.open(filename)
mols = reader.get_mols()
# FIXME get ToBinary test to work
# assert mols.next().ToBinary() == self.aspirin_h.ToBinary()
assert Chem.MolToMolBlock(mols.next()) == Chem.MolToMolBlock(
self.aspirin_h)
def test_remove_hydrogens(self):
"""
Test hydrogen removal.
"""
_, filename = tempfile.mkstemp(suffix='.sdf', dir=self.temp_dir)
with open(filename, 'wb') as f:
f.write(Chem.MolToMolBlock(self.aspirin_h))
reader = serial.MolReader(remove_hydrogens=True)
reader.open(filename)
mols = reader.get_mols()
assert mols.next().ToBinary() == self.aspirin.ToBinary()
def test_remove_salts(self):
"""
Test salt removal.
"""
_, filename = tempfile.mkstemp(suffix='.sdf', dir=self.temp_dir)
with open(filename, 'wb') as f:
for mol in [self.aspirin_sodium, self.levalbuterol_hcl]:
f.write(Chem.MolToMolBlock(mol))
f.write('$$$$\n') # molecule delimiter
ref_mols = [self.aspirin_sodium, self.levalbuterol_hcl]
self.reader = serial.MolReader(remove_salts=True)
self.reader.open(filename)
mols = self.reader.get_mols()
mols = list(mols)
assert len(mols) == 2
for mol, ref_mol in zip(mols, ref_mols):
assert mol.GetNumAtoms() < ref_mol.GetNumAtoms()
desalted = self.reader.clean_mol(ref_mol)
assert mol.ToBinary() == desalted.ToBinary()
def test_no_remove_salts(self):
"""
Test salt retention.
"""
_, filename = tempfile.mkstemp(suffix='.sdf', dir=self.temp_dir)
with open(filename, 'wb') as f:
for mol in [self.aspirin_sodium, self.levalbuterol_hcl]:
f.write(Chem.MolToMolBlock(mol))
f.write('$$$$\n') # molecule delimiter
ref_mols = [self.aspirin_sodium, self.levalbuterol_hcl]
self.reader = serial.MolReader(remove_salts=False)
self.reader.open(filename)
mols = self.reader.get_mols()
mols = list(mols)
assert len(mols) == 2
self.reader = serial.MolReader(remove_salts=True)
for mol, ref_mol in zip(mols, ref_mols):
assert mol.ToBinary() == ref_mol.ToBinary()
desalted = self.reader.clean_mol(ref_mol)
assert mol.GetNumAtoms() > desalted.GetNumAtoms()
def test_iterator(self):
"""
Test MolWriter.__iter__.
"""
_, filename = tempfile.mkstemp(suffix='.sdf', dir=self.temp_dir)
with open(filename, 'wb') as f:
for mol in self.ref_mols:
f.write(Chem.MolToMolBlock(mol))
f.write('$$$$\n') # molecule delimiter
self.reader.open(filename)
for i, mol in enumerate(self.reader):
assert mol.ToBinary() == self.ref_mols[i].ToBinary()
def test_context_manager(self):
"""
Test using 'with' statement to read molecules.
"""
_, filename = tempfile.mkstemp(suffix='.sdf', dir=self.temp_dir)
with open(filename, 'wb') as f:
for mol in self.ref_mols:
f.write(Chem.MolToMolBlock(mol))
f.write('$$$$\n') # molecule delimiter
with self.reader.open(filename) as reader:
for i, mol in enumerate(reader):
assert mol.ToBinary() == self.ref_mols[i].ToBinary()
def test_skip_failures(self):
"""
Test skip read failures.
"""
smiles = 'CO(C)C'
reader = serial.MolReader(StringIO(smiles), 'smi')
mols = list(reader.get_mols())
assert len(mols) == 0
def test_is_a_salt(self):
"""
Test that a molecule that _is_ a salt is not returned empty.
"""
smiles = 'C(=CC(=O)O)C(=O)O'
reader = serial.MolReader(StringIO(smiles), 'smi', remove_salts=True)
mols = list(reader.get_mols())
assert len(mols) == 1 and mols[0].GetNumAtoms()
def test_read_multiple_pickles(self):
"""
Test reading a file containing multiple pickles. This can occur if
MolWriter.write is called multiple times.
"""
_, filename = tempfile.mkstemp(suffix='.pkl', dir=self.temp_dir)
with serial.MolWriter().open(filename) as writer:
writer.write([self.aspirin])
writer.write([self.levalbuterol])
with self.reader.open(filename) as reader:
mols = list(reader)
assert len(mols) == 2
assert mols[0].ToBinary() == self.aspirin.ToBinary()
assert mols[1].ToBinary() == self.levalbuterol.ToBinary()
class TestMolWriter(TestMolIO):
"""
Test MolWriter.
"""
def setUp(self):
"""
Add writer to inherited setup.
"""
super(TestMolWriter, self).setUp()
self.writer = serial.MolWriter()
self.aspirin_sdf = Chem.MolToMolBlock(self.aspirin)
self.aspirin_smiles = Chem.MolToSmiles(self.aspirin) + '\taspirin'
def test_write_sdf(self):
"""
Write an SDF file.
"""
_, filename = tempfile.mkstemp(suffix='.sdf', dir=self.temp_dir)
self.writer.open(filename)
self.writer.write([self.aspirin])
self.writer.close()
self.reader.open(filename)
mols = self.reader.get_mols()
# compare molecules
assert mols.next().ToBinary() == self.aspirin.ToBinary()
# compare files
with open(filename) as f:
data = f.read()
assert data == self.aspirin_sdf + '$$$$\n'
def test_write_sdf_gz(self):
"""
Write a compressed SDF file.
"""
_, filename = tempfile.mkstemp(suffix='.sdf.gz', dir=self.temp_dir)
self.writer.open(filename)
self.writer.write([self.aspirin])
self.writer.close()
self.reader.open(filename)
mols = self.reader.get_mols()
# compare molecules
assert mols.next().ToBinary() == self.aspirin.ToBinary()
# compare files
with gzip.open(filename) as f:
data = f.read()
assert data == self.aspirin_sdf + '$$$$\n'
def test_write_smiles(self):
"""
Write a SMILES file.
"""
_, filename = tempfile.mkstemp(suffix='.smi', dir=self.temp_dir)
self.writer.open(filename)
self.writer.write([self.aspirin])
self.writer.close()
self.reader.open(filename)
mols = self.reader.get_mols()
# compare molecules
self.aspirin.RemoveAllConformers() # SMILES are read without confs
assert mols.next().ToBinary() == self.aspirin.ToBinary()
# compare files
with open(filename) as f:
data = f.read()
assert data.strip() == self.aspirin_smiles
def test_write_smiles_gz(self):
"""
Write a compressed SMILES file.
"""
_, filename = tempfile.mkstemp(suffix='.smi.gz', dir=self.temp_dir)
self.writer.open(filename)
self.writer.write([self.aspirin])
self.writer.close()
self.reader.open(filename)
mols = self.reader.get_mols()
# compare molecules
self.aspirin.RemoveAllConformers() # SMILES are read without confs
assert mols.next().ToBinary() == self.aspirin.ToBinary()
# compare files
with gzip.open(filename) as f:
data = f.read()
assert data.strip() == self.aspirin_smiles
def test_write_pickle(self):
"""
Write a pickle.
"""
_, filename = tempfile.mkstemp(suffix='.pkl', dir=self.temp_dir)
self.writer.open(filename)
self.writer.write([self.aspirin])
self.writer.close()
self.reader.open(filename)
mols = self.reader.get_mols()
# compare molecules
assert mols.next().ToBinary() == self.aspirin.ToBinary()
# compare files
with open(filename) as f:
data = f.read()
assert data == cPickle.dumps([self.aspirin],
cPickle.HIGHEST_PROTOCOL)
def test_write_pickle_gz(self):
"""
Write a compressed pickle.
"""
_, filename = tempfile.mkstemp(suffix='.pkl.gz', dir=self.temp_dir)
self.writer.open(filename)
self.writer.write([self.aspirin])
self.writer.close()
self.reader.open(filename)
mols = self.reader.get_mols()
# compare molecules
assert mols.next().ToBinary() == self.aspirin.ToBinary()
# compare files
with gzip.open(filename) as f:
data = f.read()
assert data == cPickle.dumps([self.aspirin],
cPickle.HIGHEST_PROTOCOL)
def test_stereo_setup(self):
"""
Make sure chiral reference molecule is correct.
"""
smiles = Chem.MolToSmiles(self.levalbuterol, isomericSmiles=True)
assert '@' in smiles # check for stereochemistry flag
# check that removing stereochemistry changes the molecule
original = self.levalbuterol.ToBinary()
AllChem.RemoveStereochemistry(self.levalbuterol)
assert self.levalbuterol.ToBinary() != original
def test_stereo_sdf(self):
"""
Test stereochemistry preservation when writing to SDF.
"""
_, filename = tempfile.mkstemp(suffix='.sdf', dir=self.temp_dir)
writer = serial.MolWriter(stereo=True)
writer.open(filename)
writer.write([self.levalbuterol])
writer.close()
self.reader.open(filename)
mols = self.reader.get_mols()
assert mols.next().ToBinary() == self.levalbuterol.ToBinary()
def test_stereo_smi(self):
"""
Test stereochemistry preservation when writing to SMILES.
"""
# FIXME avoid this and use self.levalbuterol.RemoveAllConformers()
ref_mol = Chem.MolFromSmiles(Chem.MolToSmiles(self.levalbuterol,
isomericSmiles=True))
_, filename = tempfile.mkstemp(suffix='.smi', dir=self.temp_dir)
writer = serial.MolWriter(stereo=True)
writer.open(filename)
writer.write([self.levalbuterol])
writer.close()
self.reader.open(filename)
mols = self.reader.get_mols()
assert mols.next().ToBinary() == ref_mol.ToBinary()
def test_no_stereo_sdf(self):
"""
Test stereochemistry removal when writing to SDF.
"""
_, filename = tempfile.mkstemp(suffix='.sdf', dir=self.temp_dir)
writer = serial.MolWriter(stereo=False)
writer.open(filename)
writer.write([self.levalbuterol])
writer.close()
self.reader.open(filename)
mols = self.reader.get_mols()
mol = mols.next()
# make sure the written molecule differs from the reference
assert mol.ToBinary() != self.levalbuterol.ToBinary()
# check again after removing stereochemistry
AllChem.RemoveStereochemistry(self.levalbuterol)
# FIXME get ToBinary test to work
# assert mol.ToBinary() == self.levalbuterol.ToBinary()
assert Chem.MolToMolBlock(
mol, includeStereo=True) == Chem.MolToMolBlock(
self.levalbuterol, includeStereo=True)
def test_no_stereo_smiles(self):
"""
Test stereochemistry removal when writing to SMILES.
"""
_, filename = tempfile.mkstemp(suffix='.smi', dir=self.temp_dir)
writer = serial.MolWriter(stereo=False)
writer.open(filename)
writer.write([self.levalbuterol])
writer.close()
self.reader.open(filename)
mols = self.reader.get_mols()
mol = mols.next()
# make sure the written molecule differs from the reference
assert mol.ToBinary() != self.levalbuterol.ToBinary()
# check again after removing stereochemistry
AllChem.RemoveStereochemistry(self.levalbuterol)
# FIXME get ToBinary test to work
# assert mol.ToBinary() == self.levalbuterol.ToBinary()
assert Chem.MolToSmiles(mol, isomericSmiles=True) == Chem.MolToSmiles(
self.levalbuterol, isomericSmiles=True)
def test_context_manager(self):
"""
Test use of 'with' statement to write molecules.
"""
_, filename = tempfile.mkstemp(suffix='.sdf', dir=self.temp_dir)
with self.writer.open(filename) as writer:
writer.write([self.aspirin])
self.reader.open(filename)
mols = self.reader.get_mols()
# compare molecules
assert mols.next().ToBinary() == self.aspirin.ToBinary()
# compare files
with open(filename) as f:
data = f.read()
assert data == self.aspirin_sdf + '$$$$\n'
| rbharath/vs-utils | vs_utils/utils/rdkit_utils/tests/test_serial.py | Python | gpl-3.0 | 25,593 |
#!/usr/bin/env python
# -*- coding: UTF8 -*-
#
# Provides access to portaudio.
# Copyright (C) 2010 Josiah Gordon <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
""" A portaudio module.
"""
__all__ = ['_portaudio']
| zepto/musio | musio/portaudio/__init__.py | Python | gpl-3.0 | 830 |
../../../../../../share/pyshared/ubuntu-sso-client/ubuntu_sso/xdg_base_directory/windows.py
| Alberto-Beralix/Beralix | i386-squashfs-root/usr/lib/python2.7/dist-packages/ubuntu-sso-client/ubuntu_sso/xdg_base_directory/windows.py | Python | gpl-3.0 | 91 |
from __future__ import absolute_import
import requests
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.simplejson import dumps, loads
from common.models import Singleton
from lock_manager import Lock, LockError
from .literals import FORM_SUBMIT_URL, FORM_KEY, FORM_RECEIVER_FIELD, TIMEOUT
from .exceptions import AlreadyRegistered
class RegistrationSingleton(Singleton):
_cached_name = None
_registered = None
registered = models.BooleanField(default=False, verbose_name=_('registered'))
registration_data = models.TextField(verbose_name=_(u'registration data'), blank=True)
@classmethod
def registration_state(cls):
if cls._registered:
return cls._registered
else:
instance = cls.objects.get()
if instance.is_registered:
cls._registered = instance.is_registered
return instance.is_registered
@classmethod
def registered_name(cls):
if cls._cached_name:
return cls._cached_name
else:
instance = cls.objects.get()
try:
dictionary = loads(instance.registration_data)
except ValueError:
dictionary = {}
name_value = dictionary.get('company') or dictionary.get('name')
if name_value:
cls._cached_name = name_value
return name_value or _(u'No name')
@property
def is_registered(self):
return self.registered
def register(self, form):
from installation.models import Installation
if self.is_registered:
raise AlreadyRegistered
installation = Installation.objects.get()
dictionary = {}
dictionary.update(form.cleaned_data)
dictionary.update({
'uuid': installation.uuid
})
self.registration_data = dumps(dictionary)
self.save()
self.submit()
def submit(self):
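# use a named lock so only one process uploads the registration data at a time;
# if the lock is already held, skip this attempt silently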
try:
lock = Lock.acquire_lock('upload_registration')
except LockError:
pass
else:
try:
requests.post(FORM_SUBMIT_URL, data={'formkey': FORM_KEY, FORM_RECEIVER_FIELD: self.registration_data}, timeout=TIMEOUT)
except (requests.exceptions.Timeout, requests.exceptions.ConnectionError):
pass
else:
self.registered = True
self.save()
finally:
lock.release()
class Meta:
verbose_name = verbose_name_plural = _(u'registration properties')
| appsembler/mayan_appsembler | apps/registration/models.py | Python | gpl-3.0 | 2,647 |
from six import BytesIO
from pathod.language import actions, parse_pathoc, parse_pathod, serve
def parse_request(s):
return next(parse_pathoc(s))
def test_unique_name():
assert not actions.PauseAt(0, "f").unique_name
assert actions.DisconnectAt(0).unique_name
class TestDisconnects:
def test_parse_pathod(self):
a = next(parse_pathod("400:d0")).actions[0]
assert a.spec() == "d0"
a = next(parse_pathod("400:dr")).actions[0]
assert a.spec() == "dr"
def test_at(self):
e = actions.DisconnectAt.expr()
v = e.parseString("d0")[0]
assert isinstance(v, actions.DisconnectAt)
assert v.offset == 0
v = e.parseString("d100")[0]
assert v.offset == 100
e = actions.DisconnectAt.expr()
v = e.parseString("dr")[0]
assert v.offset == "r"
def test_spec(self):
assert actions.DisconnectAt("r").spec() == "dr"
assert actions.DisconnectAt(10).spec() == "d10"
class TestInject:
def test_parse_pathod(self):
a = next(parse_pathod("400:ir,@100")).actions[0]
assert a.offset == "r"
assert a.value.datatype == "bytes"
assert a.value.usize == 100
a = next(parse_pathod("400:ia,@100")).actions[0]
assert a.offset == "a"
def test_at(self):
e = actions.InjectAt.expr()
v = e.parseString("i0,'foo'")[0]
assert v.value.val == b"foo"
assert v.offset == 0
assert isinstance(v, actions.InjectAt)
v = e.parseString("ir,'foo'")[0]
assert v.offset == "r"
def test_serve(self):
s = BytesIO()
r = next(parse_pathod("400:i0,'foo'"))
assert serve(r, s, {})
def test_spec(self):
e = actions.InjectAt.expr()
v = e.parseString("i0,'foo'")[0]
assert v.spec() == "i0,'foo'"
def test_spec2(self):
e = actions.InjectAt.expr()
v = e.parseString("i0,@100")[0]
v2 = v.freeze({})
v3 = v2.freeze({})
assert v2.value.val == v3.value.val
class TestPauses:
def test_parse_pathod(self):
e = actions.PauseAt.expr()
v = e.parseString("p10,10")[0]
assert v.seconds == 10
assert v.offset == 10
v = e.parseString("p10,f")[0]
assert v.seconds == "f"
v = e.parseString("pr,f")[0]
assert v.offset == "r"
v = e.parseString("pa,f")[0]
assert v.offset == "a"
def test_request(self):
r = next(parse_pathod('400:p10,10'))
assert r.actions[0].spec() == "p10,10"
def test_spec(self):
assert actions.PauseAt("r", 5).spec() == "pr,5"
assert actions.PauseAt(0, 5).spec() == "p0,5"
assert actions.PauseAt(0, "f").spec() == "p0,f"
def test_freeze(self):
l = actions.PauseAt("r", 5)
assert l.freeze({}).spec() == l.spec()
class Test_Action:
def test_cmp(self):
a = actions.DisconnectAt(0)
b = actions.DisconnectAt(1)
c = actions.DisconnectAt(0)
assert a < b
assert a == c
l = sorted([b, a])
assert l[0].offset == 0
def test_resolve(self):
r = parse_request('GET:"/foo"')
e = actions.DisconnectAt("r")
ret = e.resolve({}, r)
assert isinstance(ret.offset, int)
def test_repr(self):
e = actions.DisconnectAt("r")
assert repr(e)
def test_freeze(self):
l = actions.DisconnectAt(5)
assert l.freeze({}).spec() == l.spec()
| x2Ident/x2Ident_test | mitmproxy/test/pathod/test_language_actions.py | Python | gpl-3.0 | 3,653 |
"""Beautiful Soup
Elixir and Tonic
"The Screen-Scraper's Friend"
http://www.crummy.com/software/BeautifulSoup/
Beautiful Soup uses a pluggable XML or HTML parser to parse a
(possibly invalid) document into a tree representation. Beautiful Soup
provides methods and Pythonic idioms that make it easy to
navigate, search, and modify the parse tree.
Beautiful Soup works with Python 2.6 and up. It works better if lxml
and/or html5lib is installed.
For more than you ever wanted to know about Beautiful Soup, see the
documentation:
http://www.crummy.com/software/BeautifulSoup/bs4/doc/
"""
__author__ = "Leonard Richardson ([email protected])"
__version__ = "4.0.4"
__copyright__ = "Copyright (c) 2004-2012 Leonard Richardson"
__license__ = "MIT"
__all__ = ['BeautifulSoup']
import re
import warnings
from .builder import builder_registry
from .dammit import UnicodeDammit
from .element import (
CData,
Comment,
DEFAULT_OUTPUT_ENCODING,
Declaration,
Doctype,
NavigableString,
PageElement,
ProcessingInstruction,
ResultSet,
SoupStrainer,
Tag,
)
# The very first thing we do is give a useful error if someone is
# running this code under Python 3 without converting it.
syntax_error = u'You are trying to run the Python 2 version of Beautiful Soup under Python 3. This will not work. You need to convert the code, either by installing it (`python setup.py install`) or by running 2to3 (`2to3 -w bs4`).'
class BeautifulSoup(Tag):
"""
This class defines the basic interface called by the tree builders.
These methods will be called by the parser:
reset()
feed(markup)
The tree builder may call these methods from its feed() implementation:
handle_starttag(name, attrs) # See note about return value
handle_endtag(name)
handle_data(data) # Appends to the current data node
endData(containerClass=NavigableString) # Ends the current data node
No matter how complicated the underlying parser is, you should be
able to build a tree using 'start tag' events, 'end tag' events,
'data' events, and "done with data" events.
If you encounter an empty-element tag (aka a self-closing tag,
like HTML's <br> tag), call handle_starttag and then
handle_endtag.
"""
ROOT_TAG_NAME = u'[document]'
# If the end-user gives no indication which tree builder they
# want, look for one with these features.
DEFAULT_BUILDER_FEATURES = ['html', 'fast']
# Used when determining whether a text node is all whitespace and
# can be replaced with a single space. A text node that contains
# fancy Unicode spaces (usually non-breaking) should be left
# alone.
STRIP_ASCII_SPACES = {9: None, 10: None, 12: None, 13: None, 32: None, }
def __init__(self, markup="", features=None, builder=None,
parse_only=None, from_encoding=None, **kwargs):
"""The Soup object is initialized as the 'root tag', and the
provided markup (which can be a string or a file-like object)
is fed into the underlying parser."""
if 'convertEntities' in kwargs:
warnings.warn(
"BS4 does not respect the convertEntities argument to the "
"BeautifulSoup constructor. Entities are always converted "
"to Unicode characters.")
if 'markupMassage' in kwargs:
del kwargs['markupMassage']
warnings.warn(
"BS4 does not respect the markupMassage argument to the "
"BeautifulSoup constructor. The tree builder is responsible "
"for any necessary markup massage.")
if 'smartQuotesTo' in kwargs:
del kwargs['smartQuotesTo']
warnings.warn(
"BS4 does not respect the smartQuotesTo argument to the "
"BeautifulSoup constructor. Smart quotes are always converted "
"to Unicode characters.")
if 'selfClosingTags' in kwargs:
del kwargs['selfClosingTags']
warnings.warn(
"BS4 does not respect the selfClosingTags argument to the "
"BeautifulSoup constructor. The tree builder is responsible "
"for understanding self-closing tags.")
if 'isHTML' in kwargs:
del kwargs['isHTML']
warnings.warn(
"BS4 does not respect the isHTML argument to the "
"BeautifulSoup constructor. You can pass in features='html' "
"or features='xml' to get a builder capable of handling "
"one or the other.")
def deprecated_argument(old_name, new_name):
if old_name in kwargs:
warnings.warn(
'The "%s" argument to the BeautifulSoup constructor '
'has been renamed to "%s."' % (old_name, new_name))
value = kwargs[old_name]
del kwargs[old_name]
return value
return None
parse_only = parse_only or deprecated_argument(
"parseOnlyThese", "parse_only")
from_encoding = from_encoding or deprecated_argument(
"fromEncoding", "from_encoding")
if len(kwargs) > 0:
arg = kwargs.keys().pop()
raise TypeError(
"__init__() got an unexpected keyword argument '%s'" % arg)
if builder is None:
if isinstance(features, basestring):
features = [features]
if features is None or len(features) == 0:
features = self.DEFAULT_BUILDER_FEATURES
builder_class = builder_registry.lookup(*features)
if builder_class is None:
raise ValueError(
"Couldn't find a tree builder with the features you "
"requested: %s. Do you need to install a parser library?"
% ",".join(features))
builder = builder_class()
self.builder = builder
self.is_xml = builder.is_xml
self.builder.soup = self
self.parse_only = parse_only
self.reset()
if hasattr(markup, 'read'): # It's a file-type object.
markup = markup.read()
(self.markup, self.original_encoding, self.declared_html_encoding,
self.contains_replacement_characters) = (
self.builder.prepare_markup(markup, from_encoding))
try:
self._feed()
except StopParsing:
pass
# Clear out the markup and remove the builder's circular
# reference to this object.
self.markup = None
self.builder.soup = None
def _feed(self):
# Convert the document to Unicode.
self.builder.reset()
self.builder.feed(self.markup)
# Close out any unfinished strings and close all the open tags.
self.endData()
while self.currentTag.name != self.ROOT_TAG_NAME:
self.popTag()
def reset(self):
Tag.__init__(self, self, self.builder, self.ROOT_TAG_NAME)
self.hidden = 1
self.builder.reset()
self.currentData = []
self.currentTag = None
self.tagStack = []
self.pushTag(self)
def new_tag(self, name, namespace=None, nsprefix=None, **attrs):
"""Create a new tag associated with this soup."""
return Tag(None, self.builder, name, namespace, nsprefix, attrs)
def new_string(self, s):
"""Create a new NavigableString associated with this soup."""
navigable = NavigableString(s)
navigable.setup()
return navigable
def insert_before(self, successor):
raise ValueError("BeautifulSoup objects don't support insert_before().")
def insert_after(self, successor):
raise ValueError("BeautifulSoup objects don't support insert_after().")
def popTag(self):
tag = self.tagStack.pop()
#print "Pop", tag.name
if self.tagStack:
self.currentTag = self.tagStack[-1]
return self.currentTag
def pushTag(self, tag):
#print "Push", tag.name
if self.currentTag:
self.currentTag.contents.append(tag)
self.tagStack.append(tag)
self.currentTag = self.tagStack[-1]
def endData(self, containerClass=NavigableString):
if self.currentData:
currentData = u''.join(self.currentData)
if (currentData.translate(self.STRIP_ASCII_SPACES) == '' and
not set([tag.name for tag in self.tagStack]).intersection(
self.builder.preserve_whitespace_tags)):
if '\n' in currentData:
currentData = '\n'
else:
currentData = ' '
self.currentData = []
if self.parse_only and len(self.tagStack) <= 1 and \
(not self.parse_only.text or \
not self.parse_only.search(currentData)):
return
o = containerClass(currentData)
self.object_was_parsed(o)
def object_was_parsed(self, o):
"""Add an object to the parse tree."""
o.setup(self.currentTag, self.previous_element)
if self.previous_element:
self.previous_element.next_element = o
self.previous_element = o
self.currentTag.contents.append(o)
def _popToTag(self, name, nsprefix=None, inclusivePop=True):
"""Pops the tag stack up to and including the most recent
instance of the given tag. If inclusivePop is false, pops the tag
stack up to but *not* including the most recent instance of
the given tag."""
#print "Popping to %s" % name
if name == self.ROOT_TAG_NAME:
return
numPops = 0
mostRecentTag = None
for i in range(len(self.tagStack) - 1, 0, -1):
if (name == self.tagStack[i].name
and nsprefix == self.tagStack[i].nsprefix):
numPops = len(self.tagStack) - i
break
if not inclusivePop:
numPops = numPops - 1
for i in range(0, numPops):
mostRecentTag = self.popTag()
return mostRecentTag
def handle_starttag(self, name, namespace, nsprefix, attrs):
"""Push a start tag on to the stack.
If this method returns None, the tag was rejected by the
SoupStrainer. You should proceed as if the tag had not occured
in the document. For instance, if this was a self-closing tag,
don't call handle_endtag.
"""
# print "Start tag %s: %s" % (name, attrs)
self.endData()
if (self.parse_only and len(self.tagStack) <= 1
and (self.parse_only.text
or not self.parse_only.search_tag(name, attrs))):
return None
tag = Tag(self, self.builder, name, namespace, nsprefix, attrs,
self.currentTag, self.previous_element)
if tag is None:
return tag
if self.previous_element:
self.previous_element.next_element = tag
self.previous_element = tag
self.pushTag(tag)
return tag
def handle_endtag(self, name, nsprefix=None):
#print "End tag: " + name
self.endData()
self._popToTag(name, nsprefix)
def handle_data(self, data):
self.currentData.append(data)
def decode(self, pretty_print=False,
eventual_encoding=DEFAULT_OUTPUT_ENCODING,
formatter="minimal"):
"""Returns a string or Unicode representation of this document.
To get Unicode, pass None for encoding."""
if self.is_xml:
# Print the XML declaration
encoding_part = ''
if eventual_encoding != None:
encoding_part = ' encoding="%s"' % eventual_encoding
prefix = u'<?xml version="1.0"%s?>\n' % encoding_part
else:
prefix = u''
if not pretty_print:
indent_level = None
else:
indent_level = 0
return prefix + super(BeautifulSoup, self).decode(
indent_level, eventual_encoding, formatter)
class BeautifulStoneSoup(BeautifulSoup):
"""Deprecated interface to an XML parser."""
def __init__(self, *args, **kwargs):
kwargs['features'] = 'xml'
warnings.warn(
'The BeautifulStoneSoup class is deprecated. Instead of using '
'it, pass features="xml" into the BeautifulSoup constructor.')
super(BeautifulStoneSoup, self).__init__(*args, **kwargs)
class StopParsing(Exception):
pass
#By default, act as an HTML pretty-printer.
if __name__ == '__main__':
import sys
soup = BeautifulSoup(sys.stdin)
print soup.prettify()
| heldergg/dre | lib/bs4/__init__.py | Python | gpl-3.0 | 12,881 |
#!/usr/bin/env python3
# encoding: utf-8
from nose import with_setup
from tests.utils import *
@with_setup(usual_setup_func, usual_teardown_func)
def test_negative():
create_file('xxx', 'b.png')
create_file('xxx', 'a.png')
create_file('xxx', 'a')
head, *data, footer = run_rmlint('-i')
assert footer['total_files'] == 3
assert footer['total_lint_size'] == 0
assert footer['duplicates'] == 0
@with_setup(usual_setup_func, usual_teardown_func)
def test_positive():
create_file('xxx', 'a.png')
create_file('xxx', 'a.jpg')
head, *data, footer = run_rmlint('-i')
assert footer['total_files'] == 2
assert footer['total_lint_size'] == 3
assert footer['duplicates'] == 1
| SeeSpotRun/rmlint | tests/test_options/test_match_without_extension.py | Python | gpl-3.0 | 718 |
# -*- coding: utf-8 -*-
"""Copyright (c) 2009 Sergio Gabriel Teves
All rights reserved.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from django import template
from django.conf import settings
from django.utils.encoding import smart_str, force_unicode
from django.utils.safestring import mark_safe
import string
import re
register = template.Library()
@register.inclusion_tag('tags/smileysrender.html')
def smileyrender():
list = []
for item in settings.SMILEYS_PACK:
list.append({'code': item[0],'img': settings.SMILEYS_ROOT + item[1]})
return {'list': list}
@register.filter
def smiley(value):
img = '<img src="%s"/>'
for item in settings.SMILEYS_PACK:
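# escape the parentheses in codes such as ':-)' so they match literally in the regex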
code = string.replace(string.replace(item[0], "(", "\("),")","\)")
pattern = r"(?i)%s" % code
value = re.sub(pattern, img % (settings.SMILEYS_ROOT + item[1]),value)
return mark_safe(value)
@register.filter
def bbcode(value):
"""
Generates (X)HTML from string with BBCode "markup".
By using the postmark lib from:
@see: http://code.google.com/p/postmarkup/
"""
try:
from postmarkup import render_bbcode
except ImportError:
if settings.DEBUG:
raise template.TemplateSyntaxError, "Error in {% bbcode %} filter: The Python postmarkup library isn't installed."
return force_unicode(value)
else:
return mark_safe(render_bbcode(value))
bbcode.is_safe = True
@register.filter
def strip_bbcode(value):
"""
Strips BBCode tags from a string
By using the postmark lib from:
@see: http://code.google.com/p/postmarkup/
"""
try:
from postmarkup import strip_bbcode
except ImportError:
if settings.DEBUG:
raise template.TemplateSyntaxError, "Error in {% bbcode %} filter: The Python postmarkup library isn't installed."
return force_unicode(value)
else:
return mark_safe(strip_bbcode(value))
strip_bbcode.is_safe = True
| dahool/vertaal | djangoapp/templatetags/bbcodetag.py | Python | gpl-3.0 | 2634 |
# Author: Nic Wolfe <[email protected]>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
from __future__ import with_statement
from collections import OrderedDict
import datetime
import io
import os.path
import re
from . import helpers as metadata_helpers
from .. import logger
import sg_helpers
from ..indexers import indexer_config
from ..indexers.indexer_config import TVINFO_TVDB, TVINFO_TMDB
from lib.tvinfo_base import TVInfoImage, TVInfoImageType, TVInfoImageSize
from lib.tvinfo_base.exceptions import *
import sickbeard
# noinspection PyPep8Naming
import encodingKludge as ek
from exceptions_helper import ex
from lib.fanart.core import Request as fanartRequest
import lib.fanart as fanart
from lxml_etree import etree
from _23 import filter_iter, list_keys
from six import iteritems, itervalues, string_types
# noinspection PyUnreachableCode
if False:
from typing import AnyStr, Dict, Generator, List, Optional, Tuple, Union
from lib.tvinfo_base import TVInfoShow
from ..tv import TVShow
map_image_types = {
'poster': TVInfoImageType.poster,
'banner': TVInfoImageType.banner,
'fanart': TVInfoImageType.fanart,
'poster_thumb': TVInfoImageType.poster,
'banner_thumb': TVInfoImageType.banner,
}
class ShowInfosDict(OrderedDict):
def __getitem__(self, k):
v = OrderedDict.__getitem__(self, k)
if callable(v):
v = v(k)
OrderedDict.__setitem__(self, k, v)
return v
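# ShowInfosDict defers expensive lookups: a value may be stored as a callable and is
# only invoked (and then cached) on first access. Minimal illustrative sketch of the
# behaviour, with names assumed from gen_show_infos_dict() further below:
#
#     infos = ShowInfosDict()
#     infos[TVINFO_TMDB] = _get_show_info   # stored, not called yet
#     infos[TVINFO_TMDB]                    # first access calls _get_show_info(TVINFO_TMDB),
#                                           # caches the result and returns it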
class GenericMetadata(object):
"""
Base class for all metadata providers. Default behavior is meant to mostly
follow XBMC 12+ metadata standards. Has support for:
- show metadata file
- episode metadata file
- episode thumbnail
- show fanart
- show poster
- show banner
- season thumbnails (poster)
- season thumbnails (banner)
- season all poster
- season all banner
"""
def __init__(self,
show_metadata=False, # type: bool
episode_metadata=False, # type: bool
use_fanart=False, # type: bool
use_poster=False, # type: bool
use_banner=False, # type: bool
episode_thumbnails=False, # type: bool
season_posters=False, # type: bool
season_banners=False, # type: bool
season_all_poster=False, # type: bool
season_all_banner=False # type: bool
):
self.name = "Generic" # type: AnyStr
self._ep_nfo_extension = "nfo" # type: AnyStr
self._show_metadata_filename = "tvshow.nfo" # type: AnyStr
self.fanart_name = "fanart.jpg" # type: AnyStr
self.poster_name = "poster.jpg" # type: AnyStr
self.banner_name = "banner.jpg" # type: AnyStr
self.season_all_poster_name = "season-all-poster.jpg" # type: AnyStr
self.season_all_banner_name = "season-all-banner.jpg" # type: AnyStr
self.show_metadata = show_metadata
self.episode_metadata = episode_metadata
self.fanart = use_fanart
self.poster = use_poster
self.banner = use_banner
self.episode_thumbnails = episode_thumbnails
self.season_posters = season_posters
self.season_banners = season_banners
self.season_all_poster = season_all_poster
self.season_all_banner = season_all_banner
def get_config(self):
# type: (...) -> AnyStr
config_list = [self.show_metadata, self.episode_metadata, self.fanart, self.poster, self.banner,
self.episode_thumbnails, self.season_posters, self.season_banners, self.season_all_poster,
self.season_all_banner]
return '|'.join([str(int(x)) for x in config_list])
def get_id(self):
# type: (...) -> AnyStr
return GenericMetadata.makeID(self.name)
@staticmethod
def makeID(name):
# type: (AnyStr) -> AnyStr
name_id = re.sub("[+]", "plus", name)
name_id = re.sub(r"[^\w\d_]", "_", name_id).lower()
return name_id
def set_config(self, string):
# type: (AnyStr) -> None
config_list = [bool(int(x)) for x in string.split('|')]
self.show_metadata = config_list[0]
self.episode_metadata = config_list[1]
self.fanart = config_list[2]
self.poster = config_list[3]
self.banner = config_list[4]
self.episode_thumbnails = config_list[5]
self.season_posters = config_list[6]
self.season_banners = config_list[7]
self.season_all_poster = config_list[8]
self.season_all_banner = config_list[9]
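    # Illustrative round trip of the config string handled by get_config()/set_config():
    # ten pipe-separated 0/1 flags in the attribute order listed in get_config()
    # (the values below are examples only):
    #
    #     provider.set_config('1|1|0|0|0|1|0|0|0|0')
    #     provider.get_config()   # -> '1|1|0|0|0|1|0|0|0|0'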
def _has_show_metadata(self, show_obj):
        # type: (sickbeard.tv.TVShow) -> bool
result = ek.ek(os.path.isfile, self.get_show_file_path(show_obj))
logger.log(u"Checking if " + self.get_show_file_path(show_obj) + " exists: " + str(result), logger.DEBUG)
return result
def has_episode_metadata(self, ep_obj):
        # type: (sickbeard.tv.TVEpisode) -> bool
result = ek.ek(os.path.isfile, self.get_episode_file_path(ep_obj))
logger.log(u"Checking if " + self.get_episode_file_path(ep_obj) + " exists: " + str(result), logger.DEBUG)
return result
def _has_fanart(self, show_obj):
        # type: (sickbeard.tv.TVShow) -> bool
result = ek.ek(os.path.isfile, self.get_fanart_path(show_obj))
logger.log(u"Checking if " + self.get_fanart_path(show_obj) + " exists: " + str(result), logger.DEBUG)
return result
def _has_poster(self, show_obj):
        # type: (sickbeard.tv.TVShow) -> bool
result = ek.ek(os.path.isfile, self.get_poster_path(show_obj))
logger.log(u"Checking if " + self.get_poster_path(show_obj) + " exists: " + str(result), logger.DEBUG)
return result
def _has_banner(self, show_obj):
        # type: (sickbeard.tv.TVShow) -> bool
result = ek.ek(os.path.isfile, self.get_banner_path(show_obj))
logger.log(u"Checking if " + self.get_banner_path(show_obj) + " exists: " + str(result), logger.DEBUG)
return result
def has_episode_thumb(self, ep_obj):
        # type: (sickbeard.tv.TVEpisode) -> bool
location = self.get_episode_thumb_path(ep_obj)
result = None is not location and ek.ek(os.path.isfile, location)
if location:
logger.log(u"Checking if " + location + " exists: " + str(result), logger.DEBUG)
return result
def _has_season_poster(self, show_obj, season):
        # type: (sickbeard.tv.TVShow, int) -> bool
location = self.get_season_poster_path(show_obj, season)
result = None is not location and ek.ek(os.path.isfile, location)
if location:
logger.log(u"Checking if " + location + " exists: " + str(result), logger.DEBUG)
return result
def _has_season_banner(self, show_obj, season):
        # type: (sickbeard.tv.TVShow, int) -> bool
location = self.get_season_banner_path(show_obj, season)
result = None is not location and ek.ek(os.path.isfile, location)
if location:
logger.log(u"Checking if " + location + " exists: " + str(result), logger.DEBUG)
return result
def _has_season_all_poster(self, show_obj):
        # type: (sickbeard.tv.TVShow) -> bool
result = ek.ek(os.path.isfile, self.get_season_all_poster_path(show_obj))
logger.log(u"Checking if " + self.get_season_all_poster_path(show_obj) + " exists: " + str(result),
logger.DEBUG)
return result
def _has_season_all_banner(self, show_obj):
        # type: (sickbeard.tv.TVShow) -> bool
result = ek.ek(os.path.isfile, self.get_season_all_banner_path(show_obj))
logger.log(u"Checking if " + self.get_season_all_banner_path(show_obj) + " exists: " + str(result),
logger.DEBUG)
return result
@staticmethod
def get_show_year(show_obj, show_info, year_only=True):
# type: (sickbeard.tv.TVShow, Dict, bool) -> Optional[AnyStr]
if None is not getattr(show_info, 'firstaired', None):
try:
first_aired = datetime.datetime.strptime(show_info['firstaired'], '%Y-%m-%d')
if first_aired:
if year_only:
return str(first_aired.year)
return str(first_aired.date())
except (BaseException, Exception):
pass
if isinstance(show_obj, sickbeard.tv.TVShow):
if year_only and show_obj.startyear:
return '%s' % show_obj.startyear
if not show_obj.sxe_ep_obj.get(1, {}).get(1, None):
show_obj.get_all_episodes()
try:
first_ep_obj = show_obj.first_aired_regular_episode
except (BaseException, Exception):
first_ep_obj = None
if isinstance(first_ep_obj, sickbeard.tv.TVEpisode) \
and isinstance(first_ep_obj.airdate, datetime.date) and 1900 < first_ep_obj.airdate.year:
return '%s' % (first_ep_obj.airdate.year, first_ep_obj.airdate)[not year_only]
def get_show_file_path(self, show_obj):
# type: (sickbeard.tv.TVShow) -> AnyStr
return ek.ek(os.path.join, show_obj.location, self._show_metadata_filename)
def get_episode_file_path(self, ep_obj):
# type: (sickbeard.tv.TVEpisode) -> AnyStr
return sg_helpers.replace_extension(ep_obj.location, self._ep_nfo_extension)
def get_fanart_path(self, show_obj):
# type: (sickbeard.tv.TVShow) -> AnyStr
return ek.ek(os.path.join, show_obj.location, self.fanart_name)
def get_poster_path(self, show_obj):
# type: (sickbeard.tv.TVShow) -> AnyStr
return ek.ek(os.path.join, show_obj.location, self.poster_name)
def get_banner_path(self, show_obj):
# type: (sickbeard.tv.TVShow) -> AnyStr
return ek.ek(os.path.join, show_obj.location, self.banner_name)
def get_episode_thumb_path(self, ep_obj):
# type: (sickbeard.tv.TVEpisode) -> Optional[AnyStr]
"""
Returns the path where the episode thumbnail should be stored.
ep_obj: a TVEpisode instance for which to create the thumbnail
"""
if ek.ek(os.path.isfile, ep_obj.location):
tbn_filename = ep_obj.location.rpartition('.')
if '' == tbn_filename[0]:
tbn_filename = ep_obj.location
else:
tbn_filename = tbn_filename[0]
return tbn_filename + '-thumb.jpg'
def get_season_poster_path(self, show_obj, season):
# type: (sickbeard.tv.TVShow, int) -> AnyStr
"""
Returns the full path to the file for a given season poster.
show_obj: a TVShow instance for which to generate the path
season: a season number to be used for the path. Note that season 0
means specials.
"""
# Our specials thumbnail is, well, special
if 0 == season:
season_poster_filename = 'season-specials'
else:
season_poster_filename = 'season' + str(season).zfill(2)
return ek.ek(os.path.join, show_obj.location, season_poster_filename + '-poster.jpg')
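    # For example (show location is illustrative): season 0 maps to
    # '/TV/Some Show/season-specials-poster.jpg' and season 2 maps to
    # '/TV/Some Show/season02-poster.jpg'.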
def get_season_banner_path(self, show_obj, season):
# type: (sickbeard.tv.TVShow, int) -> AnyStr
"""
Returns the full path to the file for a given season banner.
show_obj: a TVShow instance for which to generate the path
season: a season number to be used for the path. Note that season 0
means specials.
"""
# Our specials thumbnail is, well, special
if 0 == season:
season_banner_filename = 'season-specials'
else:
season_banner_filename = 'season' + str(season).zfill(2)
return ek.ek(os.path.join, show_obj.location, season_banner_filename + '-banner.jpg')
def get_season_all_poster_path(self, show_obj):
# type: (sickbeard.tv.TVShow) -> AnyStr
return ek.ek(os.path.join, show_obj.location, self.season_all_poster_name)
def get_season_all_banner_path(self, show_obj):
# type: (sickbeard.tv.TVShow) -> AnyStr
return ek.ek(os.path.join, show_obj.location, self.season_all_banner_name)
def _show_data(self, show_obj):
# type: (sickbeard.tv.TVShow) -> Optional[Union[bool, etree.Element]]
"""
This should be overridden by the implementing class. It should
provide the content of the show metadata file.
"""
return None
@staticmethod
def _valid_show(fetched_show_info, show_obj):
# type: (Dict, sickbeard.tv.TVShow) -> bool
"""
Test the integrity of fetched show data
:param fetched_show_info: the object returned from the tvinfo source
:param show_obj: Show that the fetched data relates to
:return: True if fetched_show_obj is valid data otherwise False
"""
if not (isinstance(fetched_show_info, dict) and
isinstance(getattr(fetched_show_info, 'data', None), (list, dict)) and
'seriesname' in getattr(fetched_show_info, 'data', [])) and \
not hasattr(fetched_show_info, 'seriesname'):
logger.log(u'Show %s not found on %s ' %
(show_obj.name, sickbeard.TVInfoAPI(show_obj.tvid).name), logger.WARNING)
return False
return True
def _ep_data(self, ep_obj):
# type: (sickbeard.tv.TVEpisode) -> Optional[Union[bool, etree.Element]]
"""
This should be overridden by the implementing class. It should
provide the content of the episode metadata file.
"""
return None
def create_show_metadata(self, show_obj, force=False):
# type: (sickbeard.tv.TVShow, bool) -> bool
result = False
if self.show_metadata and show_obj and (not self._has_show_metadata(show_obj) or force):
logger.debug('Metadata provider %s creating show metadata for %s' % (self.name, show_obj.unique_name))
try:
result = self.write_show_file(show_obj)
except BaseTVinfoError as e:
logger.log('Unable to find useful show metadata for %s on %s: %s' % (
self.name, sickbeard.TVInfoAPI(show_obj.tvid).name, ex(e)), logger.WARNING)
return result
def create_episode_metadata(self, ep_obj, force=False):
# type: (sickbeard.tv.TVEpisode, bool) -> bool
result = False
if self.episode_metadata and ep_obj and (not self.has_episode_metadata(ep_obj) or force):
logger.log('Metadata provider %s creating episode metadata for %s' % (self.name, ep_obj.pretty_name()),
logger.DEBUG)
try:
result = self.write_ep_file(ep_obj)
except BaseTVinfoError as e:
logger.log('Unable to find useful episode metadata for %s on %s: %s' % (
self.name, sickbeard.TVInfoAPI(ep_obj.show_obj.tvid).name, ex(e)), logger.WARNING)
return result
def update_show_indexer_metadata(self, show_obj):
# type: (sickbeard.tv.TVShow) -> bool
if self.show_metadata and show_obj and self._has_show_metadata(show_obj):
logger.debug(u'Metadata provider %s updating show indexer metadata file for %s' % (
self.name, show_obj.unique_name))
nfo_file_path = self.get_show_file_path(show_obj)
with ek.ek(io.open, nfo_file_path, 'r', encoding='utf8') as xmlFileObj:
show_xml = etree.ElementTree(file=xmlFileObj)
tvid = show_xml.find('indexer')
prodid = show_xml.find('id')
root = show_xml.getroot()
show_tvid = str(show_obj.tvid)
if None is not tvid:
tvid.text = '%s' % show_tvid
else:
etree.SubElement(root, 'indexer').text = '%s' % show_tvid
show_prodid = str(show_obj.prodid)
if None is not prodid:
prodid.text = '%s' % show_prodid
else:
etree.SubElement(root, 'id').text = '%s' % show_prodid
# Make it purdy
sg_helpers.indent_xml(root)
sg_helpers.write_file(nfo_file_path, show_xml, xmltree=True, utf8=True)
return True
def create_fanart(self, show_obj):
# type: (sickbeard.tv.TVShow) -> bool
if self.fanart and show_obj and not self._has_fanart(show_obj):
logger.debug(u'Metadata provider %s creating fanart for %s' % (self.name, show_obj.unique_name))
return self.save_fanart(show_obj)
return False
def create_poster(self, show_obj):
# type: (sickbeard.tv.TVShow) -> bool
if self.poster and show_obj and not self._has_poster(show_obj):
logger.debug(u'Metadata provider %s creating poster for %s' % (self.name, show_obj.unique_name))
return self.save_poster(show_obj)
return False
def create_banner(self, show_obj):
# type: (sickbeard.tv.TVShow) -> bool
if self.banner and show_obj and not self._has_banner(show_obj):
logger.debug(u'Metadata provider %s creating banner for %s' % (self.name, show_obj.unique_name))
return self.save_banner(show_obj)
return False
def create_episode_thumb(self, ep_obj):
# type: (sickbeard.tv.TVEpisode) -> bool
if self.episode_thumbnails and ep_obj and not self.has_episode_thumb(ep_obj):
logger.log(u"Metadata provider " + self.name + " creating episode thumbnail for " + ep_obj.pretty_name(),
logger.DEBUG)
return self.save_thumbnail(ep_obj)
return False
def create_season_posters(self, show_obj):
# type: (sickbeard.tv.TVShow) -> bool
if self.season_posters and show_obj:
result = []
for season, _ in iteritems(show_obj.sxe_ep_obj):
if not self._has_season_poster(show_obj, season):
logger.debug(u'Metadata provider %s creating season posters for %s' % (
self.name, show_obj.unique_name))
result = result + [self.save_season_posters(show_obj, season)]
return all(result)
return False
def create_season_banners(self, show_obj):
# type: (sickbeard.tv.TVShow) -> bool
if self.season_banners and show_obj:
result = []
for season, _ in iteritems(show_obj.sxe_ep_obj):
if not self._has_season_banner(show_obj, season):
logger.debug(u'Metadata provider %s creating season banners for %s' % (
self.name, show_obj.unique_name))
result = result + [self.save_season_banners(show_obj, season)]
return all(result)
return False
def create_season_all_poster(self, show_obj):
# type: (sickbeard.tv.TVShow) -> bool
if self.season_all_poster and show_obj and not self._has_season_all_poster(show_obj):
logger.debug(u'Metadata provider %s creating season all posters for %s' % (
self.name, show_obj.unique_name))
return self.save_season_all_poster(show_obj)
return False
def create_season_all_banner(self, show_obj):
# type: (sickbeard.tv.TVShow) -> bool
if self.season_all_banner and show_obj and not self._has_season_all_banner(show_obj):
logger.debug(u'Metadata provider %s creating season all banner for %s' % (
self.name, show_obj.unique_name))
return self.save_season_all_banner(show_obj)
return False
@staticmethod
def _get_episode_thumb_url(ep_obj):
# type: (sickbeard.tv.TVEpisode) -> Optional[AnyStr]
"""
        Returns the URL to use for downloading an episode's thumbnail. Uses
        theTVDB.com data directly for TVDB shows, otherwise the episode info
        cached from the show's TV info source.
:param ep_obj: a TVEpisode object for which to grab the thumb URL
:return: URL to thumb
"""
ep_obj_list = [ep_obj] + ep_obj.related_ep_obj
# validate show
from .. import helpers
if not helpers.validate_show(ep_obj.show_obj):
return None
# try all included episodes in case some have thumbs and others don't
for cur_ep_obj in ep_obj_list:
if TVINFO_TVDB == cur_ep_obj.show_obj.tvid:
show_lang = cur_ep_obj.show_obj.lang
try:
tvinfo_config = sickbeard.TVInfoAPI(TVINFO_TVDB).api_params.copy()
tvinfo_config['dvdorder'] = 0 != cur_ep_obj.show_obj.dvdorder
tvinfo_config['no_dummy'] = True
if show_lang and not 'en' == show_lang:
tvinfo_config['language'] = show_lang
t = sickbeard.TVInfoAPI(TVINFO_TVDB).setup(**tvinfo_config)
ep_info = t[cur_ep_obj.show_obj.prodid][cur_ep_obj.season][cur_ep_obj.episode]
except (BaseTVinfoEpisodenotfound, BaseTVinfoSeasonnotfound, TypeError):
ep_info = None
else:
ep_info = helpers.validate_show(cur_ep_obj.show_obj, cur_ep_obj.season, cur_ep_obj.episode)
if not ep_info:
continue
thumb_url = getattr(ep_info, 'filename', None) \
or (isinstance(ep_info, dict) and ep_info.get('filename', None))
if thumb_url not in (None, False, ''):
return thumb_url
return None
def write_show_file(self, show_obj):
# type: (sickbeard.tv.TVShow) -> bool
"""
        Generates and writes show_obj's metadata to the location given by
        get_show_file_path().
        show_obj: TVShow object for which to create the metadata
Note that this method expects that _show_data will return an ElementTree
object. If your _show_data returns data in another format you'll need to
override this method.
"""
data = self._show_data(show_obj)
if not data:
return False
nfo_file_path = self.get_show_file_path(show_obj)
logger.log(u'Writing show metadata file: %s' % nfo_file_path, logger.DEBUG)
return sg_helpers.write_file(nfo_file_path, data, xmltree=True, utf8=True)
def write_ep_file(self, ep_obj):
# type: (sickbeard.tv.TVEpisode) -> bool
"""
        Generates and writes ep_obj's metadata to the location given by
        get_episode_file_path(), i.e. the episode file path with its extension
        replaced by _ep_nfo_extension.
        ep_obj: TVEpisode object for which to create the metadata
Note that this method expects that _ep_data will return an ElementTree
object. If your _ep_data returns data in another format you'll need to
override this method.
"""
data = self._ep_data(ep_obj)
if not data:
return False
nfo_file_path = self.get_episode_file_path(ep_obj)
logger.log(u'Writing episode metadata file: %s' % nfo_file_path, logger.DEBUG)
return sg_helpers.write_file(nfo_file_path, data, xmltree=True, utf8=True)
def save_thumbnail(self, ep_obj):
# type: (sickbeard.tv.TVEpisode) -> bool
"""
Retrieves a thumbnail and saves it to the correct spot. This method should not need to
be overridden by implementing classes, changing get_episode_thumb_path and
_get_episode_thumb_url should suffice.
ep_obj: a TVEpisode object for which to generate a thumbnail
"""
file_path = self.get_episode_thumb_path(ep_obj)
if not file_path:
logger.log(u"Unable to find a file path to use for this thumbnail, not generating it", logger.DEBUG)
return False
thumb_url = self._get_episode_thumb_url(ep_obj)
# if we can't find one then give up
if not thumb_url:
logger.log(u"No thumb is available for this episode, not creating a thumb", logger.DEBUG)
return False
thumb_data = metadata_helpers.getShowImage(thumb_url, show_name=ep_obj.show_obj.name)
result = self._write_image(thumb_data, file_path)
if not result:
return False
for cur_ep_obj in [ep_obj] + ep_obj.related_ep_obj:
cur_ep_obj.hastbn = True
return True
def save_fanart(self, show_obj, which=None):
# type: (sickbeard.tv.TVShow, Optional[AnyStr]) -> bool
"""
Downloads a fanart image and saves it to the filename specified by fanart_name
inside the show's root folder.
show_obj: a TVShow object for which to download fanart
"""
# use the default fanart name
fanart_path = self.get_fanart_path(show_obj)
fanart_data = self._retrieve_show_image('fanart', show_obj, which,
img_cache_type=sickbeard.image_cache.ImageCache.FANART)
if not fanart_data:
logger.log(u"No fanart image was retrieved, unable to write fanart", logger.DEBUG)
return False
return self._write_image(fanart_data, fanart_path)
def save_poster(self, show_obj, which=None):
# type: (sickbeard.tv.TVShow, Optional[AnyStr]) -> bool
"""
Downloads a poster image and saves it to the filename specified by poster_name
inside the show's root folder.
show_obj: a TVShow object for which to download a poster
"""
# use the default poster name
poster_path = self.get_poster_path(show_obj)
poster_data = self._retrieve_show_image('poster', show_obj, which,
img_cache_type=sickbeard.image_cache.ImageCache.POSTER)
if not poster_data:
logger.log(u"No show poster image was retrieved, unable to write poster", logger.DEBUG)
return False
return self._write_image(poster_data, poster_path)
def save_banner(self, show_obj, which=None):
# type: (sickbeard.tv.TVShow, Optional[AnyStr]) -> bool
"""
Downloads a banner image and saves it to the filename specified by banner_name
inside the show's root folder.
show_obj: a TVShow object for which to download a banner
"""
# use the default banner name
banner_path = self.get_banner_path(show_obj)
banner_data = self._retrieve_show_image('banner', show_obj, which,
img_cache_type=sickbeard.image_cache.ImageCache.BANNER)
if not banner_data:
logger.log(u"No show banner image was retrieved, unable to write banner", logger.DEBUG)
return False
return self._write_image(banner_data, banner_path)
def save_season_posters(self, show_obj, season):
# type: (sickbeard.tv.TVShow, int) -> bool
"""
Saves all season posters to disk for the given show.
show_obj: a TVShow object for which to save the season thumbs
Cycles through all seasons and saves the season posters if possible.
"""
season_dict = self._season_image_dict(show_obj, season, 'seasons')
result = []
# Returns a nested dictionary of season art with the season
# number as primary key. It's really overkill but gives the option
# to present to user via ui to pick down the road.
for cur_season in season_dict:
cur_season_art = season_dict[cur_season]
if 0 == len(cur_season_art):
continue
# Just grab whatever's there for now
art_id, season_url = cur_season_art.popitem()
season_poster_file_path = self.get_season_poster_path(show_obj, cur_season)
if not season_poster_file_path:
logger.log(u'Path for season ' + str(cur_season) + ' came back blank, skipping this season',
logger.DEBUG)
continue
season_data = metadata_helpers.getShowImage(season_url, show_name=show_obj.name)
if not season_data:
logger.log(u'No season poster data available, skipping this season', logger.DEBUG)
continue
result = result + [self._write_image(season_data, season_poster_file_path)]
if result:
return all(result)
return False
def save_season_banners(self, show_obj, season):
# type: (sickbeard.tv.TVShow, int) -> bool
"""
Saves all season banners to disk for the given show.
show_obj: a TVShow object for which to save the season thumbs
Cycles through all seasons and saves the season banners if possible.
"""
season_dict = self._season_image_dict(show_obj, season, 'seasonwides')
result = []
# Returns a nested dictionary of season art with the season
# number as primary key. It's really overkill but gives the option
# to present to user via ui to pick down the road.
for cur_season in season_dict:
cur_season_art = season_dict[cur_season]
if 0 == len(cur_season_art):
continue
# Just grab whatever's there for now
art_id, season_url = cur_season_art.popitem()
season_banner_file_path = self.get_season_banner_path(show_obj, cur_season)
if not season_banner_file_path:
logger.log(u'Path for season ' + str(cur_season) + ' came back blank, skipping this season',
logger.DEBUG)
continue
season_data = metadata_helpers.getShowImage(season_url, show_name=show_obj.name)
if not season_data:
logger.log(u'No season banner data available, skipping this season', logger.DEBUG)
continue
result = result + [self._write_image(season_data, season_banner_file_path)]
if result:
return all(result)
return False
def save_season_all_poster(self, show_obj, which=None):
# type: (sickbeard.tv.TVShow, Optional[AnyStr]) -> bool
# use the default season all poster name
poster_path = self.get_season_all_poster_path(show_obj)
poster_data = self._retrieve_show_image('poster', show_obj, which,
img_cache_type=sickbeard.image_cache.ImageCache.POSTER)
if not poster_data:
logger.log(u"No show poster image was retrieved, unable to write season all poster", logger.DEBUG)
return False
return self._write_image(poster_data, poster_path)
def save_season_all_banner(self, show_obj, which=None):
# type: (sickbeard.tv.TVShow, Optional[AnyStr]) -> bool
# use the default season all banner name
banner_path = self.get_season_all_banner_path(show_obj)
banner_data = self._retrieve_show_image('banner', show_obj, which,
img_cache_type=sickbeard.image_cache.ImageCache.BANNER)
if not banner_data:
logger.log(u"No show banner image was retrieved, unable to write season all banner", logger.DEBUG)
return False
return self._write_image(banner_data, banner_path)
@staticmethod
def _write_image(image_data, image_path, force=False):
# type: (bytes, AnyStr, bool) -> bool
"""
Saves the data in image_data to the location image_path. Returns True/False
to represent success or failure.
image_data: binary image data to write to file
image_path: file location to save the image to
"""
# don't bother overwriting it
if not force and ek.ek(os.path.isfile, image_path):
logger.log(u"Image already exists, not downloading", logger.DEBUG)
return False
if not image_data:
logger.log(u"Unable to retrieve image, skipping", logger.WARNING)
return False
image_dir = ek.ek(os.path.dirname, image_path)
try:
if not ek.ek(os.path.isdir, image_dir):
logger.log(u"Metadata dir didn't exist, creating it at " + image_dir, logger.DEBUG)
ek.ek(os.makedirs, image_dir)
sg_helpers.chmod_as_parent(image_dir)
outFile = ek.ek(open, image_path, 'wb')
outFile.write(image_data)
outFile.close()
sg_helpers.chmod_as_parent(image_path)
except IOError as e:
logger.log(
u"Unable to write image to " + image_path + " - are you sure the show folder is writable? " + ex(e),
logger.ERROR)
return False
return True
@staticmethod
def gen_show_infos_dict(show_obj):
# type: (TVShow) -> ShowInfosDict
show_infos = ShowInfosDict()
def _get_show_info(tv_id):
try:
show_lang = show_obj.lang
# There's gotta be a better way of doing this but we don't wanna
# change the language value elsewhere
tvinfo_config = sickbeard.TVInfoAPI(tv_id).api_params.copy()
tvinfo_config['fanart'] = True
tvinfo_config['posters'] = True
tvinfo_config['banners'] = True
tvinfo_config['dvdorder'] = 0 != show_obj.dvdorder
if show_lang and not 'en' == show_lang:
tvinfo_config['language'] = show_lang
t = sickbeard.TVInfoAPI(tv_id).setup(**tvinfo_config)
return t.get_show((show_obj.ids[tv_id]['id'], show_obj.prodid)[tv_src == show_obj.tvid],
load_episodes=False, banners=False, posters=False, fanart=True)
except (BaseTVinfoError, IOError) as e:
logger.log(u"Unable to look up show on " + sickbeard.TVInfoAPI(
tv_id).name + ", not downloading images: " + ex(e), logger.WARNING)
# todo: when tmdb is added as tv source remove the hardcoded TVINFO_TMDB
for tv_src in list(OrderedDict.fromkeys([show_obj.tvid] + list_keys(sickbeard.TVInfoAPI().search_sources) +
[TVINFO_TMDB])):
if tv_src != show_obj.tvid and not show_obj.ids.get(tv_src, {}).get('id'):
continue
if tv_src == show_obj.tvid:
show_infos[tv_src] = _get_show_info(tv_src)
else:
show_infos[tv_src] = _get_show_info
return show_infos
def _retrieve_image_urls(self, show_obj, image_type, show_infos):
# type: (TVShow, AnyStr, TVInfoShow) -> Generator
image_urls, alt_tvdb_urls, fanart_fetched, de_dupe, show_lang = [], [], False, set(), show_obj.lang
def build_url(s_o, image_mode):
_urls = [[], []]
_url = s_o[image_mode]
if _url and _url.startswith('http'):
if 'poster' == image_mode:
_url = re.sub('posters', '_cache/posters', _url)
elif 'banner' == image_mode:
_url = re.sub('graphical', '_cache/graphical', _url)
_urls[0].append(_url)
try:
alt_url = '%swww.%s%s' % re.findall(
r'(https?://)(?:artworks\.)?(thetvdb\.[^/]+/banners/[^\d]+[^.]+)(?:_t)(.*)', _url)[0][0:3]
if alt_url not in _urls[0]:
_urls[1].append(alt_url)
except (IndexError, Exception):
try:
alt_url = '%sartworks.%s_t%s' % re.findall(
r'(https?://)(?:www\.)?(thetvdb\.[^/]+/banners/[^\d]+[^.]+)(.*)', _url)[0][0:3]
if alt_url not in _urls[0]:
_urls[1].append(alt_url)
except (IndexError, Exception):
pass
return _urls
def _get_fanart_tv():
return [_de_dupe((f_item[2], (f_item[2], f_item[2]))[image_type in ('poster', 'banner')])
for f_item in self._fanart_urls_from_show(show_obj, image_type, show_lang) or []]
def _de_dupe(images_list):
# type:(Union[List[AnyStr], AnyStr]) -> Optional[Union[List[AnyStr], AnyStr]]
if not isinstance(images_list, list):
return_list = False
temp_list = [images_list]
else:
return_list = True
temp_list = images_list
images_list = [i for i in temp_list if i not in de_dupe]
[de_dupe.add(_i) for _i in images_list]
if not return_list:
if images_list:
return images_list[0]
return None
return images_list
if image_type.startswith('fanart'):
for r in _get_fanart_tv():
yield r
for tv_src in show_infos:
if not self._valid_show(show_infos[tv_src], show_obj):
continue
if 'poster_thumb' == image_type:
if None is not getattr(show_infos[tv_src], image_type, None):
image_urls, alt_tvdb_urls = build_url(show_infos[tv_src], image_type)
elif None is not getattr(show_infos[tv_src], 'poster', None):
image_urls, alt_tvdb_urls = build_url(show_infos[tv_src], 'poster')
image_urls, alt_tvdb_urls = _de_dupe(image_urls), _de_dupe(alt_tvdb_urls)
for item in image_urls + alt_tvdb_urls:
yield item
elif 'banner_thumb' == image_type:
if None is not getattr(show_infos[tv_src], image_type, None):
image_urls, alt_tvdb_urls = build_url(show_infos[tv_src], image_type)
elif None is not getattr(show_infos[tv_src], 'banner', None):
image_urls, alt_tvdb_urls = build_url(show_infos[tv_src], 'banner')
image_urls, alt_tvdb_urls = _de_dupe(image_urls), _de_dupe(alt_tvdb_urls)
for item in image_urls + alt_tvdb_urls:
yield item
else:
if None is not getattr(show_infos[tv_src], image_type, None):
image_url = show_infos[tv_src][image_type]
if image_type in ('poster', 'banner'):
if None is not getattr(show_infos[tv_src], '%s_thumb' % image_type, None):
thumb_url = show_infos[tv_src]['%s_thumb' % image_type]
else:
thumb_url = image_url
else:
thumb_url = None
if image_url:
r = _de_dupe(((image_url, thumb_url), image_url)[None is thumb_url])
if r:
yield r
# check extra provided images in '_banners' key
if None is not getattr(show_infos[tv_src], '_banners', None) and \
isinstance(show_infos[tv_src]['_banners'].get(image_type, None), (list, dict)):
for res, value in iteritems(show_infos[tv_src]['_banners'][image_type]):
for item in itervalues(value):
thumb = item['thumbnailpath']
if not thumb:
thumb = item['bannerpath']
r = _de_dupe((item['bannerpath'], (item['bannerpath'], thumb))[
image_type in ('poster', 'banner')])
if r:
yield r
# extra images via images property
tvinfo_type = map_image_types.get(image_type)
tvinfo_size = (TVInfoImageSize.original, TVInfoImageSize.medium)['_thumb' in image_type]
if tvinfo_type and getattr(show_infos[tv_src], 'images', None) and \
show_infos[tv_src].images.get(tvinfo_type):
for img in show_infos[tv_src].images[tvinfo_type]: # type: TVInfoImage
for img_size, img_url in iteritems(img.sizes):
if tvinfo_size == img_size:
img_url = _de_dupe(img_url)
if not img_url:
continue
if image_type in ('poster', 'banner'):
thumb_url = img.sizes.get(TVInfoImageSize.medium, img_url)
if thumb_url:
thumb_url = _de_dupe(thumb_url)
if not thumb_url:
thumb_url = img_url
yield (img_url, thumb_url)
elif img_url:
yield img_url
if not image_type.startswith('fanart'):
for r in _get_fanart_tv():
yield r
def _retrieve_show_image(self,
image_type, # type: AnyStr
show_obj, # type: sickbeard.tv.TVShow
which=None, # type: int
return_links=False, # type: bool
show_infos=None, # type: ShowInfosDict
img_cache_type=None # type: int
):
        # type: (...) -> Optional[Union[bytes, List[AnyStr]]]
"""
Gets an image URL from theTVDB.com, fanart.tv and TMDB.com, downloads it and returns the data.
If type is fanart, multiple image src urls are returned instead of a single data image.
image_type: type of image to retrieve (currently supported: fanart, poster, banner, poster_thumb, banner_thumb)
show_obj: a TVShow object to use when searching for the image
which: optional, a specific numbered poster to look for
Returns: the binary image data if available, or else None
"""
if not show_infos:
show_infos = self.gen_show_infos_dict(show_obj)
if 'fanart_all' == image_type:
return_links = True
image_type = 'fanart'
if image_type not in ('poster', 'banner', 'fanart', 'poster_thumb', 'banner_thumb'):
logger.log(u"Invalid image type " + str(image_type) + ", couldn't find it in the " + sickbeard.TVInfoAPI(
show_obj.tvid).name + " object", logger.ERROR)
return
image_urls = self._retrieve_image_urls(show_obj, image_type, show_infos)
if image_urls:
if return_links:
return image_urls
else:
img_data = None
image_cache = sickbeard.image_cache.ImageCache()
for image_url in image_urls or []:
if image_type in ('poster', 'banner'):
if isinstance(image_url, tuple):
image_url = image_url[0]
img_data = metadata_helpers.getShowImage(image_url, which, show_obj.name)
if img_cache_type and img_cache_type != image_cache.which_type(img_data, is_binary=True):
img_data = None
continue
if None is not img_data:
break
if None is not img_data:
return img_data
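    # Sketch of typical calls into this method as defined above (the show object
    # itself is assumed to exist):
    #
    #     data = self._retrieve_show_image('poster', show_obj)       # binary image data or None
    #     urls = self._retrieve_show_image('fanart_all', show_obj)   # list of fanart URLs, since
    #                                                                # 'fanart_all' forces return_links=True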
def _season_image_dict(self, show_obj, season, image_type):
# type: (sickbeard.tv.TVShow, int, AnyStr) -> Dict[int, Dict[int, AnyStr]]
"""
image_type : Type of image to fetch, 'seasons' or 'seasonwides'
image_type type : String
Should return a dict like:
result = {<season number>:
{1: '<url 1>', 2: <url 2>, ...},}
"""
result = {}
try:
# There's gotta be a better way of doing this but we don't wanna
# change the language value elsewhere
tvinfo_config = sickbeard.TVInfoAPI(show_obj.tvid).api_params.copy()
tvinfo_config[image_type] = True
tvinfo_config['dvdorder'] = 0 != show_obj.dvdorder
if 'en' != getattr(show_obj, 'lang', None):
tvinfo_config['language'] = show_obj.lang
t = sickbeard.TVInfoAPI(show_obj.tvid).setup(**tvinfo_config)
tvinfo_obj_show = t[show_obj.prodid]
except (BaseTVinfoError, IOError) as e:
logger.log(u'Unable to look up show on ' + sickbeard.TVInfoAPI(
show_obj.tvid).name + ', not downloading images: ' + ex(e), logger.WARNING)
return result
if not self._valid_show(tvinfo_obj_show, show_obj):
return result
season_images = getattr(tvinfo_obj_show, 'banners', {}).get(
('season', 'seasonwide')['seasonwides' == image_type], {}).get(season, {})
for image_id in season_images:
if season not in result:
result[season] = {}
result[season][image_id] = season_images[image_id]['bannerpath']
return result
def retrieveShowMetadata(self, folder):
# type: (AnyStr) -> Union[Tuple[int, int, AnyStr], Tuple[None, None, None]]
"""
Used only when mass adding Existing Shows,
using previously generated Show metadata to reduce the need to query TVDB.
"""
from sickbeard.indexers.indexer_config import TVINFO_TVDB
empty_return = (None, None, None)
metadata_path = ek.ek(os.path.join, folder, self._show_metadata_filename)
if not ek.ek(os.path.isdir, folder) or not ek.ek(os.path.isfile, metadata_path):
logger.log(u"Can't load the metadata file from " + repr(metadata_path) + ", it doesn't exist", logger.DEBUG)
return empty_return
logger.log(u"Loading show info from metadata file in " + folder, logger.DEBUG)
try:
with ek.ek(io.open, metadata_path, 'r', encoding='utf8') as xmlFileObj:
showXML = etree.ElementTree(file=xmlFileObj)
if None is showXML.findtext('title') \
or all(None is _f for _f in (showXML.find('//uniqueid[@type]'),
showXML.findtext('tvdbid'),
showXML.findtext('id'),
showXML.findtext('indexer'))):
logger.log(u"Invalid info in tvshow.nfo (missing name or id):"
+ str(showXML.findtext('title')) + ' '
+ str(showXML.findtext('indexer')) + ' '
+ str(showXML.findtext('tvdbid')) + ' '
+ str(showXML.findtext('id')))
return empty_return
name = showXML.findtext('title')
try:
tvid = int(showXML.findtext('indexer'))
except (BaseException, Exception):
tvid = None
# handle v2 format of .nfo file
default_source = showXML.find('//uniqueid[@default="true"]')
if None is not default_source:
use_tvid = default_source.attrib.get('type') or tvid
if isinstance(use_tvid, string_types):
use_tvid = {sickbeard.TVInfoAPI(x).config['slug']: x
for x, _ in iteritems(sickbeard.TVInfoAPI().all_sources)}.get(use_tvid)
prodid = sg_helpers.try_int(default_source.text, None)
if use_tvid and None is not prodid:
return use_tvid, prodid, name
prodid = showXML.find('//uniqueid[@type="tvdb"]')
if None is not prodid:
prodid = int(prodid.text)
tvid = TVINFO_TVDB
elif None is not showXML.findtext('tvdbid'):
prodid = int(showXML.findtext('tvdbid'))
tvid = TVINFO_TVDB
elif None is not showXML.findtext('id'):
prodid = int(showXML.findtext('id'))
try:
tvid = TVINFO_TVDB if [s for s in showXML.findall('.//*')
if s.text and -1 != s.text.find('thetvdb.com')] else tvid
except (BaseException, Exception):
pass
else:
logger.log(u"Empty <id> or <tvdbid> field in NFO, unable to find a ID", logger.WARNING)
return empty_return
if None is prodid:
logger.log(u"Invalid Show ID (%s), not using metadata file" % prodid, logger.WARNING)
return empty_return
except (BaseException, Exception) as e:
logger.log(
u"There was an error parsing your existing metadata file: '" + metadata_path + "' error: " + ex(e),
logger.WARNING)
return empty_return
return tvid, prodid, name
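    # Minimal tvshow.nfo contents this parser accepts (ids and titles are illustrative).
    # Newer v2 format with a default uniqueid element:
    #
    #     <tvshow>
    #       <title>Some Show</title>
    #       <uniqueid type="tvdb" default="true">123456</uniqueid>
    #     </tvshow>
    #
    # Older format with a plain <tvdbid> (or <id>) element:
    #
    #     <tvshow>
    #       <title>Some Show</title>
    #       <tvdbid>123456</tvdbid>
    #     </tvshow>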
def _fanart_urls_from_show(self, show_obj, image_type='banner', lang='en', thumb=False):
        # type: (sickbeard.tv.TVShow, AnyStr, AnyStr, bool) -> Optional[List[List[Union[int, AnyStr]]]]
try:
tvdb_id = show_obj.ids.get(indexer_config.TVINFO_TVDB, {}).get('id', None)
if tvdb_id:
return self._fanart_urls(tvdb_id, image_type, lang, thumb)
except (BaseException, Exception):
pass
logger.log(u'Could not find any %s images on Fanart.tv for %s' % (image_type, show_obj.name), logger.DEBUG)
@staticmethod
def _fanart_urls(tvdb_id, image_type='banner', lang='en', thumb=False):
        # type: (int, AnyStr, AnyStr, bool) -> Optional[List[List[Union[int, AnyStr]]]]
types = {'poster': fanart.TYPE.TV.POSTER,
'banner': fanart.TYPE.TV.BANNER,
'fanart': fanart.TYPE.TV.BACKGROUND,
'poster_thumb': fanart.TYPE.TV.POSTER,
'banner_thumb': fanart.TYPE.TV.BANNER}
try:
if tvdb_id:
request = fanartRequest(apikey=sickbeard.FANART_API_KEY, tvdb_id=tvdb_id, types=types[image_type])
resp = request.response()
itemlist = []
dedupe = []
for art in filter_iter(lambda i: 10 < len(i.get('url', '')) and (lang == i.get('lang', '')[0:2]),
# remove "[0:2]" ... to strictly use only data where "en" is at source
resp[types[image_type]]): # type: dict
try:
url = (art['url'], art['url'].replace('/fanart/', '/preview/'))[thumb]
if url not in dedupe:
dedupe += [url]
itemlist += [
[int(art['id']), int(art['likes']), url]
]
except (BaseException, Exception):
continue
itemlist.sort(key=lambda a: (a[1], a[0]), reverse=True)
return itemlist
except (BaseException, Exception):
raise
def retrieve_show_image(self, image_type, show_obj, which=None, return_links=False, show_infos=None):
# type: (AnyStr, sickbeard.tv.TVShow, bool, bool, ShowInfosDict) -> Optional[bytes]
return self._retrieve_show_image(image_type=image_type, show_obj=show_obj, which=which,
return_links=return_links, show_infos=show_infos)
def write_image(self, image_data, image_path, force=False):
# type: (bytes, AnyStr, bool) -> bool
return self._write_image(image_data=image_data, image_path=image_path, force=force)
| SickGear/SickGear | sickbeard/metadata/generic.py | Python | gpl-3.0 | 53,307 |
# -*- coding: utf-8 -*-
#
# Copyright © 2009-2010 Pierre Raybaut
# Licensed under the terms of the MIT License
# (see spyderlib/__init__.py for details)
"""External Python Shell widget: execute Python script in a separate process"""
import sys
import os
import os.path as osp
import socket
from spyderlib.qt.QtGui import QApplication, QMessageBox, QSplitter, QMenu
from spyderlib.qt.QtCore import QProcess, SIGNAL, Qt
from spyderlib.qt.compat import getexistingdirectory
# Local imports
from spyderlib.utils.qthelpers import (get_icon, get_std_icon, add_actions,
create_toolbutton, create_action,
DialogManager)
from spyderlib.utils.environ import RemoteEnvDialog
from spyderlib.utils.programs import get_python_args
from spyderlib.utils.misc import get_python_executable
from spyderlib.baseconfig import (_, get_module_source_path, DEBUG,
MAC_APP_NAME, running_in_mac_app)
from spyderlib.widgets.shell import PythonShellWidget
from spyderlib.widgets.externalshell.namespacebrowser import NamespaceBrowser
from spyderlib.utils.bsdsocket import communicate, write_packet
from spyderlib.widgets.externalshell.baseshell import (ExternalShellBase,
add_pathlist_to_PYTHONPATH)
from spyderlib.widgets.dicteditor import DictEditor
from spyderlib.py3compat import (is_text_string, to_text_string,
to_binary_string)
class ExtPythonShellWidget(PythonShellWidget):
def __init__(self, parent, history_filename, profile=False):
PythonShellWidget.__init__(self, parent, history_filename, profile)
self.path = []
def set_externalshell(self, externalshell):
# ExternalShellBase instance:
self.externalshell = externalshell
def clear_terminal(self):
"""Reimplement ShellBaseWidget method"""
self.clear()
self.emit(SIGNAL("execute(QString)"), "\n")
def execute_lines(self, lines):
"""
        Execute a set of lines as multiple commands
lines: multiple lines of text to be executed as single commands
"""
for line in lines.splitlines():
stripped_line = line.strip()
if stripped_line.startswith('#'):
continue
self.write(line+os.linesep, flush=True)
self.execute_command(line)
# Workaround for Issue 502
            # Emitting wait_for_ready_read was making the console hang
# in Mac OS X
if sys.platform.startswith("darwin"):
import time
time.sleep(0.025)
else:
self.emit(SIGNAL("wait_for_ready_read()"))
self.flush()
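    # For instance (illustrative input), execute_lines("x = 1\n# setup\nprint(x)")
    # echoes and runs "x = 1" and "print(x)" as two separate commands and skips the
    # comment-only line.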
#------ Code completion / Calltips
def ask_monitor(self, command, settings=[]):
sock = self.externalshell.introspection_socket
if sock is None:
return
try:
return communicate(sock, command, settings=settings)
except socket.error:
# Process was just closed
pass
except MemoryError:
# Happens when monitor is not ready on slow machines
pass
def get_dir(self, objtxt):
"""Return dir(object)"""
return self.ask_monitor("__get_dir__('%s')" % objtxt)
def get_globals_keys(self):
"""Return shell globals() keys"""
return self.ask_monitor("get_globals_keys()")
def get_cdlistdir(self):
"""Return shell current directory list dir"""
return self.ask_monitor("getcdlistdir()")
def iscallable(self, objtxt):
"""Is object callable?"""
return self.ask_monitor("__iscallable__('%s')" % objtxt)
def get_arglist(self, objtxt):
"""Get func/method argument list"""
return self.ask_monitor("__get_arglist__('%s')" % objtxt)
def get__doc__(self, objtxt):
"""Get object __doc__"""
return self.ask_monitor("__get__doc____('%s')" % objtxt)
def get_doc(self, objtxt):
"""Get object documentation dictionary"""
return self.ask_monitor("__get_doc__('%s')" % objtxt)
def get_source(self, objtxt):
"""Get object source"""
return self.ask_monitor("__get_source__('%s')" % objtxt)
def is_defined(self, objtxt, force_import=False):
"""Return True if object is defined"""
return self.ask_monitor("isdefined('%s', force_import=%s)"
% (objtxt, force_import))
def get_module_completion(self, objtxt):
"""Return module completion list associated to object name"""
return self.ask_monitor("getmodcomplist('%s', %s)" % \
(objtxt, self.path))
def get_cwd(self):
"""Return shell current working directory"""
return self.ask_monitor("getcwd()")
def set_cwd(self, dirname):
"""Set shell current working directory"""
return self.ask_monitor("setcwd(r'%s')" % dirname)
def get_env(self):
"""Return environment variables: os.environ"""
return self.ask_monitor("getenv()")
def set_env(self, env):
"""Set environment variables via os.environ"""
return self.ask_monitor('setenv()', settings=[env])
def get_syspath(self):
"""Return sys.path[:]"""
return self.ask_monitor("getsyspath()")
def set_spyder_breakpoints(self):
"""Set Spyder breakpoints into debugging session"""
return self.ask_monitor("set_spyder_breakpoints()")
class ExternalPythonShell(ExternalShellBase):
"""External Shell widget: execute Python script in a separate process"""
SHELL_CLASS = ExtPythonShellWidget
def __init__(self, parent=None, fname=None, wdir=None,
interact=False, debug=False, path=[], python_args='',
ipykernel=False, arguments='', stand_alone=None,
umr_enabled=True, umr_namelist=[], umr_verbose=True,
pythonstartup=None, pythonexecutable=None,
monitor_enabled=True, mpl_backend=None, ets_backend='qt4',
qt_api=None, pyqt_api=0,
ignore_sip_setapi_errors=False, merge_output_channels=False,
colorize_sys_stderr=False, autorefresh_timeout=3000,
autorefresh_state=True, light_background=True,
menu_actions=None, show_buttons_inside=True,
show_elapsed_time=True):
assert qt_api in (None, 'pyqt', 'pyside')
self.namespacebrowser = None # namespace browser widget!
self.dialog_manager = DialogManager()
self.stand_alone = stand_alone # stand alone settings (None: plugin)
self.interact = interact
self.is_ipykernel = ipykernel
self.pythonstartup = pythonstartup
self.pythonexecutable = pythonexecutable
self.monitor_enabled = monitor_enabled
self.mpl_backend = mpl_backend
self.ets_backend = ets_backend
self.qt_api = qt_api
self.pyqt_api = pyqt_api
self.ignore_sip_setapi_errors = ignore_sip_setapi_errors
self.merge_output_channels = merge_output_channels
self.colorize_sys_stderr = colorize_sys_stderr
self.umr_enabled = umr_enabled
self.umr_namelist = umr_namelist
self.umr_verbose = umr_verbose
self.autorefresh_timeout = autorefresh_timeout
self.autorefresh_state = autorefresh_state
self.namespacebrowser_button = None
self.cwd_button = None
self.env_button = None
self.syspath_button = None
self.terminate_button = None
self.notification_thread = None
ExternalShellBase.__init__(self, parent=parent, fname=fname, wdir=wdir,
history_filename='history.py',
light_background=light_background,
menu_actions=menu_actions,
show_buttons_inside=show_buttons_inside,
show_elapsed_time=show_elapsed_time)
if self.pythonexecutable is None:
self.pythonexecutable = get_python_executable()
self.python_args = None
if python_args:
assert is_text_string(python_args)
self.python_args = python_args
assert is_text_string(arguments)
self.arguments = arguments
self.connection_file = None
if self.is_ipykernel:
self.interact = False
# Running our custom startup script for IPython kernels:
# (see spyderlib/widgets/externalshell/start_ipython_kernel.py)
self.fname = get_module_source_path(
'spyderlib.widgets.externalshell', 'start_ipython_kernel.py')
self.shell.set_externalshell(self)
self.toggle_globals_explorer(False)
self.interact_action.setChecked(self.interact)
self.debug_action.setChecked(debug)
self.introspection_socket = None
self.is_interpreter = fname is None
if self.is_interpreter:
self.terminate_button.hide()
# Additional python path list
self.path = path
self.shell.path = path
def set_introspection_socket(self, introspection_socket):
self.introspection_socket = introspection_socket
if self.namespacebrowser is not None:
settings = self.namespacebrowser.get_view_settings()
communicate(introspection_socket,
'set_remote_view_settings()', settings=[settings])
def set_autorefresh_timeout(self, interval):
if self.introspection_socket is not None:
try:
communicate(self.introspection_socket,
"set_monitor_timeout(%d)" % interval)
except socket.error:
pass
def closeEvent(self, event):
self.quit_monitor()
ExternalShellBase.closeEvent(self, event)
def get_toolbar_buttons(self):
ExternalShellBase.get_toolbar_buttons(self)
if self.namespacebrowser_button is None \
and self.stand_alone is not None:
self.namespacebrowser_button = create_toolbutton(self,
text=_("Variables"), icon=get_icon('dictedit.png'),
tip=_("Show/hide global variables explorer"),
toggled=self.toggle_globals_explorer, text_beside_icon=True)
if self.terminate_button is None:
self.terminate_button = create_toolbutton(self,
text=_("Terminate"), icon=get_icon('stop.png'),
tip=_("Attempts to stop the process. The process\n"
"may not exit as a result of clicking this\n"
"button (it is given the chance to prompt\n"
"the user for any unsaved files, etc)."))
buttons = []
if self.namespacebrowser_button is not None:
buttons.append(self.namespacebrowser_button)
buttons += [self.run_button, self.terminate_button, self.kill_button,
self.options_button]
return buttons
def get_options_menu(self):
ExternalShellBase.get_options_menu(self)
self.interact_action = create_action(self, _("Interact"))
self.interact_action.setCheckable(True)
self.debug_action = create_action(self, _("Debug"))
self.debug_action.setCheckable(True)
self.args_action = create_action(self, _("Arguments..."),
triggered=self.get_arguments)
run_settings_menu = QMenu(_("Run settings"), self)
add_actions(run_settings_menu,
(self.interact_action, self.debug_action, self.args_action))
self.cwd_button = create_action(self, _("Working directory"),
icon=get_std_icon('DirOpenIcon'),
tip=_("Set current working directory"),
triggered=self.set_current_working_directory)
self.env_button = create_action(self, _("Environment variables"),
icon=get_icon('environ.png'),
triggered=self.show_env)
self.syspath_button = create_action(self,
_("Show sys.path contents"),
icon=get_icon('syspath.png'),
triggered=self.show_syspath)
actions = [run_settings_menu, self.show_time_action, None,
self.cwd_button, self.env_button, self.syspath_button]
if self.menu_actions is not None:
actions += [None]+self.menu_actions
return actions
    def is_interpreter(self):
        """Return True if shellwidget is a Python interpreter"""
        # Note: __init__ rebinds self.is_interpreter to a bool, which shadows this
        # method on instances; it is kept here for API compatibility.
        return self.is_interpreter
def get_shell_widget(self):
if self.stand_alone is None:
return self.shell
else:
self.namespacebrowser = NamespaceBrowser(self)
settings = self.stand_alone
self.namespacebrowser.set_shellwidget(self)
self.namespacebrowser.setup(**settings)
self.connect(self.namespacebrowser, SIGNAL('collapse()'),
lambda: self.toggle_globals_explorer(False))
# Shell splitter
self.splitter = splitter = QSplitter(Qt.Vertical, self)
self.connect(self.splitter, SIGNAL('splitterMoved(int, int)'),
self.splitter_moved)
splitter.addWidget(self.shell)
splitter.setCollapsible(0, False)
splitter.addWidget(self.namespacebrowser)
splitter.setStretchFactor(0, 1)
splitter.setStretchFactor(1, 0)
splitter.setHandleWidth(5)
splitter.setSizes([2, 1])
return splitter
def get_icon(self):
return get_icon('python.png')
def set_buttons_runnning_state(self, state):
ExternalShellBase.set_buttons_runnning_state(self, state)
self.interact_action.setEnabled(not state and not self.is_interpreter)
self.debug_action.setEnabled(not state and not self.is_interpreter)
self.args_action.setEnabled(not state and not self.is_interpreter)
if state:
if self.arguments:
argstr = _("Arguments: %s") % self.arguments
else:
argstr = _("No argument")
else:
argstr = _("Arguments...")
self.args_action.setText(argstr)
self.terminate_button.setVisible(not self.is_interpreter and state)
if not state:
self.toggle_globals_explorer(False)
for btn in (self.cwd_button, self.env_button, self.syspath_button):
btn.setEnabled(state and self.monitor_enabled)
if self.namespacebrowser_button is not None:
self.namespacebrowser_button.setEnabled(state)
def set_namespacebrowser(self, namespacebrowser):
"""
Set namespace browser *widget*
Note: this method is not used in stand alone mode
"""
self.namespacebrowser = namespacebrowser
self.configure_namespacebrowser()
def configure_namespacebrowser(self):
"""Connect the namespace browser to the notification thread"""
if self.notification_thread is not None:
self.connect(self.notification_thread,
SIGNAL('refresh_namespace_browser()'),
self.namespacebrowser.refresh_table)
signal = self.notification_thread.sig_process_remote_view
signal.connect(lambda data:
self.namespacebrowser.process_remote_view(data))
def create_process(self):
self.shell.clear()
self.process = QProcess(self)
if self.merge_output_channels:
self.process.setProcessChannelMode(QProcess.MergedChannels)
else:
self.process.setProcessChannelMode(QProcess.SeparateChannels)
self.connect(self.shell, SIGNAL("wait_for_ready_read()"),
lambda: self.process.waitForReadyRead(250))
# Working directory
if self.wdir is not None:
self.process.setWorkingDirectory(self.wdir)
#-------------------------Python specific-------------------------------
# Python arguments
p_args = ['-u']
if DEBUG >= 3:
p_args += ['-v']
p_args += get_python_args(self.fname, self.python_args,
self.interact_action.isChecked(),
self.debug_action.isChecked(),
self.arguments)
env = [to_text_string(_path)
for _path in self.process.systemEnvironment()]
if self.pythonstartup:
env.append('PYTHONSTARTUP=%s' % self.pythonstartup)
# Set standard input/output encoding for Python consoles
# (IPython handles it on its own)
# See http://stackoverflow.com/q/26312400/438386, specifically
# the comments of Martijn Pieters
if not self.is_ipykernel:
env.append('PYTHONIOENCODING=UTF-8')
# Monitor
if self.monitor_enabled:
env.append('SPYDER_SHELL_ID=%s' % id(self))
env.append('SPYDER_AR_TIMEOUT=%d' % self.autorefresh_timeout)
env.append('SPYDER_AR_STATE=%r' % self.autorefresh_state)
from spyderlib.widgets.externalshell import introspection
introspection_server = introspection.start_introspection_server()
introspection_server.register(self)
notification_server = introspection.start_notification_server()
self.notification_thread = notification_server.register(self)
self.connect(self.notification_thread, SIGNAL('pdb(QString,int)'),
lambda fname, lineno:
self.emit(SIGNAL('pdb(QString,int)'), fname, lineno))
self.connect(self.notification_thread,
SIGNAL('new_ipython_kernel(QString)'),
lambda args:
self.emit(SIGNAL('create_ipython_client(QString)'),
args))
self.connect(self.notification_thread,
SIGNAL('open_file(QString,int)'),
lambda fname, lineno:
self.emit(SIGNAL('open_file(QString,int)'),
fname, lineno))
if self.namespacebrowser is not None:
self.configure_namespacebrowser()
env.append('SPYDER_I_PORT=%d' % introspection_server.port)
env.append('SPYDER_N_PORT=%d' % notification_server.port)
# External modules options
env.append('ETS_TOOLKIT=%s' % self.ets_backend)
if self.mpl_backend:
env.append('MATPLOTLIB_BACKEND=%s' % self.mpl_backend)
if self.qt_api:
env.append('QT_API=%s' % self.qt_api)
env.append('COLORIZE_SYS_STDERR=%s' % self.colorize_sys_stderr)
# # Socket-based alternative (see input hook in sitecustomize.py):
# if self.install_qt_inputhook:
# from PyQt4.QtNetwork import QLocalServer
# self.local_server = QLocalServer()
# self.local_server.listen(str(id(self)))
if self.pyqt_api:
env.append('PYQT_API=%d' % self.pyqt_api)
env.append('IGNORE_SIP_SETAPI_ERRORS=%s'
% self.ignore_sip_setapi_errors)
# User Module Deleter
if self.is_interpreter:
env.append('UMR_ENABLED=%r' % self.umr_enabled)
env.append('UMR_NAMELIST=%s' % ','.join(self.umr_namelist))
env.append('UMR_VERBOSE=%r' % self.umr_verbose)
env.append('MATPLOTLIB_ION=True')
else:
if self.interact:
env.append('MATPLOTLIB_ION=True')
else:
env.append('MATPLOTLIB_ION=False')
# IPython kernel
env.append('IPYTHON_KERNEL=%r' % self.is_ipykernel)
# Add sitecustomize path to path list
pathlist = []
scpath = osp.dirname(osp.abspath(__file__))
pathlist.append(scpath)
# Adding Spyder path
pathlist += self.path
# Adding path list to PYTHONPATH environment variable
add_pathlist_to_PYTHONPATH(env, pathlist)
#-------------------------Python specific-------------------------------
self.connect(self.process, SIGNAL("readyReadStandardOutput()"),
self.write_output)
self.connect(self.process, SIGNAL("readyReadStandardError()"),
self.write_error)
self.connect(self.process, SIGNAL("finished(int,QProcess::ExitStatus)"),
self.finished)
self.connect(self, SIGNAL('finished()'), self.dialog_manager.close_all)
self.connect(self.terminate_button, SIGNAL("clicked()"),
self.process.terminate)
self.connect(self.kill_button, SIGNAL("clicked()"),
self.process.kill)
#-------------------------Python specific-------------------------------
# Fixes for our Mac app:
# 1. PYTHONPATH and PYTHONHOME are set while bootstrapping the app,
# but their values are messing sys.path for external interpreters
# (e.g. EPD) so we need to remove them from the environment.
# 2. Set PYTHONPATH again but without grabbing entries defined in the
# environment (Fixes Issue 1321)
# 3. Remove PYTHONOPTIMIZE from env so that we can have assert
# statements working with our interpreters (See Issue 1281)
if running_in_mac_app():
env.append('SPYDER_INTERPRETER=%s' % self.pythonexecutable)
if MAC_APP_NAME not in self.pythonexecutable:
env = [p for p in env if not (p.startswith('PYTHONPATH') or \
p.startswith('PYTHONHOME'))] # 1.
add_pathlist_to_PYTHONPATH(env, pathlist, drop_env=True) # 2.
env = [p for p in env if not p.startswith('PYTHONOPTIMIZE')] # 3.
self.process.setEnvironment(env)
self.process.start(self.pythonexecutable, p_args)
#-------------------------Python specific-------------------------------
running = self.process.waitForStarted(3000)
self.set_running_state(running)
if not running:
if self.is_ipykernel:
self.emit(SIGNAL("ipython_kernel_start_error(QString)"),
_("The kernel failed to start!! That's all we know... "
"Please close this console and open a new one."))
else:
QMessageBox.critical(self, _("Error"),
_("A Python console failed to start!"))
else:
self.shell.setFocus()
self.emit(SIGNAL('started()'))
return self.process
def finished(self, exit_code, exit_status):
"""Reimplement ExternalShellBase method"""
if self.is_ipykernel and exit_code == 1:
self.emit(SIGNAL("ipython_kernel_start_error(QString)"),
self.shell.get_text_with_eol())
ExternalShellBase.finished(self, exit_code, exit_status)
self.introspection_socket = None
#===============================================================================
# Input/Output
#===============================================================================
def write_error(self):
if os.name == 'nt':
#---This is apparently necessary only on Windows (not sure though):
# emptying standard output buffer before writing error output
self.process.setReadChannel(QProcess.StandardOutput)
if self.process.waitForReadyRead(1):
self.write_output()
self.shell.write_error(self.get_stderr())
QApplication.processEvents()
def send_to_process(self, text):
if not self.is_running():
return
if not is_text_string(text):
text = to_text_string(text)
if self.mpl_backend == 'Qt4Agg' and os.name == 'nt' and \
self.introspection_socket is not None:
communicate(self.introspection_socket,
"toggle_inputhook_flag(True)")
# # Socket-based alternative (see input hook in sitecustomize.py):
# while self.local_server.hasPendingConnections():
# self.local_server.nextPendingConnection().write('go!')
if any([text == cmd for cmd in ['%ls', '%pwd', '%scientific']]) or \
any([text.startswith(cmd) for cmd in ['%cd ', '%clear ']]):
text = 'evalsc(r"%s")\n' % text
if not text.endswith('\n'):
text += '\n'
self.process.write(to_binary_string(text, 'utf8'))
self.process.waitForBytesWritten(-1)
# Eventually write prompt faster (when hitting Enter continuously)
# -- necessary/working on Windows only:
if os.name == 'nt':
self.write_error()
def keyboard_interrupt(self):
if self.introspection_socket is not None:
communicate(self.introspection_socket, "thread.interrupt_main()")
def quit_monitor(self):
if self.introspection_socket is not None:
try:
write_packet(self.introspection_socket, "thread.exit()")
except socket.error:
pass
#===============================================================================
# Globals explorer
#===============================================================================
def toggle_globals_explorer(self, state):
if self.stand_alone is not None:
self.splitter.setSizes([1, 1 if state else 0])
self.namespacebrowser_button.setChecked(state)
if state and self.namespacebrowser is not None:
self.namespacebrowser.refresh_table()
def splitter_moved(self, pos, index):
        self.namespacebrowser_button.setChecked(self.splitter.sizes()[1])
#===============================================================================
# Misc.
#===============================================================================
def set_current_working_directory(self):
"""Set current working directory"""
cwd = self.shell.get_cwd()
self.emit(SIGNAL('redirect_stdio(bool)'), False)
directory = getexistingdirectory(self, _("Select directory"), cwd)
if directory:
self.shell.set_cwd(directory)
self.emit(SIGNAL('redirect_stdio(bool)'), True)
def show_env(self):
"""Show environment variables"""
get_func = self.shell.get_env
set_func = self.shell.set_env
self.dialog_manager.show(RemoteEnvDialog(get_func, set_func))
def show_syspath(self):
"""Show sys.path contents"""
editor = DictEditor()
editor.setup(self.shell.get_syspath(), title="sys.path", readonly=True,
width=600, icon='syspath.png')
self.dialog_manager.show(editor)
| kenshay/ImageScript | ProgramData/SystemFiles/Python/Lib/site-packages/spyderlib/widgets/externalshell/pythonshell.py | Python | gpl-3.0 | 27,927 |
# encoding: utf-8
#
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Contact: Kyle Lahnakoski ([email protected])
#
from __future__ import absolute_import, division, unicode_literals
import argparse as _argparse
import os
import sys
import tempfile
from mo_dots import coalesce, listwrap, unwrap, to_data
from mo_logs import Log
# PARAMETERS MATCH argparse.ArgumentParser.add_argument()
# https://docs.python.org/dev/library/argparse.html#the-add-argument-method
#
# name or flags - Either a name or a list of option strings, e.g. foo or -f, --foo.
# action - The basic type of action to be taken when this argument is encountered at the command line.
# nargs - The number of command-line arguments that should be consumed.
# const - A constant value required by some action and nargs selections.
# default - The value produced if the argument is absent from the command line.
# type - The type to which the command-line argument should be converted.
# choices - A container of the allowable values for the argument.
# required - Whether or not the command-line option may be omitted (optionals only).
# help - A brief description of what the argument does.
# metavar - A name for the argument in usage messages.
# dest - The name of the attribute to be added to the object returned by parse_args().
class _ArgParser(_argparse.ArgumentParser):
def error(self, message):
Log.error("argparse error: {{error}}", error=message)
def argparse(defs, complain=True):
parser = _ArgParser()
for d in listwrap(defs):
args = d.copy()
name = args.name
args.name = None
parser.add_argument(*unwrap(listwrap(name)), **args)
namespace, unknown = parser.parse_known_args()
if unknown and complain:
Log.warning("Ignoring arguments: {{unknown|json}}", unknown=unknown)
output = {k: getattr(namespace, k) for k in vars(namespace)}
return to_data(output)
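# Usage sketch (illustrative only; the --verbose flag and message below are
# examples, not part of this module): each entry in `defs` mirrors the
# add_argument() parameters documented above, with "name" holding the flag
# or a list of aliases.
#
#     args = argparse([{
#         "name": ["--verbose", "-v"],
#         "action": "store_true",
#         "help": "emit more detailed logging",
#     }])
#     if args.verbose:
#         Log.note("verbose logging enabled")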
def read_settings(defs=None, filename=None, default_filename=None, complain=True):
"""
:param filename: Force load a file
:param defs: more arguments you want to accept (see https://docs.python.org/3/library/argparse.html#argparse.ArgumentParser.add_argument)
:param default_filename: A config file from an environment variable (a fallback config file, if no other provided)
    :param complain: Complain about args mismatch
"""
from mo_files import File
import mo_json_config
# READ SETTINGS
defs = listwrap(defs)
defs.append({
"name": ["--config", "--settings", "--settings-file", "--settings_file"],
"help": "path to JSON file with settings",
"type": str,
"dest": "filename",
"default": None,
"required": False,
})
args = argparse(defs, complain)
args.filename = coalesce(
filename,
args.filename if args.filename.endswith(".json") else None,
default_filename,
"./config.json",
)
settings_file = File(args.filename)
if settings_file.exists:
Log.note("Using {{filename}} for configuration", filename=settings_file.abspath)
else:
Log.error(
"Can not read configuration file {{filename}}",
filename=settings_file.abspath,
)
settings = mo_json_config.get_file(settings_file)
settings.args = args
return settings
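# Usage sketch (the extra --threads flag and the "app.json" fallback are purely
# illustrative): callers typically pass any extra argument defs plus a default
# config file, then read plain keys off the returned Data object.
#
#     settings = read_settings(
#         defs=[{"name": "--threads", "type": int, "default": 1}],
#         default_filename="app.json",
#     )
#     Log.note("starting with {{num}} threads", num=settings.args.threads)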
# snagged from https://github.com/pycontribs/tendo/blob/master/tendo/singleton.py (under licence PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2)
class SingleInstance:
"""
ONLY ONE INSTANCE OF PROGRAM ALLOWED
    If you want to prevent your script from running in parallel, just instantiate the SingleInstance() class.
    If there is another instance already running, it will exit the application with the message
"Another instance is already running, quitting.", returning -1 error code.
with SingleInstance():
<your code here>
settings = startup.read_settings()
with SingleInstance(settings.args.filename):
<your code here>
    This option is very useful if you have scripts executed by crontab at small intervals, which could otherwise cause multiple instances to run at once.
Remember that this works by creating a lock file with a filename based on the full path to the script file.
"""
def __init__(self, flavor_id=""):
self.initialized = False
appname = os.path.splitext(os.path.abspath(sys.argv[0]))[0]
basename = ((appname + "-%s") % flavor_id).replace("/", "-").replace(
":", ""
).replace("\\", "-").replace("-.-", "-") + ".lock"
self.lockfile = os.path.normpath(tempfile.gettempdir() + "/" + basename)
def __enter__(self):
Log.note("SingleInstance.lockfile = " + self.lockfile)
if sys.platform == "win32":
try:
# file already exists, we try to remove (in case previous execution was interrupted)
if os.path.exists(self.lockfile):
os.unlink(self.lockfile)
self.fd = os.open(self.lockfile, os.O_CREAT | os.O_EXCL | os.O_RDWR)
except Exception as e:
Log.alarm("Another instance is already running, quitting.")
sys.exit(-1)
else: # non Windows
import fcntl
self.fp = open(self.lockfile, "w")
try:
fcntl.lockf(self.fp, fcntl.LOCK_EX | fcntl.LOCK_NB)
except IOError:
Log.alarm("Another instance is already running, quitting.")
sys.exit(-1)
self.initialized = True
def __exit__(self, type, value, traceback):
self.__del__()
def __del__(self):
temp, self.initialized = self.initialized, False
if not temp:
return
try:
if sys.platform == "win32":
if hasattr(self, "fd"):
os.close(self.fd)
os.unlink(self.lockfile)
else:
import fcntl
fcntl.lockf(self.fp, fcntl.LOCK_UN)
if os.path.isfile(self.lockfile):
os.unlink(self.lockfile)
except Exception as e:
Log.warning("Problem with SingleInstance __del__()", e)
sys.exit(-1)
| klahnakoski/ActiveData | vendor/mo_logs/startup.py | Python | mpl-2.0 | 6,384 |
# -*- encoding: utf-8 -*-
##############################################################################
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
##############################################################################
{
"name": "MPS - Sale forecast",
"version": "1.0",
"depends": [
"base",
"product",
"sale",
"stock",
],
"author": "OdooMRP team",
"contributors": [
"Oihane Crucelaegui <[email protected]>",
"Ainara Galdona <[email protected]>",
"Pedro M. Baeza <[email protected]>",
"Ana Juaristi <[email protected]>",
],
"category": "MPS",
"website": "http://www.odoomrp.com",
"summary": "Sale forecast",
"data": ["security/ir.model.access.csv",
"wizard/sale_forecast_load_view.xml",
"views/sale_view.xml",
],
"installable": True,
"auto_install": False,
}
| StefanRijnhart/odoomrp-wip | procurement_sale_forecast/__openerp__.py | Python | agpl-3.0 | 1,597 |
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd.
# License: GNU General Public License v3. See license.txt
import webnotes
def execute():
webnotes.conn.sql("""drop table if exists `tabDocType Mapper`""")
webnotes.conn.sql("""drop table if exists `tabTable Mapper Detail`""")
webnotes.conn.sql("""drop table if exists `tabField Mapper Detail`""")
webnotes.delete_doc("DocType", "DocType Mapper")
webnotes.delete_doc("DocType", "Table Mapper Detail")
webnotes.delete_doc("DocType", "Field Mapper Detail") | gangadhar-kadam/sapphire_app | patches/july_2013/p01_remove_doctype_mappers.py | Python | agpl-3.0 | 516 |
from .base_executor import ScriptExecutor
from judgeenv import env
class RubyExecutor(ScriptExecutor):
ext = '.rb'
name = 'RUBY'
address_grace = 65536
fs = ['.*\.(?:so|rb$)', '/etc/localtime$', '/dev/urandom$', '/proc/self', '/usr/lib/ruby/gems/']
test_program = 'puts gets'
@classmethod
def get_command(cls):
return env['runtime'].get(cls.name.lower())
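# Usage sketch (the interpreter path below is illustrative, not a real
# configuration):
#
#     # judgeenv.env might contain: {'runtime': {'ruby': '/usr/bin/ruby'}}
#     RubyExecutor.get_command()   # -> '/usr/bin/ruby'
#
# i.e. the command is looked up under the lower-cased executor name, and is
# presumably what the ScriptExecutor base class runs for '.rb' submissions.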
| buhe/judge | executors/ruby.py | Python | agpl-3.0 | 393 |
#!/usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import sys
import os
import imp
import subprocess
import re
import json
import pprint
import shutil
import copy
import StringIO
import logging
import itertools
import numpy
import time
import math
import uuid
import tempfile
from pkg_resources import resource_filename
from optparse import OptionParser
from nupic.database.ClientJobsDAO import ClientJobsDAO
from nupic.support import configuration, initLogging
from nupic.support.unittesthelpers.testcasebase import (unittest,
TestCaseBase as HelperTestCaseBase)
from nupic.swarming import HypersearchWorker
from nupic.swarming.api import getSwarmModelParams, createAndStartSwarm
from nupic.swarming.hypersearch.utils import generatePersistentJobGUID
from nupic.swarming.DummyModelRunner import OPFDummyModelRunner
DEFAULT_JOB_TIMEOUT_SEC = 60 * 2
# Filters _debugOut messages
g_debug = True
# Our setUpModule entry block sets this to an instance of MyTestEnvironment()
g_myEnv = None
# These are the args after using the optparse
# This value for the swarm maturity window gives more repeatable results for
# unit tests that use multiple workers
g_repeatableSwarmMaturityWindow = 5
class MyTestEnvironment(object):
# =======================================================================
def __init__(self):
# Save all command line options
self.options = _ArgParser.parseArgs()
# Create the path to our source experiments
thisFile = __file__
testDir = os.path.split(os.path.abspath(thisFile))[0]
self.testSrcExpDir = os.path.join(testDir, 'experiments')
self.testSrcDataDir = os.path.join(testDir, 'data')
return
class ExperimentTestBaseClass(HelperTestCaseBase):
def setUp(self):
""" Method called to prepare the test fixture. This is called by the
unittest framework immediately before calling the test method; any exception
raised by this method will be considered an error rather than a test
failure. The default implementation does nothing.
"""
pass
def tearDown(self):
""" Method called immediately after the test method has been called and the
result recorded. This is called even if the test method raised an exception,
so the implementation in subclasses may need to be particularly careful
about checking internal state. Any exception raised by this method will be
considered an error rather than a test failure. This method will only be
called if the setUp() succeeds, regardless of the outcome of the test
method. The default implementation does nothing.
"""
# Reset our log items
self.resetExtraLogItems()
def shortDescription(self):
""" Override to force unittest framework to use test method names instead
of docstrings in the report.
"""
return None
def _printTestHeader(self):
""" Print out what test we are running
"""
print "###############################################################"
print "Running test: %s.%s..." % (self.__class__, self._testMethodName)
def _setDataPath(self, env):
""" Put the path to our datasets int the NTA_DATA_PATH variable which
will be used to set the environment for each of the workers
Parameters:
---------------------------------------------------------------------
env: The current environment dict
"""
assert env is not None
# If already have a path, concatenate to it
if "NTA_DATA_PATH" in env:
newPath = "%s:%s" % (env["NTA_DATA_PATH"], g_myEnv.testSrcDataDir)
else:
newPath = g_myEnv.testSrcDataDir
env["NTA_DATA_PATH"] = newPath
def _launchWorkers(self, cmdLine, numWorkers):
""" Launch worker processes to execute the given command line
Parameters:
-----------------------------------------------
cmdLine: The command line for each worker
numWorkers: number of workers to launch
retval: list of workers
"""
workers = []
for i in range(numWorkers):
stdout = tempfile.TemporaryFile()
stderr = tempfile.TemporaryFile()
p = subprocess.Popen(cmdLine, bufsize=1, env=os.environ, shell=True,
stdin=None, stdout=stdout, stderr=stderr)
workers.append(p)
return workers
def _getJobInfo(self, cjDAO, workers, jobID):
""" Return the job info for a job
Parameters:
-----------------------------------------------
cjDAO: client jobs database instance
workers: list of workers for this job
jobID: which job ID
retval: job info
"""
# Get the job info
jobInfo = cjDAO.jobInfo(jobID)
# Since we're running outside of the Nupic engine, we launched the workers
    # ourselves, so see how many are still running and jam the correct status
# into the job info. When using the Nupic engine, it would do this
# for us.
runningCount = 0
for worker in workers:
retCode = worker.poll()
if retCode is None:
runningCount += 1
if runningCount > 0:
status = ClientJobsDAO.STATUS_RUNNING
else:
status = ClientJobsDAO.STATUS_COMPLETED
jobInfo = jobInfo._replace(status=status)
if status == ClientJobsDAO.STATUS_COMPLETED:
jobInfo = jobInfo._replace(
completionReason=ClientJobsDAO.CMPL_REASON_SUCCESS)
return jobInfo
def _generateHSJobParams(self,
expDirectory=None,
hsImp='v2',
maxModels=2,
predictionCacheMaxRecords=None,
dataPath=None,
maxRecords=10):
"""
This method generates a canned Hypersearch Job Params structure based
on some high level options
Parameters:
---------------------------------------------------------------------
predictionCacheMaxRecords:
If specified, determine the maximum number of records in
the prediction cache.
dataPath: When expDirectory is not specified, this is the data file
to be used for the operation. If this value is not specified,
it will use the /extra/qa/hotgym/qa_hotgym.csv.
"""
if expDirectory is not None:
descriptionPyPath = os.path.join(expDirectory, "description.py")
permutationsPyPath = os.path.join(expDirectory, "permutations.py")
permutationsPyContents = open(permutationsPyPath, 'rb').read()
descriptionPyContents = open(descriptionPyPath, 'rb').read()
jobParams = {'persistentJobGUID' : generatePersistentJobGUID(),
'permutationsPyContents': permutationsPyContents,
'descriptionPyContents': descriptionPyContents,
'maxModels': maxModels,
'hsVersion': hsImp}
if predictionCacheMaxRecords is not None:
jobParams['predictionCacheMaxRecords'] = predictionCacheMaxRecords
else:
# Form the stream definition
if dataPath is None:
dataPath = resource_filename("nupic.data",
os.path.join("extra", "qa", "hotgym",
"qa_hotgym.csv"))
streamDef = dict(
version = 1,
info = "TestHypersearch",
streams = [
dict(source="file://%s" % (dataPath),
info=dataPath,
columns=["*"],
first_record=0,
last_record=maxRecords),
],
)
# Generate the experiment description
expDesc = {
"predictionField": "consumption",
"streamDef": streamDef,
"includedFields": [
{ "fieldName": "gym",
"fieldType": "string"
},
{ "fieldName": "consumption",
"fieldType": "float",
"minValue": 0,
"maxValue": 200,
},
],
"iterationCount": maxRecords,
"resetPeriod": {
'weeks': 0,
'days': 0,
'hours': 8,
'minutes': 0,
'seconds': 0,
'milliseconds': 0,
'microseconds': 0,
},
}
jobParams = {
"persistentJobGUID": _generatePersistentJobGUID(),
"description":expDesc,
"maxModels": maxModels,
"hsVersion": hsImp,
}
if predictionCacheMaxRecords is not None:
jobParams['predictionCacheMaxRecords'] = predictionCacheMaxRecords
return jobParams
def _runPermutationsLocal(self, jobParams, loggingLevel=logging.INFO,
env=None, waitForCompletion=True,
continueJobId=None, ignoreErrModels=False):
""" This runs permutations on the given experiment using just 1 worker
in the current process
Parameters:
-------------------------------------------------------------------
jobParams: filled in job params for a hypersearch
loggingLevel: logging level to use in the Hypersearch worker
env: if not None, this is a dict of environment variables
that should be sent to each worker process. These can
aid in re-using the same description/permutations file
for different tests.
waitForCompletion: If True, wait for job to complete before returning
If False, then return resultsInfoForAllModels and
metricResults will be None
continueJobId: If not None, then this is the JobId of a job we want
to continue working on with another worker.
ignoreErrModels: If true, ignore erred models
retval: (jobId, jobInfo, resultsInfoForAllModels, metricResults)
"""
print
print "=================================================================="
print "Running Hypersearch job using 1 worker in current process"
print "=================================================================="
# Plug in modified environment variables
if env is not None:
saveEnvState = copy.deepcopy(os.environ)
os.environ.update(env)
# Insert the job entry into the database in the pre-running state
cjDAO = ClientJobsDAO.get()
if continueJobId is None:
jobID = cjDAO.jobInsert(client='test', cmdLine='<started manually>',
params=json.dumps(jobParams),
alreadyRunning=True, minimumWorkers=1, maximumWorkers=1,
jobType = cjDAO.JOB_TYPE_HS)
else:
jobID = continueJobId
# Command line args.
args = ['ignoreThis', '--jobID=%d' % (jobID),
'--logLevel=%d' % (loggingLevel)]
if continueJobId is None:
args.append('--clearModels')
# Run it in the current process
try:
HypersearchWorker.main(args)
# The dummy model runner will call sys.exit(0) when
# NTA_TEST_sysExitAfterNIterations is set
except SystemExit:
pass
except:
raise
# Restore environment
if env is not None:
os.environ = saveEnvState
# ----------------------------------------------------------------------
# Make sure all models completed successfully
models = cjDAO.modelsGetUpdateCounters(jobID)
modelIDs = [model.modelId for model in models]
if len(modelIDs) > 0:
results = cjDAO.modelsGetResultAndStatus(modelIDs)
else:
results = []
metricResults = []
for result in results:
if result.results is not None:
metricResults.append(json.loads(result.results)[1].values()[0])
else:
metricResults.append(None)
if not ignoreErrModels:
self.assertNotEqual(result.completionReason, cjDAO.CMPL_REASON_ERROR,
"Model did not complete successfully:\n%s" % (result.completionMsg))
# Print worker completion message
jobInfo = cjDAO.jobInfo(jobID)
return (jobID, jobInfo, results, metricResults)
def _runPermutationsCluster(self, jobParams, loggingLevel=logging.INFO,
maxNumWorkers=4, env=None,
waitForCompletion=True, ignoreErrModels=False,
timeoutSec=DEFAULT_JOB_TIMEOUT_SEC):
""" Given a prepared, filled in jobParams for a hypersearch, this starts
the job, waits for it to complete, and returns the results for all
models.
Parameters:
-------------------------------------------------------------------
jobParams: filled in job params for a hypersearch
loggingLevel: logging level to use in the Hypersearch worker
maxNumWorkers: max # of worker processes to use
env: if not None, this is a dict of environment variables
that should be sent to each worker process. These can
aid in re-using the same description/permutations file
for different tests.
waitForCompletion: If True, wait for job to complete before returning
If False, then return resultsInfoForAllModels and
metricResults will be None
ignoreErrModels: If true, ignore erred models
retval: (jobID, jobInfo, resultsInfoForAllModels, metricResults)
"""
print
print "=================================================================="
print "Running Hypersearch job on cluster"
print "=================================================================="
# --------------------------------------------------------------------
# Submit the job
if env is not None and len(env) > 0:
envItems = []
for (key, value) in env.iteritems():
envItems.append("export %s=%s" % (key, value))
envStr = "%s;" % (';'.join(envItems))
else:
envStr = ''
cmdLine = '%s python -m nupic.swarming.HypersearchWorker ' \
'--jobID={JOBID} --logLevel=%d' \
% (envStr, loggingLevel)
cjDAO = ClientJobsDAO.get()
jobID = cjDAO.jobInsert(client='test', cmdLine=cmdLine,
params=json.dumps(jobParams),
minimumWorkers=1, maximumWorkers=maxNumWorkers,
jobType = cjDAO.JOB_TYPE_HS)
    # Launch the workers ourselves if necessary (no nupic engine running).
workerCmdLine = '%s python -m nupic.swarming.HypersearchWorker ' \
'--jobID=%d --logLevel=%d' \
% (envStr, jobID, loggingLevel)
workers = self._launchWorkers(cmdLine=workerCmdLine, numWorkers=maxNumWorkers)
print "Successfully submitted new test job, jobID=%d" % (jobID)
print "Each of %d workers executing the command line: " % (maxNumWorkers), \
cmdLine
if not waitForCompletion:
return (jobID, None, None)
if timeoutSec is None:
timeout=DEFAULT_JOB_TIMEOUT_SEC
else:
timeout=timeoutSec
# --------------------------------------------------------------------
# Wait for it to complete
startTime = time.time()
lastUpdate = time.time()
lastCompleted = 0
lastCompletedWithError = 0
lastCompletedAsOrphan = 0
lastStarted = 0
lastJobStatus = "NA"
lastJobResults = None
lastActiveSwarms = None
lastEngStatus = None
modelIDs = []
print "\n%-15s %-15s %-15s %-15s %-15s" % ("jobStatus", "modelsStarted",
"modelsCompleted", "modelErrs", "modelOrphans")
print "-------------------------------------------------------------------"
while (lastJobStatus != ClientJobsDAO.STATUS_COMPLETED) \
and (time.time() - lastUpdate < timeout):
printUpdate = False
if g_myEnv.options.verbosity == 0:
time.sleep(0.5)
# --------------------------------------------------------------------
# Get the job status
jobInfo = self._getJobInfo(cjDAO, workers, jobID)
if jobInfo.status != lastJobStatus:
if jobInfo.status == ClientJobsDAO.STATUS_RUNNING \
and lastJobStatus != ClientJobsDAO.STATUS_RUNNING:
print "# Swarm job now running. jobID=%s" \
% (jobInfo.jobId)
lastJobStatus = jobInfo.status
printUpdate = True
if g_myEnv.options.verbosity >= 1:
if jobInfo.engWorkerState is not None:
activeSwarms = json.loads(jobInfo.engWorkerState)['activeSwarms']
if activeSwarms != lastActiveSwarms:
#print "-------------------------------------------------------"
print ">> Active swarms:\n ", '\n '.join(activeSwarms)
lastActiveSwarms = activeSwarms
print
if jobInfo.results != lastJobResults:
#print "-------------------------------------------------------"
print ">> New best:", jobInfo.results, "###"
lastJobResults = jobInfo.results
if jobInfo.engStatus != lastEngStatus:
print '>> Status: "%s"' % jobInfo.engStatus
print
lastEngStatus = jobInfo.engStatus
# --------------------------------------------------------------------
# Get the list of models created for this job
modelCounters = cjDAO.modelsGetUpdateCounters(jobID)
if len(modelCounters) != lastStarted:
modelIDs = [x.modelId for x in modelCounters]
lastStarted = len(modelCounters)
printUpdate = True
# --------------------------------------------------------------------
# See how many have finished
if len(modelIDs) > 0:
completed = 0
completedWithError = 0
completedAsOrphan = 0
infos = cjDAO.modelsGetResultAndStatus(modelIDs)
for info in infos:
if info.status == ClientJobsDAO.STATUS_COMPLETED:
completed += 1
if info.completionReason == ClientJobsDAO.CMPL_REASON_ERROR:
completedWithError += 1
if info.completionReason == ClientJobsDAO.CMPL_REASON_ORPHAN:
completedAsOrphan += 1
if completed != lastCompleted \
or completedWithError != lastCompletedWithError \
or completedAsOrphan != lastCompletedAsOrphan:
lastCompleted = completed
lastCompletedWithError = completedWithError
lastCompletedAsOrphan = completedAsOrphan
printUpdate = True
# --------------------------------------------------------------------
# Print update?
if printUpdate:
lastUpdate = time.time()
if g_myEnv.options.verbosity >= 1:
print ">>",
print "%-15s %-15d %-15d %-15d %-15d" % (lastJobStatus, lastStarted,
lastCompleted,
lastCompletedWithError,
lastCompletedAsOrphan)
# ========================================================================
# Final total
print "\n<< %-15s %-15d %-15d %-15d %-15d" % (lastJobStatus, lastStarted,
lastCompleted,
lastCompletedWithError,
lastCompletedAsOrphan)
# Success?
jobInfo = self._getJobInfo(cjDAO, workers, jobID)
if not ignoreErrModels:
self.assertEqual (jobInfo.completionReason,
ClientJobsDAO.CMPL_REASON_SUCCESS)
# Get final model results
models = cjDAO.modelsGetUpdateCounters(jobID)
modelIDs = [model.modelId for model in models]
if len(modelIDs) > 0:
results = cjDAO.modelsGetResultAndStatus(modelIDs)
else:
results = []
metricResults = []
for result in results:
if result.results is not None:
metricResults.append(json.loads(result.results)[1].values()[0])
else:
metricResults.append(None)
if not ignoreErrModels:
self.assertNotEqual(result.completionReason, cjDAO.CMPL_REASON_ERROR,
"Model did not complete successfully:\n%s" % (result.completionMsg))
return (jobID, jobInfo, results, metricResults)
def runPermutations(self, expDirectory, hsImp='v2', maxModels=2,
maxNumWorkers=4, loggingLevel=logging.INFO,
onCluster=False, env=None, waitForCompletion=True,
continueJobId=None, dataPath=None, maxRecords=None,
timeoutSec=None, ignoreErrModels=False,
predictionCacheMaxRecords=None, **kwargs):
""" This runs permutations on the given experiment using just 1 worker
Parameters:
-------------------------------------------------------------------
expDirectory: directory containing the description.py and permutations.py
hsImp: which implementation of Hypersearch to use
maxModels: max # of models to generate
maxNumWorkers: max # of workers to use, N/A if onCluster is False
loggingLevel: logging level to use in the Hypersearch worker
onCluster: if True, run on the Hadoop cluster
env: if not None, this is a dict of environment variables
that should be sent to each worker process. These can
aid in re-using the same description/permutations file
for different tests.
waitForCompletion: If True, wait for job to complete before returning
If False, then return resultsInfoForAllModels and
metricResults will be None
continueJobId: If not None, then this is the JobId of a job we want
to continue working on with another worker.
ignoreErrModels: If true, ignore erred models
maxRecords: This value is passed to the function, _generateHSJobParams(),
to represent the maximum number of records to generate for
the operation.
dataPath: This value is passed to the function, _generateHSJobParams(),
which points to the data file for the operation.
predictionCacheMaxRecords:
If specified, determine the maximum number of records in
the prediction cache.
retval: (jobID, jobInfo, resultsInfoForAllModels, metricResults,
minErrScore)
"""
# Put in the path to our datasets
if env is None:
env = dict()
self._setDataPath(env)
# ----------------------------------------------------------------
# Prepare the jobParams
jobParams = self._generateHSJobParams(expDirectory=expDirectory,
hsImp=hsImp, maxModels=maxModels,
maxRecords=maxRecords,
dataPath=dataPath,
predictionCacheMaxRecords=predictionCacheMaxRecords)
jobParams.update(kwargs)
if onCluster:
(jobID, jobInfo, resultInfos, metricResults) \
= self._runPermutationsCluster(jobParams=jobParams,
loggingLevel=loggingLevel,
maxNumWorkers=maxNumWorkers,
env=env,
waitForCompletion=waitForCompletion,
ignoreErrModels=ignoreErrModels,
timeoutSec=timeoutSec)
else:
(jobID, jobInfo, resultInfos, metricResults) \
= self._runPermutationsLocal(jobParams=jobParams,
loggingLevel=loggingLevel,
env=env,
waitForCompletion=waitForCompletion,
continueJobId=continueJobId,
ignoreErrModels=ignoreErrModels)
if not waitForCompletion:
return (jobID, jobInfo, resultInfos, metricResults, None)
# Print job status
print "\n------------------------------------------------------------------"
print "Hadoop completion reason: %s" % (jobInfo.completionReason)
print "Worker completion reason: %s" % (jobInfo.workerCompletionReason)
print "Worker completion msg: %s" % (jobInfo.workerCompletionMsg)
if jobInfo.engWorkerState is not None:
print "\nEngine worker state:"
print "---------------------------------------------------------------"
pprint.pprint(json.loads(jobInfo.engWorkerState))
# Print out best results
minErrScore=None
metricAmts = []
for result in metricResults:
if result is None:
metricAmts.append(numpy.inf)
else:
metricAmts.append(result)
metricAmts = numpy.array(metricAmts)
if len(metricAmts) > 0:
minErrScore = metricAmts.min()
minModelID = resultInfos[metricAmts.argmin()].modelId
# Get model info
cjDAO = ClientJobsDAO.get()
modelParams = cjDAO.modelsGetParams([minModelID])[0].params
print "Model params for best model: \n%s" \
% (pprint.pformat(json.loads(modelParams)))
print "Best model result: %f" % (minErrScore)
else:
print "No models finished"
return (jobID, jobInfo, resultInfos, metricResults, minErrScore)
class OneNodeTests(ExperimentTestBaseClass):
"""
"""
# AWS tests attribute required for tagging via automatic test discovery via
# nosetests
engineAWSClusterTest=True
def setUp(self):
super(OneNodeTests, self).setUp()
if not g_myEnv.options.runInProc:
self.skipTest("Skipping One Node test since runInProc is not specified")
def testSimpleV2(self, onCluster=False, env=None, **kwargs):
"""
Try running simple permutations
"""
self._printTestHeader()
expDir = os.path.join(g_myEnv.testSrcExpDir, 'simpleV2')
# Test it out
if env is None:
env = dict()
env["NTA_TEST_numIterations"] = '99'
env["NTA_CONF_PROP_nupic_hypersearch_swarmMaturityWindow"] = \
'%d' % (g_repeatableSwarmMaturityWindow)
(jobID, jobInfo, resultInfos, metricResults, minErrScore) \
= self.runPermutations(expDir,
hsImp='v2',
loggingLevel=g_myEnv.options.logLevel,
onCluster=onCluster,
env=env,
maxModels=None,
**kwargs)
self.assertEqual(minErrScore, 20)
self.assertLess(len(resultInfos), 350)
return
def testDeltaV2(self, onCluster=False, env=None, **kwargs):
""" Try running a simple permutations with delta encoder
Test which tests the delta encoder. Runs a swarm of the sawtooth dataset
With a functioning delta encoder this should give a perfect result
DEBUG: disabled temporarily because this test takes too long!!!
"""
self._printTestHeader()
expDir = os.path.join(g_myEnv.testSrcExpDir, 'delta')
# Test it out
if env is None:
env = dict()
env["NTA_CONF_PROP_nupic_hypersearch_swarmMaturityWindow"] = \
'%d' % (g_repeatableSwarmMaturityWindow)
env["NTA_TEST_exitAfterNModels"] = str(20)
(jobID, jobInfo, resultInfos, metricResults, minErrScore) \
= self.runPermutations(expDir,
hsImp='v2',
loggingLevel=g_myEnv.options.logLevel,
onCluster=onCluster,
env=env,
maxModels=None,
**kwargs)
self.assertLess(minErrScore, 0.002)
return
def testSimpleV2NoSpeculation(self, onCluster=False, env=None, **kwargs):
""" Try running a simple permutations
"""
self._printTestHeader()
expDir = os.path.join(g_myEnv.testSrcExpDir, 'simpleV2')
# Test it out
if env is None:
env = dict()
env["NTA_TEST_numIterations"] = '99'
env["NTA_CONF_PROP_nupic_hypersearch_swarmMaturityWindow"] = \
'%d' % (g_repeatableSwarmMaturityWindow)
(jobID, jobInfo, resultInfos, metricResults, minErrScore) \
= self.runPermutations(expDir,
hsImp='v2',
loggingLevel=g_myEnv.options.logLevel,
onCluster=onCluster,
env=env,
maxModels=None,
speculativeParticles=False,
**kwargs)
self.assertEqual(minErrScore, 20)
self.assertGreater(len(resultInfos), 1)
self.assertLess(len(resultInfos), 350)
return
def testCLAModelV2(self, onCluster=False, env=None, maxModels=2,
**kwargs):
""" Try running a simple permutations using an actual CLA model, not
a dummy
"""
self._printTestHeader()
expDir = os.path.join(g_myEnv.testSrcExpDir, 'dummyV2')
# Test it out
if env is None:
env = dict()
(jobID, jobInfo, resultInfos, metricResults, minErrScore) \
= self.runPermutations(expDir,
hsImp='v2',
loggingLevel=g_myEnv.options.logLevel,
onCluster=onCluster,
env=env,
maxModels=maxModels,
**kwargs)
self.assertEqual(len(resultInfos), maxModels)
return
def testCLAMultistepModel(self, onCluster=False, env=None, maxModels=2,
**kwargs):
""" Try running a simple permutations using an actual CLA model, not
a dummy
"""
self._printTestHeader()
expDir = os.path.join(g_myEnv.testSrcExpDir, 'simple_cla_multistep')
# Test it out
if env is None:
env = dict()
(jobID, jobInfo, resultInfos, metricResults, minErrScore) \
= self.runPermutations(expDir,
hsImp='v2',
loggingLevel=g_myEnv.options.logLevel,
onCluster=onCluster,
env=env,
maxModels=maxModels,
**kwargs)
self.assertEqual(len(resultInfos), maxModels)
return
def testLegacyCLAMultistepModel(self, onCluster=False, env=None, maxModels=2,
**kwargs):
""" Try running a simple permutations using an actual CLA model, not
a dummy. This is a legacy CLA multi-step model that doesn't declare a
separate 'classifierOnly' encoder for the predicted field.
"""
self._printTestHeader()
expDir = os.path.join(g_myEnv.testSrcExpDir, 'legacy_cla_multistep')
# Test it out
if env is None:
env = dict()
(jobID, jobInfo, resultInfos, metricResults, minErrScore) \
= self.runPermutations(expDir,
hsImp='v2',
loggingLevel=g_myEnv.options.logLevel,
onCluster=onCluster,
env=env,
maxModels=maxModels,
**kwargs)
self.assertEqual(len(resultInfos), maxModels)
return
def testFilterV2(self, onCluster=False):
""" Try running a simple permutations
"""
self._printTestHeader()
expDir = os.path.join(g_myEnv.testSrcExpDir, 'simpleV2')
    # Don't allow the consumption encoder maxval to get to its optimum
# value (which is 250). This increases our errScore by +25.
env = dict()
env["NTA_TEST_maxvalFilter"] = '225'
env["NTA_CONF_PROP_nupic_hypersearch_swarmMaturityWindow"] = '6'
(jobID, jobInfo, resultInfos, metricResults, minErrScore) \
= self.runPermutations(expDir,
hsImp='v2',
loggingLevel=g_myEnv.options.logLevel,
onCluster=onCluster,
env=env,
maxModels=None)
self.assertEqual(minErrScore, 45)
self.assertLess(len(resultInfos), 400)
return
def testLateWorker(self, onCluster=False):
""" Try running a simple permutations where a worker comes in late,
after the some models have already been evaluated
"""
self._printTestHeader()
expDir = os.path.join(g_myEnv.testSrcExpDir, 'simpleV2')
env = dict()
env["NTA_CONF_PROP_nupic_hypersearch_swarmMaturityWindow"] = \
'%d' % (g_repeatableSwarmMaturityWindow)
env["NTA_TEST_exitAfterNModels"] = '100'
(jobID, jobInfo, resultInfos, metricResults, minErrScore) \
= self.runPermutations(expDir,
hsImp='v2',
loggingLevel=g_myEnv.options.logLevel,
maxModels=None,
onCluster=onCluster,
env=env,
waitForCompletion=True,
)
self.assertEqual(len(resultInfos), 100)
# Run another worker the rest of the way
env.pop("NTA_TEST_exitAfterNModels")
(jobID, jobInfo, resultInfos, metricResults, minErrScore) \
= self.runPermutations(expDir,
hsImp='v2',
loggingLevel=g_myEnv.options.logLevel,
maxModels=None,
onCluster=onCluster,
env=env,
waitForCompletion=True,
continueJobId = jobID,
)
self.assertEqual(minErrScore, 20)
self.assertLess(len(resultInfos), 350)
return
def testOrphanedModel(self, onCluster=False, modelRange=(0,1)):
""" Run a worker on a model for a while, then have it exit before the
model finishes. Then, run another worker, which should detect the orphaned
model.
"""
self._printTestHeader()
expDir = os.path.join(g_myEnv.testSrcExpDir, 'simpleV2')
# NTA_TEST_numIterations is watched by the dummyModelParams() method of
# the permutations file.
# NTA_TEST_sysExitModelRange is watched by the dummyModelParams() method of
# the permutations file. It tells it to do a sys.exit() after so many
# iterations.
# We increase the swarm maturity window to make our unit tests more
# repeatable. There is an element of randomness as to which model
# parameter combinations get evaluated first when running with
    # multiple workers, so this ensures that we can find the "best" model
# that we expect to see in our unit tests.
env = dict()
env["NTA_TEST_numIterations"] = '2'
env["NTA_TEST_sysExitModelRange"] = '%d,%d' % (modelRange[0], modelRange[1])
env["NTA_CONF_PROP_nupic_hypersearch_swarmMaturityWindow"] \
= '%d' % (g_repeatableSwarmMaturityWindow)
(jobID, jobInfo, resultInfos, metricResults, minErrScore) \
= self.runPermutations(expDir,
hsImp='v2',
loggingLevel=g_myEnv.options.logLevel,
maxModels=300,
onCluster=onCluster,
env=env,
waitForCompletion=False,
)
# At this point, we should have 1 model, still running
(beg, end) = modelRange
self.assertEqual(len(resultInfos), end)
numRunning = 0
for res in resultInfos:
if res.status == ClientJobsDAO.STATUS_RUNNING:
numRunning += 1
self.assertEqual(numRunning, 1)
# Run another worker the rest of the way, after delaying enough time to
# generate an orphaned model
env["NTA_CONF_PROP_nupic_hypersearch_modelOrphanIntervalSecs"] = '1'
time.sleep(2)
# Here we launch another worker to finish up the job. We set the maxModels
    # to 300 (200 or so should be enough); in case the orphan detection is
    # not working, this makes sure we don't loop excessively long.
# With orphan detection working, we should detect that the first model
# would never complete, orphan it, and create a new one in the 1st sprint.
# Without orphan detection working, we will wait forever for the 1st sprint
# to finish, and will create a bunch of gen 1, then gen2, then gen 3, etc.
# and gen 0 will never finish, so the swarm will never mature.
(jobID, jobInfo, resultInfos, metricResults, minErrScore) \
= self.runPermutations(expDir,
hsImp='v2',
loggingLevel=g_myEnv.options.logLevel,
maxModels=300,
onCluster=onCluster,
env=env,
waitForCompletion=True,
continueJobId = jobID,
)
self.assertEqual(minErrScore, 20)
self.assertLess(len(resultInfos), 350)
return
def testOrphanedModelGen1(self):
""" Run a worker on a model for a while, then have it exit before a
model finishes in gen index 2. Then, run another worker, which should detect
the orphaned model.
"""
self._printTestHeader()
inst = OneNodeTests(self._testMethodName)
return inst.testOrphanedModel(modelRange=(10,11))
def testErredModel(self, onCluster=False, modelRange=(6,7)):
""" Run with 1 or more models generating errors
"""
self._printTestHeader()
expDir = os.path.join(g_myEnv.testSrcExpDir, 'simpleV2')
# We increase the swarm maturity window to make our unit tests more
# repeatable. There is an element of randomness as to which model
# parameter combinations get evaluated first when running with
    # multiple workers, so this ensures that we can find the "best" model
# that we expect to see in our unit tests.
env = dict()
env["NTA_TEST_errModelRange"] = '%d,%d' % (modelRange[0], modelRange[1])
env["NTA_CONF_PROP_nupic_hypersearch_swarmMaturityWindow"] \
= '%d' % (g_repeatableSwarmMaturityWindow)
(jobID, jobInfo, resultInfos, metricResults, minErrScore) \
= self.runPermutations(expDir,
hsImp='v2',
loggingLevel=g_myEnv.options.logLevel,
onCluster=onCluster,
env=env,
maxModels=None,
ignoreErrModels=True
)
self.assertEqual(minErrScore, 20)
self.assertLess(len(resultInfos), 350)
return
def testJobFailModel(self, onCluster=False, modelRange=(6,7)):
""" Run with 1 or more models generating jobFail exception
"""
self._printTestHeader()
expDir = os.path.join(g_myEnv.testSrcExpDir, 'simpleV2')
# We increase the swarm maturity window to make our unit tests more
# repeatable. There is an element of randomness as to which model
# parameter combinations get evaluated first when running with
    # multiple workers, so this ensures that we can find the "best" model
# that we expect to see in our unit tests.
env = dict()
env["NTA_TEST_jobFailErr"] = 'True'
maxNumWorkers = 4
(jobID, jobInfo, resultInfos, metricResults, minErrScore) \
= self.runPermutations(expDir,
hsImp='v2',
loggingLevel=g_myEnv.options.logLevel,
onCluster=onCluster,
env=env,
maxModels=None,
maxNumWorkers=maxNumWorkers,
ignoreErrModels=True
)
# Make sure workerCompletionReason was error
self.assertEqual (jobInfo.workerCompletionReason,
ClientJobsDAO.CMPL_REASON_ERROR)
self.assertLess (len(resultInfos), maxNumWorkers+1)
return
def testTooManyErredModels(self, onCluster=False, modelRange=(5,10)):
""" Run with too many models generating errors
"""
self._printTestHeader()
expDir = os.path.join(g_myEnv.testSrcExpDir, 'simpleV2')
# We increase the swarm maturity window to make our unit tests more
# repeatable. There is an element of randomness as to which model
# parameter combinations get evaluated first when running with
    # multiple workers, so this ensures that we can find the "best" model
# that we expect to see in our unit tests.
env = dict()
env["NTA_TEST_errModelRange"] = '%d,%d' % (modelRange[0], modelRange[1])
env["NTA_CONF_PROP_nupic_hypersearch_swarmMaturityWindow"] \
= '%d' % (g_repeatableSwarmMaturityWindow)
(jobID, jobInfo, resultInfos, metricResults, minErrScore) \
= self.runPermutations(expDir,
hsImp='v2',
loggingLevel=g_myEnv.options.logLevel,
onCluster=onCluster,
env=env,
maxModels=None,
ignoreErrModels=True
)
self.assertEqual (jobInfo.workerCompletionReason,
ClientJobsDAO.CMPL_REASON_ERROR)
return
def testFieldThreshold(self, onCluster=False, env=None, **kwargs):
""" Test minimum field contribution threshold for a field to be included in further sprints
"""
self._printTestHeader()
expDir = os.path.join(g_myEnv.testSrcExpDir, 'field_threshold_temporal')
# Test it out
if env is None:
env = dict()
env["NTA_TEST_numIterations"] = '99'
env["NTA_CONF_PROP_nupic_hypersearch_swarmMaturityWindow"] = \
'%d' % (g_repeatableSwarmMaturityWindow)
env["NTA_CONF_PROP_nupic_hypersearch_max_field_branching"] = \
'%d' % (0)
env["NTA_CONF_PROP_nupic_hypersearch_minParticlesPerSwarm"] = \
'%d' % (2)
env["NTA_CONF_PROP_nupic_hypersearch_min_field_contribution"] = \
'%f' % (100)
(jobID, jobInfo, resultInfos, metricResults, minErrScore) \
= self.runPermutations(expDir,
hsImp='v2',
loggingLevel=g_myEnv.options.logLevel,
onCluster=onCluster,
env=env,
maxModels=None,
dummyModel={'iterations':200},
**kwargs)
# Get the field contributions from the hypersearch results dict
cjDAO = ClientJobsDAO.get()
jobResultsStr = cjDAO.jobGetFields(jobID, ['results'])[0]
jobResults = json.loads(jobResultsStr)
bestModel = cjDAO.modelsInfo([jobResults["bestModel"]])[0]
params = json.loads(bestModel.params)
prefix = 'modelParams|sensorParams|encoders|'
expectedSwarmId = prefix + ('.' + prefix).join([
'attendance',
'visitor_winloss'])
self.assertEqual(params["particleState"]["swarmId"],
expectedSwarmId,
"Actual swarm id = %s\nExpcted swarm id = %s" \
% (params["particleState"]["swarmId"],
expectedSwarmId))
self.assertEqual( bestModel.optimizedMetric, 75)
#==========================================================================
env["NTA_CONF_PROP_nupic_hypersearch_min_field_contribution"] = \
'%f' % (20)
(jobID, jobInfo, resultInfos, metricResults, minErrScore) \
= self.runPermutations(expDir,
hsImp='v2',
loggingLevel=g_myEnv.options.logLevel,
onCluster=onCluster,
env=env,
maxModels=None,
dummyModel={'iterations':200},
**kwargs)
# Get the field contributions from the hypersearch results dict
cjDAO = ClientJobsDAO.get()
jobResultsStr = cjDAO.jobGetFields(jobID, ['results'])[0]
jobResults = json.loads(jobResultsStr)
bestModel = cjDAO.modelsInfo([jobResults["bestModel"]])[0]
params = json.loads(bestModel.params)
prefix = 'modelParams|sensorParams|encoders|'
expectedSwarmId = prefix + ('.' + prefix).join([
'attendance',
'home_winloss',
'visitor_winloss'])
self.assertEqual(params["particleState"]["swarmId"],
expectedSwarmId,
"Actual swarm id = %s\nExpcted swarm id = %s" \
% (params["particleState"]["swarmId"],
expectedSwarmId))
assert bestModel.optimizedMetric == 55, bestModel.optimizedMetric
#==========================================================================
# Find best combo possible
env["NTA_CONF_PROP_nupic_hypersearch_min_field_contribution"] = \
'%f' % (0.0)
(jobID, jobInfo, resultInfos, metricResults, minErrScore) \
= self.runPermutations(expDir,
hsImp='v2',
loggingLevel=g_myEnv.options.logLevel,
onCluster=onCluster,
env=env,
maxModels=None,
dummyModel={'iterations':200},
**kwargs)
# Get the field contributions from the hypersearch results dict
cjDAO = ClientJobsDAO.get()
jobResultsStr = cjDAO.jobGetFields(jobID, ['results'])[0]
jobResults = json.loads(jobResultsStr)
bestModel = cjDAO.modelsInfo([jobResults["bestModel"]])[0]
params = json.loads(bestModel.params)
prefix = 'modelParams|sensorParams|encoders|'
expectedSwarmId = prefix + ('.' + prefix).join([
'attendance',
'home_winloss',
'precip',
'timestamp_dayOfWeek',
'timestamp_timeOfDay',
'visitor_winloss'])
self.assertEqual(params["particleState"]["swarmId"],
expectedSwarmId,
"Actual swarm id = %s\nExpcted swarm id = %s" \
% (params["particleState"]["swarmId"],
expectedSwarmId))
assert bestModel.optimizedMetric == 25, bestModel.optimizedMetric
def testSpatialClassification(self, onCluster=False, env=None, **kwargs):
"""
Try running a spatial classification swarm
"""
self._printTestHeader()
expDir = os.path.join(g_myEnv.testSrcExpDir, 'spatial_classification')
# Test it out
if env is None:
env = dict()
env["NTA_TEST_numIterations"] = '99'
env["NTA_CONF_PROP_nupic_hypersearch_swarmMaturityWindow"] = \
'%d' % (g_repeatableSwarmMaturityWindow)
(jobID, jobInfo, resultInfos, metricResults, minErrScore) \
= self.runPermutations(expDir,
hsImp='v2',
loggingLevel=g_myEnv.options.logLevel,
onCluster=onCluster,
env=env,
maxModels=None,
**kwargs)
self.assertEqual(minErrScore, 20)
self.assertLess(len(resultInfos), 350)
# Check the expected field contributions
cjDAO = ClientJobsDAO.get()
jobResultsStr = cjDAO.jobGetFields(jobID, ['results'])[0]
jobResults = json.loads(jobResultsStr)
bestModel = cjDAO.modelsInfo([jobResults["bestModel"]])[0]
params = json.loads(bestModel.params)
actualFieldContributions = jobResults['fieldContributions']
print "Actual field contributions:", \
pprint.pformat(actualFieldContributions)
expectedFieldContributions = {
'address': 100 * (90.0-30)/90.0,
'gym': 100 * (90.0-40)/90.0,
'timestamp_dayOfWeek': 100 * (90.0-80.0)/90.0,
'timestamp_timeOfDay': 100 * (90.0-90.0)/90.0,
}
for key, value in expectedFieldContributions.items():
self.assertEqual(actualFieldContributions[key], value,
"actual field contribution from field '%s' does not "
"match the expected value of %f" % (key, value))
# Check the expected best encoder combination
prefix = 'modelParams|sensorParams|encoders|'
expectedSwarmId = prefix + ('.' + prefix).join([
'address',
'gym'])
self.assertEqual(params["particleState"]["swarmId"],
expectedSwarmId,
"Actual swarm id = %s\nExpcted swarm id = %s" \
% (params["particleState"]["swarmId"],
expectedSwarmId))
return
def testAlwaysInputPredictedField(self, onCluster=False, env=None,
**kwargs):
"""
Run a swarm where 'inputPredictedField' is set in the permutations
file. The dummy model for this swarm is designed to give the lowest
error when the predicted field is INCLUDED, so make sure we don't get
this low error
"""
self._printTestHeader()
expDir = os.path.join(g_myEnv.testSrcExpDir, 'input_predicted_field')
# Test it out not requiring the predicted field. This should yield a
# low error score
if env is None:
env = dict()
env["NTA_TEST_inputPredictedField"] = "auto"
env["NTA_TEST_numIterations"] = '99'
env["NTA_CONF_PROP_nupic_hypersearch_minParticlesPerSwarm"] = \
'%d' % (2)
env["NTA_CONF_PROP_nupic_hypersearch_swarmMaturityWindow"] = \
'%d' % (g_repeatableSwarmMaturityWindow)
(jobID, jobInfo, resultInfos, metricResults, minErrScore) \
= self.runPermutations(expDir,
hsImp='v2',
loggingLevel=g_myEnv.options.logLevel,
onCluster=onCluster,
env=env,
maxModels=None,
**kwargs)
self.assertEqual(minErrScore, -50)
self.assertLess(len(resultInfos), 350)
# Now, require the predicted field. This should yield a high error score
if env is None:
env = dict()
env["NTA_TEST_inputPredictedField"] = "yes"
env["NTA_TEST_numIterations"] = '99'
env["NTA_CONF_PROP_nupic_hypersearch_minParticlesPerSwarm"] = \
'%d' % (2)
env["NTA_CONF_PROP_nupic_hypersearch_swarmMaturityWindow"] = \
'%d' % (g_repeatableSwarmMaturityWindow)
(jobID, jobInfo, resultInfos, metricResults, minErrScore) \
= self.runPermutations(expDir,
hsImp='v2',
loggingLevel=g_myEnv.options.logLevel,
onCluster=onCluster,
env=env,
maxModels=None,
**kwargs)
self.assertEqual(minErrScore, -40)
self.assertLess(len(resultInfos), 350)
return
def testFieldThresholdNoPredField(self, onCluster=False, env=None, **kwargs):
""" Test minimum field contribution threshold for a field to be included
in further sprints when doing a temporal search that does not require
the predicted field.
"""
self._printTestHeader()
expDir = os.path.join(g_myEnv.testSrcExpDir, 'input_predicted_field')
# Test it out without any max field branching in effect
if env is None:
env = dict()
env["NTA_TEST_numIterations"] = '99'
env["NTA_TEST_inputPredictedField"] = "auto"
env["NTA_CONF_PROP_nupic_hypersearch_swarmMaturityWindow"] = \
'%d' % (g_repeatableSwarmMaturityWindow)
env["NTA_CONF_PROP_nupic_hypersearch_max_field_branching"] = \
'%d' % (0)
env["NTA_CONF_PROP_nupic_hypersearch_minParticlesPerSwarm"] = \
'%d' % (2)
env["NTA_CONF_PROP_nupic_hypersearch_min_field_contribution"] = \
'%f' % (0)
if True:
(jobID, jobInfo, resultInfos, metricResults, minErrScore) \
= self.runPermutations(expDir,
hsImp='v2',
loggingLevel=g_myEnv.options.logLevel,
onCluster=onCluster,
env=env,
maxModels=None,
dummyModel={'iterations':200},
**kwargs)
# Verify the best model and check the field contributions.
cjDAO = ClientJobsDAO.get()
jobResultsStr = cjDAO.jobGetFields(jobID, ['results'])[0]
jobResults = json.loads(jobResultsStr)
bestModel = cjDAO.modelsInfo([jobResults["bestModel"]])[0]
params = json.loads(bestModel.params)
prefix = 'modelParams|sensorParams|encoders|'
expectedSwarmId = prefix + ('.' + prefix).join([
'address',
'gym',
'timestamp_dayOfWeek',
'timestamp_timeOfDay'])
self.assertEqual(params["particleState"]["swarmId"],
expectedSwarmId,
"Actual swarm id = %s\nExpcted swarm id = %s" \
% (params["particleState"]["swarmId"],
expectedSwarmId))
self.assertEqual( bestModel.optimizedMetric, -50)
# Check the field contributions
actualFieldContributions = jobResults['fieldContributions']
print "Actual field contributions:", \
pprint.pformat(actualFieldContributions)
expectedFieldContributions = {
'consumption': 0.0,
'address': 100 * (60.0-40.0)/60.0,
'timestamp_timeOfDay': 100 * (60.0-20.0)/60.0,
'timestamp_dayOfWeek': 100 * (60.0-10.0)/60.0,
'gym': 100 * (60.0-30.0)/60.0}
for key, value in expectedFieldContributions.items():
self.assertEqual(actualFieldContributions[key], value,
"actual field contribution from field '%s' does not "
"match the expected value of %f" % (key, value))
if True:
#==========================================================================
# Now test ignoring all fields that contribute less than 55% to the
# error score. This means we can only use the timestamp_timeOfDay and
# timestamp_dayOfWeek fields.
# This should bring our best error score up to 50-30-40 = -20
env["NTA_CONF_PROP_nupic_hypersearch_min_field_contribution"] = \
'%f' % (55)
env["NTA_CONF_PROP_nupic_hypersearch_max_field_branching"] = \
'%d' % (5)
(jobID, jobInfo, resultInfos, metricResults, minErrScore) \
= self.runPermutations(expDir,
hsImp='v2',
loggingLevel=g_myEnv.options.logLevel,
onCluster=onCluster,
env=env,
maxModels=None,
dummyModel={'iterations':200},
**kwargs)
# Get the best model
cjDAO = ClientJobsDAO.get()
jobResultsStr = cjDAO.jobGetFields(jobID, ['results'])[0]
jobResults = json.loads(jobResultsStr)
bestModel = cjDAO.modelsInfo([jobResults["bestModel"]])[0]
params = json.loads(bestModel.params)
prefix = 'modelParams|sensorParams|encoders|'
expectedSwarmId = prefix + ('.' + prefix).join([
'timestamp_dayOfWeek',
'timestamp_timeOfDay'])
self.assertEqual(params["particleState"]["swarmId"],
expectedSwarmId,
"Actual swarm id = %s\nExpcted swarm id = %s" \
% (params["particleState"]["swarmId"],
expectedSwarmId))
self.assertEqual( bestModel.optimizedMetric, -20)
# Check field contributions returned
actualFieldContributions = jobResults['fieldContributions']
print "Actual field contributions:", \
pprint.pformat(actualFieldContributions)
expectedFieldContributions = {
'consumption': 0.0,
'address': 100 * (60.0-40.0)/60.0,
'timestamp_timeOfDay': 100 * (60.0-20.0)/60.0,
'timestamp_dayOfWeek': 100 * (60.0-10.0)/60.0,
'gym': 100 * (60.0-30.0)/60.0}
for key, value in expectedFieldContributions.items():
self.assertEqual(actualFieldContributions[key], value,
"actual field contribution from field '%s' does not "
"match the expected value of %f" % (key, value))
if True:
#==========================================================================
# Now, test using maxFieldBranching to limit the max number of fields to
# 3. This means we can only use the timestamp_timeOfDay, timestamp_dayOfWeek,
# gym fields.
# This should bring our error score to 50-30-40-20 = -40
env["NTA_CONF_PROP_nupic_hypersearch_min_field_contribution"] = \
'%f' % (0)
env["NTA_CONF_PROP_nupic_hypersearch_max_field_branching"] = \
'%d' % (3)
(jobID, jobInfo, resultInfos, metricResults, minErrScore) \
= self.runPermutations(expDir,
hsImp='v2',
loggingLevel=g_myEnv.options.logLevel,
onCluster=onCluster,
env=env,
maxModels=None,
dummyModel={'iterations':200},
**kwargs)
# Get the best model
cjDAO = ClientJobsDAO.get()
jobResultsStr = cjDAO.jobGetFields(jobID, ['results'])[0]
jobResults = json.loads(jobResultsStr)
bestModel = cjDAO.modelsInfo([jobResults["bestModel"]])[0]
params = json.loads(bestModel.params)
prefix = 'modelParams|sensorParams|encoders|'
expectedSwarmId = prefix + ('.' + prefix).join([
'gym',
'timestamp_dayOfWeek',
'timestamp_timeOfDay'])
self.assertEqual(params["particleState"]["swarmId"],
expectedSwarmId,
"Actual swarm id = %s\nExpcted swarm id = %s" \
% (params["particleState"]["swarmId"],
expectedSwarmId))
self.assertEqual(bestModel.optimizedMetric, -40)
if True:
#==========================================================================
# Now, test setting max models so that no swarm can finish completely.
# Make sure we get the expected field contributions
env["NTA_CONF_PROP_nupic_hypersearch_swarmMaturityWindow"] = \
'%d' % (g_repeatableSwarmMaturityWindow)
env["NTA_CONF_PROP_nupic_hypersearch_max_field_branching"] = \
'%d' % (0)
env["NTA_CONF_PROP_nupic_hypersearch_minParticlesPerSwarm"] = \
'%d' % (5)
env["NTA_CONF_PROP_nupic_hypersearch_min_field_contribution"] = \
'%f' % (0)
(jobID, jobInfo, resultInfos, metricResults, minErrScore) \
= self.runPermutations(expDir,
hsImp='v2',
loggingLevel=g_myEnv.options.logLevel,
onCluster=onCluster,
env=env,
maxModels=10,
dummyModel={'iterations':200},
**kwargs)
# Get the best model
cjDAO = ClientJobsDAO.get()
jobResultsStr = cjDAO.jobGetFields(jobID, ['results'])[0]
jobResults = json.loads(jobResultsStr)
bestModel = cjDAO.modelsInfo([jobResults["bestModel"]])[0]
params = json.loads(bestModel.params)
prefix = 'modelParams|sensorParams|encoders|'
expectedSwarmId = prefix + ('.' + prefix).join([
'timestamp_dayOfWeek'])
self.assertEqual(params["particleState"]["swarmId"],
expectedSwarmId,
"Actual swarm id = %s\nExpcted swarm id = %s" \
% (params["particleState"]["swarmId"],
expectedSwarmId))
self.assertEqual(bestModel.optimizedMetric, 10)
# Check field contributions returned
actualFieldContributions = jobResults['fieldContributions']
print "Actual field contributions:", \
pprint.pformat(actualFieldContributions)
expectedFieldContributions = {
'consumption': 0.0,
'address': 100 * (60.0-40.0)/60.0,
'timestamp_timeOfDay': 100 * (60.0-20.0)/60.0,
'timestamp_dayOfWeek': 100 * (60.0-10.0)/60.0,
'gym': 100 * (60.0-30.0)/60.0}
class MultiNodeTests(ExperimentTestBaseClass):
"""
Test hypersearch on multiple nodes
"""
# AWS tests attribute required for tagging via automatic test discovery via
# nosetests
engineAWSClusterTest=True
def testSimpleV2(self):
""" Try running a simple permutations
"""
self._printTestHeader()
inst = OneNodeTests(self._testMethodName)
return inst.testSimpleV2(onCluster=True) #, maxNumWorkers=7)
def testDeltaV2(self):
""" Try running a simple permutations
"""
self._printTestHeader()
inst = OneNodeTests(self._testMethodName)
return inst.testDeltaV2(onCluster=True) #, maxNumWorkers=7)
def testSmartSpeculation(self, onCluster=True, env=None, **kwargs):
""" Try running a simple permutations
"""
self._printTestHeader()
expDir = os.path.join(g_myEnv.testSrcExpDir, 'smart_speculation_temporal')
# Test it out
if env is None:
env = dict()
env["NTA_TEST_numIterations"] = '99'
env["NTA_CONF_PROP_nupic_hypersearch_swarmMaturityWindow"] = \
'%d' % (g_repeatableSwarmMaturityWindow)
env["NTA_CONF_PROP_nupic_hypersearch_minParticlesPerSwarm"] = \
'%d' % (1)
(jobID, jobInfo, resultInfos, metricResults, minErrScore) \
= self.runPermutations(expDir,
hsImp='v2',
loggingLevel=g_myEnv.options.logLevel,
onCluster=onCluster,
env=env,
maxModels=None,
dummyModel={'iterations':200},
**kwargs)
# Get the field contributions from the hypersearch results dict
cjDAO = ClientJobsDAO.get()
jobInfoStr = cjDAO.jobGetFields(jobID, ['results','engWorkerState'])
jobResultsStr = jobInfoStr[0]
engState = jobInfoStr[1]
engState = json.loads(engState)
swarms = engState["swarms"]
jobResults = json.loads(jobResultsStr)
bestModel = cjDAO.modelsInfo([jobResults["bestModel"]])[0]
params = json.loads(bestModel.params)
# Make sure that the only nonkilled models are the ones that would have been
# run without speculation
prefix = 'modelParams|sensorParams|encoders|'
correctOrder = ["A","B","C","D","E","F","G","Pred"]
correctOrder = [prefix + x for x in correctOrder]
for swarm in swarms:
if swarms[swarm]["status"] == 'killed':
swarmId = swarm.split(".")
if len(swarmId) > 1:
# Make sure that something before the last two encoders is in the
# wrong sprint progression, hence why it was killed
# The last encoder is the predicted field and the second to last is
# the current new addition
wrong=0
for i in range(len(swarmId)-2):
if correctOrder[i] != swarmId[i]:
wrong=1
assert wrong==1, "Some of the killed swarms should not have been " \
+ "killed as they are a legal combination."
if swarms[swarm]["status"] == 'completed':
swarmId = swarm.split(".")
if len(swarmId) > 3:
# Make sure that the completed swarms are all swarms that should
# have been run.
# The last encoder is the predicted field and the second to last is
# the current new addition
for i in range(len(swarmId)-3):
if correctOrder[i] != swarmId[i]:
assert False , "Some of the completed swarms should not have " \
"finished as they are illegal combinations"
if swarms[swarm]["status"] == 'active':
assert False , "Some swarms are still active at the end of hypersearch"
pass
def testSmartSpeculationSpatialClassification(self, onCluster=True,
env=None, **kwargs):
""" Test that smart speculation does the right thing with spatial
classification models. This also applies to temporal models where the
predicted field is optional (or excluded) since Hypersearch treats them
the same.
"""
self._printTestHeader()
expDir = os.path.join(g_myEnv.testSrcExpDir,
'smart_speculation_spatial_classification')
# Test it out
if env is None:
env = dict()
env["NTA_TEST_numIterations"] = '99'
env["NTA_CONF_PROP_nupic_hypersearch_swarmMaturityWindow"] = \
'%d' % (g_repeatableSwarmMaturityWindow)
env["NTA_CONF_PROP_nupic_hypersearch_minParticlesPerSwarm"] = \
'%d' % (1)
(jobID, jobInfo, resultInfos, metricResults, minErrScore) \
= self.runPermutations(expDir,
hsImp='v2',
loggingLevel=g_myEnv.options.logLevel,
onCluster=onCluster,
env=env,
maxModels=None,
maxNumWorkers=5,
dummyModel={'iterations':200},
**kwargs)
# Get the worker state
cjDAO = ClientJobsDAO.get()
jobInfoStr = cjDAO.jobGetFields(jobID, ['results','engWorkerState'])
jobResultsStr = jobInfoStr[0]
engState = jobInfoStr[1]
engState = json.loads(engState)
swarms = engState["swarms"]
jobResults = json.loads(jobResultsStr)
bestModel = cjDAO.modelsInfo([jobResults["bestModel"]])[0]
params = json.loads(bestModel.params)
# Make sure that the only non-killed models are the ones that would have been
# run without speculation
prefix = 'modelParams|sensorParams|encoders|'
correctOrder = ["A","B","C"]
correctOrder = [prefix + x for x in correctOrder]
for swarm in swarms:
if swarms[swarm]["status"] == 'killed':
swarmId = swarm.split(".")
if len(swarmId) > 1:
# Make sure that the best encoder is not in this swarm
if correctOrder[0] in swarmId:
raise RuntimeError("Some of the killed swarms should not have been "
"killed as they are a legal combination.")
elif swarms[swarm]["status"] == 'completed':
swarmId = swarm.split(".")
if len(swarmId) >= 2:
# Make sure that the completed swarms are all swarms that should
# have been run.
for i in range(len(swarmId)-1):
if correctOrder[i] != swarmId[i]:
raise RuntimeError("Some of the completed swarms should not have "
"finished as they are illegal combinations")
elif swarms[swarm]["status"] == 'active':
raise RuntimeError("Some swarms are still active at the end of "
"hypersearch")
def testFieldBranching(self, onCluster=True, env=None, **kwargs):
""" Try running a simple permutations
"""
self._printTestHeader()
expDir = os.path.join(g_myEnv.testSrcExpDir, 'max_branching_temporal')
# Test it out
if env is None:
env = dict()
env["NTA_TEST_numIterations"] = '99'
env["NTA_CONF_PROP_nupic_hypersearch_swarmMaturityWindow"] = \
'%d' % (g_repeatableSwarmMaturityWindow)
env["NTA_CONF_PROP_nupic_hypersearch_max_field_branching"] = \
'%d' % (4)
env["NTA_CONF_PROP_nupic_hypersearch_min_field_contribution"] = \
'%f' % (-20.0)
env["NTA_CONF_PROP_nupic_hypersearch_minParticlesPerSwarm"] = \
'%d' % (2)
(jobID, jobInfo, resultInfos, metricResults, minErrScore) \
= self.runPermutations(expDir,
hsImp='v2',
loggingLevel=g_myEnv.options.logLevel,
onCluster=onCluster,
env=env,
maxModels=None,
dummyModel={'iterations':200},
**kwargs)
# Get the field contributions from the hypersearch results dict
cjDAO = ClientJobsDAO.get()
jobResultsStr = cjDAO.jobGetFields(jobID, ['results'])[0]
jobResults = json.loads(jobResultsStr)
bestModel = cjDAO.modelsInfo([jobResults["bestModel"]])[0]
params = json.loads(bestModel.params)
prefix = 'modelParams|sensorParams|encoders|'
expectedSwarmId = prefix + ('.' + prefix).join([
'attendance', 'home_winloss', 'timestamp_dayOfWeek',
'timestamp_timeOfDay', 'visitor_winloss'])
assert params["particleState"]["swarmId"] == expectedSwarmId, \
params["particleState"]["swarmId"]
assert bestModel.optimizedMetric == 432, bestModel.optimizedMetric
env["NTA_CONF_PROP_nupic_hypersearch_max_field_branching"] = \
'%d' % (3)
(jobID, jobInfo, resultInfos, metricResults, minErrScore) \
= self.runPermutations(expDir,
hsImp='v2',
loggingLevel=g_myEnv.options.logLevel,
onCluster=onCluster,
env=env,
maxModels=None,
dummyModel={'iterations':200},
**kwargs)
# Get the field contributions from the hypersearch results dict
cjDAO = ClientJobsDAO.get()
jobResultsStr = cjDAO.jobGetFields(jobID, ['results'])[0]
jobResults = json.loads(jobResultsStr)
bestModel = cjDAO.modelsInfo([jobResults["bestModel"]])[0]
params = json.loads(bestModel.params)
prefix = 'modelParams|sensorParams|encoders|'
expectedSwarmId = prefix + ('.' + prefix).join([
'attendance', 'home_winloss', 'timestamp_timeOfDay',
'visitor_winloss'])
assert params["particleState"]["swarmId"] == expectedSwarmId, \
params["particleState"]["swarmId"]
assert bestModel.optimizedMetric == 465, bestModel.optimizedMetric
env["NTA_CONF_PROP_nupic_hypersearch_max_field_branching"] = \
'%d' % (5)
(jobID, jobInfo, resultInfos, metricResults, minErrScore) \
= self.runPermutations(expDir,
hsImp='v2',
loggingLevel=g_myEnv.options.logLevel,
onCluster=onCluster,
env=env,
maxModels=None,
dummyModel={'iterations':200},
**kwargs)
# Get the field contributions from the hypersearch results dict
cjDAO = ClientJobsDAO.get()
jobResultsStr = cjDAO.jobGetFields(jobID, ['results'])[0]
jobResults = json.loads(jobResultsStr)
bestModel = cjDAO.modelsInfo([jobResults["bestModel"]])[0]
params = json.loads(bestModel.params)
prefix = 'modelParams|sensorParams|encoders|'
expectedSwarmId = prefix + ('.' + prefix).join([
'attendance', 'home_winloss', 'precip', 'timestamp_dayOfWeek',
'timestamp_timeOfDay', 'visitor_winloss'])
assert params["particleState"]["swarmId"] == expectedSwarmId, \
params["particleState"]["swarmId"]
assert bestModel.optimizedMetric == 390, bestModel.optimizedMetric
#Find best combo with 3 fields
env["NTA_CONF_PROP_nupic_hypersearch_max_field_branching"] = \
'%d' % (0)
(jobID, jobInfo, resultInfos, metricResults, minErrScore) \
= self.runPermutations(expDir,
hsImp='v2',
loggingLevel=g_myEnv.options.logLevel,
onCluster=onCluster,
env=env,
maxModels=100,
dummyModel={'iterations':200},
**kwargs)
# Get the field contributions from the hypersearch results dict
cjDAO = ClientJobsDAO.get()
jobResultsStr = cjDAO.jobGetFields(jobID, ['results'])[0]
jobResults = json.loads(jobResultsStr)
bestModel = cjDAO.modelsInfo([jobResults["bestModel"]])[0]
params = json.loads(bestModel.params)
prefix = 'modelParams|sensorParams|encoders|'
expectedSwarmId = prefix + ('.' + prefix).join([
'attendance', 'daynight', 'visitor_winloss'])
assert params["particleState"]["swarmId"] == expectedSwarmId, \
params["particleState"]["swarmId"]
assert bestModel.optimizedMetric == 406, bestModel.optimizedMetric
return
def testFieldThreshold(self, onCluster=True, env=None, **kwargs):
""" Test minimum field contribution threshold for a field to be included in further sprints
"""
self._printTestHeader()
inst = OneNodeTests(self._testMethodName)
return inst.testFieldThreshold(onCluster=True)
def testFieldContributions(self, onCluster=True, env=None, **kwargs):
""" Try running a simple permutations
"""
self._printTestHeader()
expDir = os.path.join(g_myEnv.testSrcExpDir, 'field_contrib_temporal')
# Test it out
if env is None:
env = dict()
env["NTA_TEST_numIterations"] = '99'
env["NTA_CONF_PROP_nupic_hypersearch_swarmMaturityWindow"] = \
'%d' % (g_repeatableSwarmMaturityWindow)
(jobID, jobInfo, resultInfos, metricResults, minErrScore) \
= self.runPermutations(expDir,
hsImp='v2',
loggingLevel=g_myEnv.options.logLevel,
onCluster=onCluster,
env=env,
maxModels=None,
**kwargs)
# Get the field contributions from the hypersearch results dict
cjDAO = ClientJobsDAO.get()
jobResultsStr = cjDAO.jobGetFields(jobID, ['results'])[0]
jobResults = json.loads(jobResultsStr)
actualFieldContributions = jobResults['fieldContributions']
print "Actual field contributions:", actualFieldContributions
expectedFieldContributions = {'consumption': 0.0,
'address': 0.0,
'timestamp_timeOfDay': 20.0,
'timestamp_dayOfWeek': 50.0,
'gym': 10.0}
for key, value in expectedFieldContributions.items():
self.assertEqual(actualFieldContributions[key], value,
"actual field contribution from field '%s' does not "
"match the expected value of %f" % (key, value))
return
def testCLAModelV2(self):
""" Try running a simple permutations through a real CLA model
"""
self._printTestHeader()
inst = OneNodeTests(self._testMethodName)
return inst.testCLAModelV2(onCluster=True, maxModels=4)
def testCLAMultistepModel(self):
""" Try running a simple permutations through a real CLA model that
uses multistep
"""
self._printTestHeader()
inst = OneNodeTests(self._testMethodName)
return inst.testCLAMultistepModel(onCluster=True, maxModels=4)
def testLegacyCLAMultistepModel(self):
""" Try running a simple permutations through a real CLA model that
uses multistep
"""
self._printTestHeader()
inst = OneNodeTests(self._testMethodName)
return inst.testLegacyCLAMultistepModel(onCluster=True, maxModels=4)
def testSimpleV2VariableWaits(self):
""" Try running a simple permutations where certain field combinations
take longer to complete, this lets us test that we successfully kill
models in bad swarms that are still running.
"""
self._printTestHeader()
# NTA_TEST_variableWaits and NTA_TEST_numIterations are watched by the
# dummyModelParams() method of the permutations.py file
env = dict()
env["NTA_TEST_variableWaits"] ='True'
env["NTA_TEST_numIterations"] = '100'
inst = OneNodeTests('testSimpleV2')
return inst.testSimpleV2(onCluster=True, env=env)
def testOrphanedModel(self, modelRange=(0,2)):
""" Run a worker on a model for a while, then have it exit before the
model finishes. Then, run another worker, which should detect the orphaned
model.
"""
self._printTestHeader()
expDir = os.path.join(g_myEnv.testSrcExpDir, 'simpleV2')
# NTA_TEST_numIterations is watched by the dummyModelParams() method of
# the permutations file.
# NTA_TEST_sysExitModelRange is watched by the dummyModelParams() method of
# the permutations file. It tells it to do a sys.exit() after so many
# iterations.
env = dict()
env["NTA_TEST_numIterations"] = '99'
env["NTA_TEST_sysExitModelRange"] = '%d,%d' % (modelRange[0], modelRange[1])
env["NTA_CONF_PROP_nupic_hypersearch_modelOrphanIntervalSecs"] = '1'
env["NTA_CONF_PROP_nupic_hypersearch_swarmMaturityWindow"] \
= '%d' % (g_repeatableSwarmMaturityWindow)
(jobID, jobInfo, resultInfos, metricResults, minErrScore) \
= self.runPermutations(expDir,
hsImp='v2',
loggingLevel=g_myEnv.options.logLevel,
maxModels=500,
onCluster=True,
env=env,
waitForCompletion=True,
maxNumWorkers=4,
)
self.assertEqual(minErrScore, 20)
self.assertLess(len(resultInfos), 500)
return
def testTwoOrphanedModels(self, modelRange=(0,2)):
""" Test behavior when a worker marks 2 models orphaned at the same time.
"""
self._printTestHeader()
expDir = os.path.join(g_myEnv.testSrcExpDir, 'oneField')
# NTA_TEST_numIterations is watched by the dummyModelParams() method of
# the permutations file.
# NTA_TEST_sysExitModelRange is watched by the dummyModelParams() method of
# the permutations file. It tells it to do a sys.exit() after so many
# iterations.
env = dict()
env["NTA_TEST_numIterations"] = '99'
env["NTA_TEST_delayModelRange"] = '%d,%d' % (modelRange[0], modelRange[1])
env["NTA_CONF_PROP_nupic_hypersearch_modelOrphanIntervalSecs"] = '1'
env["NTA_CONF_PROP_nupic_hypersearch_swarmMaturityWindow"] \
= '%d' % (g_repeatableSwarmMaturityWindow)
(jobID, jobInfo, resultInfos, metricResults, minErrScore) \
= self.runPermutations(expDir,
hsImp='v2',
loggingLevel=g_myEnv.options.logLevel,
maxModels=100,
onCluster=True,
env=env,
waitForCompletion=True,
maxNumWorkers=4,
)
self.assertEqual(minErrScore, 50)
self.assertLess(len(resultInfos), 100)
return
def testOrphanedModelGen1(self):
""" Run a worker on a model for a while, then have it exit before the
model finishes. Then, run another worker, which should detect the orphaned
model.
"""
self._printTestHeader()
inst = MultiNodeTests(self._testMethodName)
return inst.testOrphanedModel(modelRange=(10,11))
def testOrphanedModelMaxModels(self):
""" Test to make sure that the maxModels parameter doesn't include
orphaned models. Run a test with maxModels set to 5, where one becomes
orphaned. At the end, there should be at least 6 models in the models table, one
of which will be the new model that adopted the orphaned model
"""
self._printTestHeader()
expDir = os.path.join(g_myEnv.testSrcExpDir, 'dummyV2')
numModels = 5
env = dict()
env["NTA_CONF_PROP_nupic_hypersearch_modelOrphanIntervalSecs"] = '3'
env['NTA_TEST_max_num_models']=str(numModels)
(jobID, jobInfo, resultInfos, metricResults, minErrScore) \
= self.runPermutations(expDir,
hsImp='v2',
loggingLevel=g_myEnv.options.logLevel,
maxModels=numModels,
env=env,
onCluster=True,
waitForCompletion=True,
dummyModel={'metricValue': ['25','50'],
'sysExitModelRange': '0, 1',
'iterations': 20,
}
)
cjDB = ClientJobsDAO.get()
self.assertGreaterEqual(len(resultInfos), numModels+1)
completionReasons = [x.completionReason for x in resultInfos]
self.assertGreaterEqual(completionReasons.count(cjDB.CMPL_REASON_EOF), numModels)
self.assertGreaterEqual(completionReasons.count(cjDB.CMPL_REASON_ORPHAN), 1)
def testOrphanedModelConnection(self):
"""Test for the correct behavior when a model uses a different connection id
than what is stored in the db. The correct behavior is for the worker to log
this as a warning and move on to a new model"""
self._printTestHeader()
# -----------------------------------------------------------------------
# Trigger "Using connection from another worker" exception inside
# ModelRunner
# -----------------------------------------------------------------------
expDir = os.path.join(g_myEnv.testSrcExpDir, 'dummy_multi_v2')
numModels = 2
env = dict()
env["NTA_CONF_PROP_nupic_hypersearch_modelOrphanIntervalSecs"] = '1'
(jobID, jobInfo, resultInfos, metricResults, minErrScore) \
= self.runPermutations(expDir,
hsImp='v2',
loggingLevel=g_myEnv.options.logLevel,
maxModels=numModels,
env=env,
onCluster=True,
waitForCompletion=True,
dummyModel={'metricValue': ['25','50'],
'sleepModelRange': '0, 1:5',
'iterations': 20,
}
)
cjDB = ClientJobsDAO.get()
self.assertGreaterEqual(len(resultInfos), numModels,
"%d were run. Expecting %s"%(len(resultInfos), numModels+1))
completionReasons = [x.completionReason for x in resultInfos]
self.assertGreaterEqual(completionReasons.count(cjDB.CMPL_REASON_EOF), numModels)
self.assertGreaterEqual(completionReasons.count(cjDB.CMPL_REASON_ORPHAN), 1)
def testErredModel(self, modelRange=(6,7)):
""" Run a worker on a model for a while, then have it exit before the
model finishes. Then, run another worker, which should detect the orphaned
model.
"""
self._printTestHeader()
inst = OneNodeTests(self._testMethodName)
return inst.testErredModel(onCluster=True)
def testJobFailModel(self):
""" Run a worker on a model for a while, then have it exit before the
model finishes. Then, run another worker, which should detect the orphaned
model.
"""
self._printTestHeader()
inst = OneNodeTests(self._testMethodName)
return inst.testJobFailModel(onCluster=True)
def testTooManyErredModels(self, modelRange=(5,10)):
""" Run a worker on a model for a while, then have it exit before the
model finishes. Then, run another worker, which should detect the orphaned
model.
"""
self._printTestHeader()
inst = OneNodeTests(self._testMethodName)
return inst.testTooManyErredModels(onCluster=True)
def testSpatialClassification(self):
""" Try running a simple permutations
"""
self._printTestHeader()
inst = OneNodeTests(self._testMethodName)
return inst.testSpatialClassification(onCluster=True) #, maxNumWorkers=7)
def testAlwaysInputPredictedField(self):
self._printTestHeader()
inst = OneNodeTests(self._testMethodName)
return inst.testAlwaysInputPredictedField(onCluster=True)
def testFieldThresholdNoPredField(self):
self._printTestHeader()
inst = OneNodeTests(self._testMethodName)
return inst.testFieldThresholdNoPredField(onCluster=True)
class ModelMaturityTests(ExperimentTestBaseClass):
"""
"""
# AWS tests attribute required for tagging via automatic test discovery via
# nosetests
engineAWSClusterTest=True
def setUp(self):
# Ignore the global hypersearch version setting. Always test hypersearch v2
hsVersion = 2
self.expDir = os.path.join(g_myEnv.testSrcExpDir, 'dummyV%d' %hsVersion)
self.hsImp = "v%d" % hsVersion
self.env = {'NTA_CONF_PROP_nupic_hypersearch_enableModelTermination':'0',
'NTA_CONF_PROP_nupic_hypersearch_enableModelMaturity':'1',
'NTA_CONF_PROP_nupic_hypersearch_maturityMaxSlope':'0.1',
'NTA_CONF_PROP_nupic_hypersearch_enableSwarmTermination':'0',
'NTA_CONF_PROP_nupic_hypersearch_bestModelMinRecords':'0'}
def testMatureInterleaved(self):
""" Test to make sure that the best model continues running even when it has
matured. The 2nd model (constant) will be marked as mature first and will
continue to run till the end. The 1st model reaches maturity and should
stop before all the records are consumed, and should be the best model
because it has a lower error
"""
self._printTestHeader()
self.expDir = os.path.join(g_myEnv.testSrcExpDir,
'dummy_multi_v%d' % 2)
self.env['NTA_TEST_max_num_models'] = '2'
jobID,_,_,_,_ = self.runPermutations(self.expDir, hsImp=self.hsImp, maxModels=2,
loggingLevel = g_myEnv.options.logLevel,
env = self.env,
onCluster = True,
dummyModel={'metricFunctions':
['lambda x: -10*math.log10(x+1) +100',
'lambda x: 100.0'],
'delay': [2.0,
0.0 ],
'waitTime':[0.05,
0.01],
'iterations':500,
'experimentDirectory':self.expDir,
})
cjDB = ClientJobsDAO.get()
modelIDs, records, completionReasons, matured = \
zip(*self.getModelFields( jobID, ['numRecords',
'completionReason',
'engMatured']))
results = cjDB.jobGetFields(jobID, ['results'])[0]
results = json.loads(results)
self.assertEqual(results['bestModel'], modelIDs[0])
self.assertEqual(records[1], 500)
self.assertTrue(records[0] > 100 and records[0] < 500,
"Model 2 num records: 100 < %d < 500 " % records[1])
self.assertEqual(completionReasons[1], cjDB.CMPL_REASON_EOF)
self.assertEqual(completionReasons[0], cjDB.CMPL_REASON_STOPPED)
self.assertEqual(matured[0], True)
def testConstant(self):
""" Sanity check to make sure that when only 1 model is running, it continues
to run even when it has reached maturity """
self._printTestHeader()
jobID,_,_,_,_ = self.runPermutations(self.expDir, hsImp=self.hsImp, maxModels=1,
loggingLevel = g_myEnv.options.logLevel,
env = self.env,
dummyModel={'metricFunctions':
['lambda x: 100'],
'iterations':350,
'experimentDirectory':self.expDir,
})
cjDB = ClientJobsDAO.get()
modelIDs = cjDB.jobGetModelIDs(jobID)
dbResults = cjDB.modelsGetFields(modelIDs, ['numRecords', 'completionReason',
'engMatured'])
modelIDs = [x[0] for x in dbResults]
records = [x[1][0] for x in dbResults]
completionReasons = [x[1][1] for x in dbResults]
matured = [x[1][2] for x in dbResults]
results = cjDB.jobGetFields(jobID, ['results'])[0]
results = json.loads(results)
self.assertEqual(results['bestModel'], min(modelIDs))
self.assertEqual(records[0], 350)
self.assertEqual(completionReasons[0], cjDB.CMPL_REASON_EOF)
self.assertEqual(matured[0], True)
def getModelFields(self, jobID, fields):
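# Return the requested fields for every model of the job, ordered by each
# model's '__model_num' (creation order); each returned tuple starts with
# the model ID followed by the requested field values.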
cjDB = ClientJobsDAO.get()
modelIDs = cjDB.jobGetModelIDs(jobID)
modelParams = cjDB.modelsGetFields(modelIDs, ['params']+fields)
modelIDs = [e[0] for e in modelParams]
modelOrders = [json.loads(e[1][0])['structuredParams']['__model_num'] for e in modelParams]
modelFields = []
for f in xrange(len(fields)):
modelFields.append([e[1][f+1] for e in modelParams])
modelInfo = zip(modelOrders, modelIDs, *tuple(modelFields))
modelInfo.sort(key=lambda info: info[0])
return [e[1:] for e in modelInfo]
class SwarmTerminatorTests(ExperimentTestBaseClass):
"""
"""
# AWS tests attribute required for tagging via automatic test discovery via
# nosetests
engineAWSClusterTest=True
def setUp(self):
self.env = {'NTA_CONF_PROP_nupic_hypersearch_enableModelMaturity':'0',
'NTA_CONF_PROP_nupic_hypersearch_enableModelTermination':'0',
'NTA_CONF_PROP_nupic_hypersearch_enableSwarmTermination':'1',
'NTA_TEST_recordSwarmTerminations':'1'}
def testSimple(self, useCluster=False):
"""Run with one really bad swarm to see if terminator picks it up correctly"""
if not g_myEnv.options.runInProc:
self.skipTest("Skipping One Node test since runInProc is not specified")
self._printTestHeader()
expDir = os.path.join(g_myEnv.testSrcExpDir, 'swarm_v2')
(jobID, jobInfo, resultInfos, metricResults, minErrScore) \
= self.runPermutations(expDir,
hsImp='v2',
loggingLevel=g_myEnv.options.logLevel,
maxModels=None,
onCluster=useCluster,
env=self.env,
dummyModel={'iterations':200})
cjDB = ClientJobsDAO.get()
jobResultsStr = cjDB.jobGetFields(jobID, ['results'])[0]
jobResults = json.loads(jobResultsStr)
terminatedSwarms = jobResults['terminatedSwarms']
swarmMaturityWindow = int(configuration.Configuration.get(
'nupic.hypersearch.swarmMaturityWindow'))
prefix = 'modelParams|sensorParams|encoders|'
for swarm, (generation, scores) in terminatedSwarms.iteritems():
if prefix + 'gym' in swarm.split('.'):
self.assertEqual(generation, swarmMaturityWindow-1)
else:
self.assertEqual(generation, swarmMaturityWindow-1+4)
def testMaturity(self, useCluster=False):
if not g_myEnv.options.runInProc:
self.skipTest("Skipping One Node test since runInProc is not specified")
self._printTestHeader()
self.env['NTA_CONF_PROP_enableSwarmTermination'] = '0'
expDir = os.path.join(g_myEnv.testSrcExpDir, 'swarm_maturity_v2')
(jobID, jobInfo, resultInfos, metricResults, minErrScore) \
= self.runPermutations(expDir,
hsImp='v2',
loggingLevel=g_myEnv.options.logLevel,
maxModels=None,
onCluster=useCluster,
env=self.env,
dummyModel={'iterations':200})
cjDB = ClientJobsDAO.get()
jobResultsStr = cjDB.jobGetFields(jobID, ['results'])[0]
jobResults = json.loads(jobResultsStr)
terminatedSwarms = jobResults['terminatedSwarms']
swarmMaturityWindow = int(configuration.Configuration.get(
'nupic.hypersearch.swarmMaturityWindow'))
prefix = 'modelParams|sensorParams|encoders|'
for swarm, (generation, scores) in terminatedSwarms.iteritems():
encoders = swarm.split('.')
if prefix + 'gym' in encoders:
self.assertEqual(generation, swarmMaturityWindow-1 + 3)
elif prefix + 'address' in encoders:
self.assertEqual(generation, swarmMaturityWindow-1)
else:
self.assertEqual(generation, swarmMaturityWindow-1 + 7)
def testSimpleMN(self):
self.testSimple(useCluster=True)
def testMaturityMN(self):
self.testMaturity(useCluster=True)
def getHypersearchWinningModelID(jobID):
"""
Parameters:
-------------------------------------------------------------------
jobID: jobID of successfully-completed Hypersearch job
retval: modelID of the winning model
"""
cjDAO = ClientJobsDAO.get()
jobResults = cjDAO.jobGetFields(jobID, ['results'])[0]
print "Hypersearch job results: %r" % (jobResults,)
jobResults = json.loads(jobResults)
return jobResults['bestModel']
def _executeExternalCmdAndReapStdout(args):
"""
args: Args list as defined for the args parameter in subprocess.Popen()
Returns: result dictionary:
{
'exitStatus':<exit-status-of-external-command>,
'stdoutData':"string",
'stderrData':"string"
}
"""
_debugOut(("_executeExternalCmdAndReapStdout: Starting...\n<%s>") % \
(args,))
p = subprocess.Popen(args,
env=os.environ,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
_debugOut(("Process started for <%s>") % (args,))
(stdoutData, stderrData) = p.communicate()
_debugOut(("Process completed for <%s>: exit status=%s, stdoutDataType=%s, " + \
"stdoutData=<%s>, stderrData=<%s>") % \
(args, p.returncode, type(stdoutData), stdoutData, stderrData))
result = dict(
exitStatus = p.returncode,
stdoutData = stdoutData,
stderrData = stderrData,
)
_debugOut(("_executeExternalCmdAndReapStdout for <%s>: result=\n%s") % \
(args, pprint.pformat(result, indent=4)))
return result
def _debugOut(text):
global g_debug
if g_debug:
print text
sys.stdout.flush()
return
def _getTestList():
""" Get the list of tests that can be run from this module"""
suiteNames = [
'OneNodeTests',
'MultiNodeTests',
'ModelMaturityTests',
'SwarmTerminatorTests',
]
testNames = []
for suite in suiteNames:
for f in dir(eval(suite)):
if f.startswith('test'):
testNames.append('%s.%s' % (suite, f))
return testNames
class _ArgParser(object):
"""Class which handles command line arguments and arguments passed to the test
"""
args = []
@classmethod
def _processArgs(cls):
"""
Parse our command-line args/options and strip them from sys.argv
Returns the tuple (parsedOptions, remainingArgs)
"""
helpString = \
"""%prog [options...] [-- unittestoptions...] [suitename.testname | suitename]
Run the Hypersearch unit tests. To see unit test framework options, enter:
python %prog -- --help
Example usages:
python %prog MultiNodeTests
python %prog MultiNodeTests.testOrphanedModel
python %prog -- MultiNodeTests.testOrphanedModel
python %prog -- --failfast
python %prog -- --failfast OneNodeTests.testOrphanedModel
Available suitename.testnames: """
# Update help string
allTests = _getTestList()
for test in allTests:
helpString += "\n %s" % (test)
# ============================================================================
# Process command line arguments
parser = OptionParser(helpString,conflict_handler="resolve")
parser.add_option("--verbosity", default=0, type="int",
help="Verbosity level, either 0, 1, 2, or 3 [default: %default].")
parser.add_option("--runInProc", action="store_true", default=False,
help="Run inProc tests, currently inProc are not being run by default "
" running. [default: %default].")
parser.add_option("--logLevel", action="store", type="int",
default=logging.INFO,
help="override default log level. Pass in an integer value that "
"represents the desired logging level (10=logging.DEBUG, "
"20=logging.INFO, etc.) [default: %default].")
parser.add_option("--hs", dest="hsVersion", default=2, type='int',
help=("Hypersearch version (only 2 supported; 1 was "
"deprecated) [default: %default]."))
return parser.parse_args(args=cls.args)
@classmethod
def parseArgs(cls):
""" Returns the test arguments after parsing
"""
return cls._processArgs()[0]
@classmethod
def consumeArgs(cls):
""" Consumes the test arguments and returns the remaining arguments meant
for unittest.main
"""
return cls._processArgs()[1]
def setUpModule():
print "\nCURRENT DIRECTORY:", os.getcwd()
initLogging(verbose=True)
global g_myEnv
# Setup our environment
g_myEnv = MyTestEnvironment()
if __name__ == '__main__':
# Form the command line for the unit test framework
# Consume test specific arguments and pass remaining to unittest.main
_ArgParser.args = sys.argv[1:]
args = [sys.argv[0]] + _ArgParser.consumeArgs()
# Run the tests if called using python
unittest.main(argv=args)
| passiweinberger/nupic | tests/swarming/nupic/swarming/swarming_test.py | Python | agpl-3.0 | 101,562 |
# -*- coding: utf-8 -*-
from __future__ import print_function
import logging
import os
import signal
import socket
import time
import traceback
from datetime import datetime
from multiprocessing import Process
from os.path import abspath
from os.path import dirname
from os.path import expanduser
from os.path import join
from os.path import realpath
import mock
import pyotp
import requests
import tbselenium.common as cm
from selenium import webdriver
from selenium.common.exceptions import NoAlertPresentException
from selenium.common.exceptions import WebDriverException
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.common.by import By
from selenium.webdriver.remote.remote_connection import LOGGER
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.ui import WebDriverWait
from sqlalchemy.exc import IntegrityError
from tbselenium.tbdriver import TorBrowserDriver
import journalist_app
import source_app
import tests.utils.env as env
from db import db
from models import Journalist
from sdconfig import config
os.environ["SECUREDROP_ENV"] = "test"
FUNCTIONAL_TEST_DIR = abspath(dirname(__file__))
LOGFILE_PATH = abspath(join(FUNCTIONAL_TEST_DIR, "firefox.log"))
FILES_DIR = abspath(join(dirname(realpath(__file__)), "../..", "tests/files"))
FIREFOX_PATH = "/usr/bin/firefox/firefox"
TBB_PATH = abspath(join(expanduser("~"), ".local/tbb/tor-browser_en-US/"))
os.environ["TBB_PATH"] = TBB_PATH
TBBRC = join(TBB_PATH, "Browser/TorBrowser/Data/Tor/torrc")
LOGGER.setLevel(logging.WARNING)
# https://stackoverflow.com/a/34795883/837471
class alert_is_not_present(object):
""" Expect an alert to not be present."""
def __call__(self, driver):
try:
alert = driver.switch_to.alert
alert.text
return False
except NoAlertPresentException:
return True
class FunctionalTest(object):
use_firefox = False
driver = None
accept_languages = None
_firefox_driver = None
_torbrowser_driver = None
gpg = None
new_totp = None
timeout = 10
secret_message = "These documents outline a major government invasion of privacy."
def _unused_port(self):
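# Bind to port 0 so the OS picks a free ephemeral port, then close the
# socket and return that port number for the test servers to use.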
s = socket.socket()
s.bind(("127.0.0.1", 0))
port = s.getsockname()[1]
s.close()
return port
def _create_torbrowser_driver(self):
logging.info("Creating TorBrowserDriver")
log_file = open(LOGFILE_PATH, "a")
log_file.write("\n\n[%s] Running Functional Tests\n" % str(datetime.now()))
log_file.flush()
# Don't use Tor when reading from localhost, and turn off private
# browsing. We need to turn off private browsing because we won't be
# able to access the browser's cookies in private browsing mode. Since
# we use session cookies in SD anyway (in private browsing mode all
# cookies are set as session cookies), this should not affect session
# lifetime.
pref_dict = {
"network.proxy.no_proxies_on": "127.0.0.1",
"browser.privatebrowsing.autostart": False,
}
if self.accept_languages is not None:
pref_dict["intl.accept_languages"] = self.accept_languages
self._torbrowser_driver = TorBrowserDriver(
TBB_PATH, tor_cfg=cm.USE_RUNNING_TOR, pref_dict=pref_dict, tbb_logfile_path=LOGFILE_PATH
)
logging.info("Created Tor Browser driver")
def _create_firefox_driver(self, profile=None):
logging.info("Creating Firefox driver")
if profile is None:
profile = webdriver.FirefoxProfile()
if self.accept_languages is not None:
profile.set_preference("intl.accept_languages", self.accept_languages)
profile.update_preferences()
self._firefox_driver = webdriver.Firefox(
firefox_binary=FIREFOX_PATH, firefox_profile=profile
)
self._firefox_driver.set_window_position(0, 0)
self._firefox_driver.set_window_size(1024, 768)
self._firefox_driver.implicitly_wait(self.timeout)
logging.info("Created Firefox driver")
def disable_javascript(self):
self.driver.profile.set_preference("javascript.enabled", False)
def enable_javascript(self):
self.driver.profile.set_preference("javascript.enabled", True)
def switch_to_firefox_driver(self):
self.driver = self._firefox_driver
def switch_to_torbrowser_driver(self):
self.driver = self._torbrowser_driver
def setup(self, session_expiration=30):
env.create_directories()
self.gpg = env.init_gpg()
self.__context = journalist_app.create_app(config).app_context()
self.__context.push()
# Patch the two-factor verification to avoid intermittent errors
self.patcher = mock.patch("models.Journalist.verify_token")
self.mock_journalist_verify_token = self.patcher.start()
self.mock_journalist_verify_token.return_value = True
self.patcher2 = mock.patch("source_app.main.get_entropy_estimate")
self.mock_get_entropy_estimate = self.patcher2.start()
self.mock_get_entropy_estimate.return_value = 8192
signal.signal(signal.SIGUSR1, lambda _, s: traceback.print_stack(s))
env.create_directories()
db.create_all()
# Add our test user
try:
valid_password = "correct horse battery staple profanity oil chewy"
user = Journalist(username="journalist", password=valid_password, is_admin=True)
user.otp_secret = "JHCOGO7VCER3EJ4L"
db.session.add(user)
db.session.commit()
except IntegrityError:
logging.error("Test user already added")
db.session.rollback()
# This user is required for our tests cases to login
self.admin_user = {
"name": "journalist",
"password": ("correct horse battery staple" " profanity oil chewy"),
"secret": "JHCOGO7VCER3EJ4L",
}
self.admin_user["totp"] = pyotp.TOTP(self.admin_user["secret"])
source_port = self._unused_port()
journalist_port = self._unused_port()
self.source_location = "http://127.0.0.1:%d" % source_port
self.journalist_location = "http://127.0.0.1:%d" % journalist_port
# Allow custom session expiration lengths
self.session_expiration = session_expiration
self.source_app = source_app.create_app(config)
self.journalist_app = journalist_app.create_app(config)
def start_source_server(app):
config.SESSION_EXPIRATION_MINUTES = self.session_expiration
app.run(port=source_port, debug=True, use_reloader=False, threaded=True)
def start_journalist_server(app):
app.run(port=journalist_port, debug=True, use_reloader=False, threaded=True)
self.source_process = Process(target=lambda: start_source_server(self.source_app))
self.journalist_process = Process(
target=lambda: start_journalist_server(self.journalist_app)
)
self.source_process.start()
self.journalist_process.start()
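# Poll both web apps until they respond, retrying up to 30 times.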
for tick in range(30):
try:
requests.get(self.source_location, timeout=1)
requests.get(self.journalist_location, timeout=1)
except Exception:
time.sleep(0.5)
else:
break
self._create_torbrowser_driver()
self._create_firefox_driver()
if self.use_firefox:
self.switch_to_firefox_driver()
else:
self.switch_to_torbrowser_driver()
# Polls the DOM to wait for elements. To read more about why
# this is necessary:
#
# http://www.obeythetestinggoat.com/how-to-get-selenium-to-wait-for-page-load-after-a-click.html
#
# A value of 5 is known to not be enough in some cases, when
# the machine hosting the tests is slow, which is why it was
# raised to 10. Setting the value to 60 or more would surely
# cover even the slowest of machines. However, it also means
# that a test failing to find the desired element in the DOM
# will only report failure after 60 seconds, which is painful
# for quick debugging.
#
self.driver.implicitly_wait(self.timeout)
def wait_for_source_key(self, source_name):
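# Block until the source's GPG key has been generated, waiting up to 60s.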
filesystem_id = self.source_app.crypto_util.hash_codename(source_name)
def key_available(filesystem_id):
assert self.source_app.crypto_util.getkey(filesystem_id)
self.wait_for(lambda: key_available(filesystem_id), timeout=60)
def teardown(self):
if self._torbrowser_driver:
self._torbrowser_driver.quit()
if self._firefox_driver:
self._firefox_driver.quit()
self.patcher.stop()
env.teardown()
self.source_process.terminate()
self.journalist_process.terminate()
self.__context.pop()
def create_new_totp(self, secret):
self.new_totp = pyotp.TOTP(secret)
def wait_for(self, function_with_assertion, timeout=None):
"""Polling wait for an arbitrary assertion."""
# Thanks to
# http://chimera.labs.oreilly.com/books/1234000000754/ch20.html#_a_common_selenium_problem_race_conditions
if timeout is None:
timeout = self.timeout
start_time = time.time()
while time.time() - start_time < timeout:
try:
return function_with_assertion()
except (AssertionError, WebDriverException):
time.sleep(0.1)
# one more try, which will raise any errors if they are outstanding
return function_with_assertion()
def safe_click_by_id(self, element_id):
WebDriverWait(self.driver, self.timeout).until(
expected_conditions.element_to_be_clickable((By.ID, element_id))
)
el = self.wait_for(lambda: self.driver.find_element_by_id(element_id))
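# Accessing this property scrolls the element into view as a side effect.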
el.location_once_scrolled_into_view
ActionChains(self.driver).move_to_element(el).click().perform()
def safe_click_by_css_selector(self, selector):
WebDriverWait(self.driver, self.timeout).until(
expected_conditions.element_to_be_clickable((By.CSS_SELECTOR, selector))
)
el = self.wait_for(lambda: self.driver.find_element_by_css_selector(selector))
el.location_once_scrolled_into_view
ActionChains(self.driver).move_to_element(el).click().perform()
def safe_click_all_by_css_selector(self, selector, root=None):
if root is None:
root = self.driver
els = self.wait_for(lambda: root.find_elements_by_css_selector(selector))
for el in els:
el.location_once_scrolled_into_view
self.wait_for(lambda: el.is_enabled() and el.is_displayed())
ActionChains(self.driver).move_to_element(el).click().perform()
def _alert_wait(self, timeout=None):
if timeout is None:
timeout = self.timeout
WebDriverWait(self.driver, timeout).until(
expected_conditions.alert_is_present(), "Timed out waiting for confirmation popup."
)
def _alert_accept(self):
self.driver.switch_to.alert.accept()
WebDriverWait(self.driver, self.timeout).until(
alert_is_not_present(), "Timed out waiting for confirmation popup to disappear."
)
def _alert_dismiss(self):
self.driver.switch_to.alert.dismiss()
WebDriverWait(self.driver, self.timeout).until(
alert_is_not_present(), "Timed out waiting for confirmation popup to disappear."
)
| ehartsuyker/securedrop | securedrop/tests/functional/functional_test.py | Python | agpl-3.0 | 11,846 |
# -*- encoding: utf-8 -*-
from openerp.osv import osv, fields
class LeadToChangeRequestWizard(osv.TransientModel):
"""
wizard to convert a Lead into a Change Request and move the Mail Thread
"""
_name = "crm.lead2cr.wizard"
_inherit = 'crm.partner.binding'
_columns = {
"lead_id": fields.many2one(
"crm.lead", "Lead", domain=[("type", "=", "lead")]
),
# "project_id": fields.many2one("project.project", "Project"),
"change_category_id": fields.many2one(
"change.management.category", "Change Category"
),
}
_defaults = {
"lead_id": lambda self, cr, uid, context=None: context.get('active_id')
}
def action_lead_to_change_request(self, cr, uid, ids, context=None):
# get the wizards and models
wizards = self.browse(cr, uid, ids, context=context)
lead_obj = self.pool["crm.lead"]
cr_obj = self.pool["change.management.change"]
attachment_obj = self.pool['ir.attachment']
for wizard in wizards:
# get the lead to transform
lead = wizard.lead_id
partner = self._find_matching_partner(cr, uid, context=context)
if not partner and (lead.partner_name or lead.contact_name):
partner_ids = lead_obj.handle_partner_assignation(
cr, uid, [lead.id], context=context
)
partner = partner_ids[lead.id]
# create new change request
vals = {
"description": lead.name,
"description_event": lead.description,
"email_from": lead.email_from,
"project_id": lead.project_id.id,
"stakeholder_id": partner,
"author_id": uid,
"change_category_id": wizard.change_category_id.id,
}
change_id = cr_obj.create(cr, uid, vals, context=None)
change = cr_obj.browse(cr, uid, change_id, context=None)
# move the mail thread
lead_obj.message_change_thread(
cr, uid, lead.id, change_id,
"change.management.change", context=context
)
# Move attachments
attachment_ids = attachment_obj.search(
cr, uid,
[('res_model', '=', 'crm.lead'), ('res_id', '=', lead.id)],
context=context
)
attachment_obj.write(
cr, uid, attachment_ids,
{'res_model': 'change.management.change', 'res_id': change_id},
context=context
)
# Archive the lead
lead_obj.write(
cr, uid, [lead.id], {'active': False}, context=context)
# delete the lead
# lead_obj.unlink(cr, uid, [lead.id], context=None)
# return the action to go to the form view of the new CR
view_id = self.pool.get('ir.ui.view').search(
cr, uid,
[
('model', '=', 'change.management.change'),
('name', '=', 'change_form_view')
]
)
return {
'name': 'CR created',
'view_type': 'form',
'view_mode': 'form',
'view_id': view_id,
'res_model': 'change.management.change',
'type': 'ir.actions.act_window',
'res_id': change_id,
'context': context
}
| mohamedhagag/community-addons | crm_change_request/models/change_request.py | Python | agpl-3.0 | 3,485 |
# -*- coding: utf-8 -*-
{
'name': "website_register_b2b",
'summary': """
Registration form for site purchases """,
'description': """
Registration form for site purchases
""",
'author': "Alexsandro Haag <[email protected]>, HGSOFT",
'website': "http://www.hgsoft.com.br",
'category': 'Website',
'version': '10.0.1',
'depends': ['base','website_sale'],
'data': [
'views/register.xml',
],
'installable': True,
'auto_install': False,
}
| alexsandrohaag/odoo-website-addons | website_register_b2b/__manifest__.py | Python | agpl-3.0 | 514 |
"""
Backfill opportunity ids for Enterprise Coupons, Enterprise Offers and Manual Order Offers.
"""
import csv
import logging
from collections import Counter, defaultdict
from time import sleep
from uuid import UUID
from django.core.management import BaseCommand
from ecommerce.core.constants import COUPON_PRODUCT_CLASS_NAME
from ecommerce.extensions.offer.models import OFFER_PRIORITY_ENTERPRISE, OFFER_PRIORITY_MANUAL_ORDER
from ecommerce.programs.custom import get_model
ConditionalOffer = get_model('offer', 'ConditionalOffer')
Product = get_model('catalogue', 'Product')
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
class Command(BaseCommand):
"""
Backfill opportunity ids for Enterprise Coupons, Enterprise Offers and Manual Order Offers.
"""
def add_arguments(self, parser):
parser.add_argument(
'--data-csv',
action='store',
dest='data_csv',
default=None,
help='Path of csv to read enterprise uuids and opportunity ids.',
type=str,
)
parser.add_argument(
'--contract-type',
action='store',
dest='contract_type',
default='single',
choices=['single', 'multi'],
help='Specify type of backfilling',
type=str,
)
parser.add_argument(
'--batch-limit',
action='store',
dest='batch_limit',
default=100,
help='Number of records to be fetched in each batch of backfilling.',
type=int,
)
parser.add_argument(
'--batch-offset',
action='store',
dest='batch_offset',
default=0,
help='Which index to start batching from.',
type=int,
)
parser.add_argument(
'--batch-sleep',
action='store',
dest='batch_sleep',
default=10,
help='How long to sleep between batches.',
type=int,
)
def read_csv(self, csv_path):
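# Map each enterprise customer UUID to its Salesforce opportunity id.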
data = {}
with open(csv_path) as csv_file:
reader = csv.DictReader(csv_file)
for row in reader:
data[UUID(row['enterprise_customer_uuid'])] = row['opportunity_id']
return data
def read_multi_contracts_csv(self, csv_path):
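# Group opportunity ids by coupon id, offer id or enterprise customer UUID,
# then collapse each group to a single opportunity id (the most common one
# when a group has several).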
data = {
'coupons': defaultdict(list),
'offers': defaultdict(list),
'ec_uuids': defaultdict(list),
}
with open(csv_path) as csv_file:
reader = csv.DictReader(csv_file)
for row in reader:
if row['ORDER_LINE_OFFER_TYPE'] == 'Voucher':
data['coupons'][row['ORDER_LINE_COUPON_ID']].append(row['OPP_ID'])
elif row['ORDER_LINE_OFFER_TYPE'] in ('Site', 'User'):
data['offers'][row['ORDER_LINE_OFFER_ID']].append(row['OPP_ID'])
else:
data['ec_uuids'][UUID(row['ENTERPRISE_CUSTOMER_UUID'])].append(row['OPP_ID'])
# condition the data so that at the end we have only one opportunity id for each coupon/offer
for __, category_data in data.items():
for category_object_id, opportunity_ids in category_data.items():
if len(opportunity_ids) > 1:
most_common_opportunity_id, __ = Counter(opportunity_ids).most_common(1)[0]
category_data[category_object_id] = most_common_opportunity_id
else:
category_data[category_object_id] = opportunity_ids[0]
return data
def get_enterprise_coupons_batch(self, coupon_filter, start, end):
logger.info('Fetching new batch of enterprise coupons from indexes: %s to %s', start, end)
return Product.objects.filter(**coupon_filter)[start:end]
def get_enterprise_offers_batch(self, offer_filter, start, end):
return ConditionalOffer.objects.filter(**offer_filter)[start:end]
def _backfill_enterprise_coupons(self, data, options, coupon_filter):
batch_limit = options['batch_limit']
batch_sleep = options['batch_sleep']
batch_offset = options['batch_offset']
current_batch_index = batch_offset
logger.info('Started Backfilling Enterprise Coupons...')
coupons = self.get_enterprise_coupons_batch(coupon_filter, batch_offset, batch_offset + batch_limit)
while coupons:
for coupon in coupons:
opportunity_id = data.get(str(coupon.id)) or data.get(UUID(coupon.attr.enterprise_customer_uuid))
if getattr(coupon.attr, 'sales_force_id', None) is None and opportunity_id:
logger.info(
'Enterprise Coupon updated. CouponID: [%s], OpportunityID: [%s]',
coupon.id,
opportunity_id
)
coupon.attr.sales_force_id = opportunity_id
coupon.save()
sleep(batch_sleep)
current_batch_index += len(coupons)
coupons = self.get_enterprise_coupons_batch(
coupon_filter, current_batch_index, current_batch_index + batch_limit
)
logger.info('Backfilling for Enterprise Coupons finished.')
def _backfill_offers(self, data, options, offer_filter, log_prefix):
logger.info('[%s] Started Backfilling Offers...', log_prefix)
batch_limit = options['batch_limit']
batch_sleep = options['batch_sleep']
batch_offset = options['batch_offset']
current_batch_index = batch_offset
ent_offers = self.get_enterprise_offers_batch(offer_filter, batch_offset, batch_offset + batch_limit)
while ent_offers:
for ent_offer in ent_offers:
opportunity_id = data.get(str(ent_offer.id)) or data.get(ent_offer.condition.enterprise_customer_uuid)
if not ent_offer.sales_force_id and opportunity_id:
logger.info(
'[%s] Offer updated. OfferID: [%s], OpportunityID: [%s]',
log_prefix,
ent_offer.id,
opportunity_id,
)
ent_offer.sales_force_id = opportunity_id
ent_offer.save()
sleep(batch_sleep)
current_batch_index += len(ent_offers)
ent_offers = self.get_enterprise_offers_batch(
offer_filter, current_batch_index, current_batch_index + batch_limit
)
logger.info('[%s] Backfilling for Offers finished.', log_prefix)
def handle(self, *args, **options):
if options['contract_type'] == 'single':
logger.info('Backfilling for single contracts.')
self.backfill_single_contracts(options)
elif options['contract_type'] == 'multi':
logger.info('Backfilling for multi contracts.')
self.backfill_multi_contracts(options)
def backfill_single_contracts(self, options):
data = self.read_csv(options['data_csv'])
self._backfill_enterprise_coupons(data, options, {
'product_class__name': COUPON_PRODUCT_CLASS_NAME,
'attributes__code': 'enterprise_customer_uuid',
'attribute_values__value_text__in': data.keys()
})
self._backfill_offers(data, options, {
'offer_type': ConditionalOffer.SITE,
'priority': OFFER_PRIORITY_ENTERPRISE,
'condition__enterprise_customer_uuid__in': data.keys(),
}, 'ENTERPRISE OFFER')
self._backfill_offers(data, options, {
'offer_type': ConditionalOffer.USER,
'priority': OFFER_PRIORITY_MANUAL_ORDER,
'condition__enterprise_customer_uuid__in': data.keys(),
}, 'ENTERPRISE MANUAL ORDER OFFER')
def backfill_multi_contracts(self, options):
data = self.read_multi_contracts_csv(options['data_csv'])
coupons_data = data['coupons']
self._backfill_enterprise_coupons(coupons_data, options, {
'product_class__name': COUPON_PRODUCT_CLASS_NAME,
'id__in': coupons_data.keys()
})
offers_data = data['offers']
self._backfill_offers(offers_data, options, {
'offer_type__in': (ConditionalOffer.SITE, ConditionalOffer.USER),
'priority__in': (OFFER_PRIORITY_ENTERPRISE, OFFER_PRIORITY_MANUAL_ORDER),
'id__in': offers_data.keys(),
}, 'ALL ENTERPRISE OFFERS')
# backfill coupons and offers missing both coupon id and offer id
ec_uuids = data['ec_uuids']
self._backfill_enterprise_coupons(ec_uuids, options, {
'product_class__name': COUPON_PRODUCT_CLASS_NAME,
'attributes__code': 'enterprise_customer_uuid',
'attribute_values__value_text__in': ec_uuids.keys()
})
self._backfill_offers(ec_uuids, options, {
'offer_type': ConditionalOffer.SITE,
'priority': OFFER_PRIORITY_ENTERPRISE,
'condition__enterprise_customer_uuid__in': ec_uuids.keys(),
}, 'ENTERPRISE OFFER')
self._backfill_offers(ec_uuids, options, {
'offer_type': ConditionalOffer.USER,
'priority': OFFER_PRIORITY_MANUAL_ORDER,
'condition__enterprise_customer_uuid__in': ec_uuids.keys(),
}, 'ENTERPRISE MANUAL ORDER OFFER')
| edx/ecommerce | ecommerce/enterprise/management/commands/backfill_opportunity_ids.py | Python | agpl-3.0 | 9,523 |
import copy
from django.db.models.fields.related import ForeignKey, OneToOneField
from rest_framework import mixins
from rest_framework.generics import (
GenericAPIView, ListAPIView, ListCreateAPIView, RetrieveAPIView,
RetrieveUpdateDestroyAPIView
)
from api.generics.serializers import (
DynamicFieldsModelSerializer, DynamicFieldsSerializer
)
class DynamicView(GenericAPIView):
# foreign / one-to-one fields that can be used with select_related()
select_related_fields = []
serializer_fields = []
field_source_mapping = {}
fields = ()
selectable_fields = ()
def __init__(self, *args, **kwargs):
"""
Extract prefetches and default fields from Meta
"""
# TODO: move this to a meta class, to evaluate once when defining the
# class
# TODO: This is not efficient - 2016-01-20
serializer_class = self.get_serializer_class()
serializer = serializer_class() # need an instance to extract fields
model = serializer_class.Meta.model
assert issubclass(
serializer_class, DynamicFieldsModelSerializer
) or issubclass(serializer_class, DynamicFieldsSerializer), (
"serializer class must be an instance of \
DynamicFieldsModelSerializer " "instead got %s"
) % (serializer_class.__name__,)
self.serializer_fields = serializer.fields.keys()
self.select_related_fields = [
field.name for field in model._meta.fields
if isinstance(field, (ForeignKey, OneToOneField))
]
self.field_source_mapping = {
field.field_name: field.source
for field in serializer.fields.values()
if isinstance(
field, (ForeignKey, OneToOneField)
)
}
def _get_query_fields(self):
if not self.request:
return ()
request_fields = self.request.query_params.get('fields')
# if requested query fields is set to `all` we will return all
# serializer fields defined in serializer class. Here we assign
# `self.fields = ()` so that it will be assigned all serializer
# fields in `filter_queryset` method.
if request_fields and request_fields == 'all':
self.fields = ()
self.selectable_fields = (self.selectable_fields + tuple(
self.serializer_fields))
elif request_fields:
for request_field in request_fields.split(','):
if request_field not in list(self.fields):
# put selectable fields together with required fields
# defined in the class
self.fields = self.fields + (request_field,)
# just in case if you want to know which of fields
# we get as selectable field
self.selectable_fields = self.selectable_fields+(request_field,) # NOQA: E501
# Some bugs occur if the request fields include 'aggregations',
# so we remove it from the request fields and assign a tuple of
# fields without 'aggregations'.
fields = list(self.fields)
try:
fields.remove('aggregations')
except ValueError:
pass
# Assign it again
self.fields = tuple(fields)
return getattr(self, 'fields', ())
def filter_queryset(self, queryset, *args, **kwargs):
"""
Prefetches based on 'fields' GET arg
"""
filter_fields = copy.deepcopy(self.request.query_params)
if 'fields' in filter_fields:
filter_fields.pop('fields')
if 'format' in filter_fields:
filter_fields.pop('format')
if 'page' in filter_fields:
filter_fields.pop('page')
if 'page_size' in filter_fields:
filter_fields.pop('page_size')
if 'ordering' in filter_fields:
filter_fields.pop('ordering')
if 'q' in filter_fields:
filter_fields.pop('q')
if 'q_fields' in filter_fields:
filter_fields.pop('q_fields')
for filter_field in filter_fields:
found = False
try:
declared_filters = self.filter_class.declared_filters
for key in declared_filters:
if filter_field == key:
found = True
if found is False:
                    # Invalidate the filter class so that an unknown filter
                    # name fails loudly instead of being silently ignored.
setattr(self, 'filter_class', 'No Filter Class')
break
except AttributeError:
pass
fields = self._get_query_fields(*args, **kwargs)
if not fields:
fields = self.serializer_fields
select_related_fields = list(set(
self.select_related_fields
) & set(fields))
if select_related_fields:
queryset = queryset.select_related(*select_related_fields)
for field in fields:
# TODO: Hook this up in the view - 2016-01-15
if hasattr(queryset, 'prefetch_%s' % field):
queryset = getattr(queryset, 'prefetch_%s' % field)()
queryset = super(DynamicView, self).filter_queryset(
queryset, *args, **kwargs
)
return queryset
def get_serializer(self, *args, **kwargs):
"""
Apply 'fields' to dynamic fields serializer
"""
fields = self._get_query_fields()
kwargs['context'] = self.get_serializer_context()
return super(DynamicView, self).get_serializer(
fields=fields, *args, **kwargs
)
class DynamicListView(DynamicView, ListAPIView):
"""
List view with dynamic properties
"""
class DynamicDetailView(DynamicView, RetrieveAPIView):
"""
    Detail view with dynamic properties
"""
class DynamicListCRUDView(DynamicView, ListCreateAPIView):
"""
    List/create view with dynamic properties
"""
class DynamicDetailCRUDView(DynamicView, RetrieveUpdateDestroyAPIView):
"""
    Detail view with update/delete support and dynamic properties
"""
class SaveAllSerializer(mixins.RetrieveModelMixin,
mixins.UpdateModelMixin,
mixins.DestroyModelMixin,
mixins.CreateModelMixin,
GenericAPIView):
def get(self, request, *args, **kwargs):
return self.retrieve(request, *args, **kwargs)
def post(self, request, *args, **kwargs):
return self.create(request, *args, **kwargs)
def put(self, request, *args, **kwargs):
return self.update(request, *args, **kwargs)
def patch(self, request, *args, **kwargs):
return self.partial_update(request, *args, **kwargs)
def delete(self, request, *args, **kwargs):
return self.destroy(request, *args, **kwargs)
| openaid-IATI/OIPA | OIPA/api/generics/views.py | Python | agpl-3.0 | 6,974 |
#
# Copyright (c) 2015 ThoughtWorks, Inc.
#
# Pixelated is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Pixelated is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Pixelated. If not, see <http://www.gnu.org/licenses/>.
import time
import json
from twisted.internet import defer
from twisted.logger import Logger
from twisted.web.server import NOT_DONE_YET
from twisted.web.resource import Resource
from twisted.web import server
from leap.common import events
from pixelated.adapter.model.mail import InputMail
from pixelated.resources import respond_json_deferred, BaseResource
from pixelated.adapter.services.mail_sender import SMTPDownException
from pixelated.support.functional import to_unicode
log = Logger()
class MailsUnreadResource(Resource):
isLeaf = True
def __init__(self, mail_service):
Resource.__init__(self)
self._mail_service = mail_service
def render_POST(self, request):
idents = json.load(request.content).get('idents')
deferreds = []
for ident in idents:
deferreds.append(self._mail_service.mark_as_unread(ident))
d = defer.gatherResults(deferreds, consumeErrors=True)
d.addCallback(lambda _: respond_json_deferred(None, request))
d.addErrback(lambda _: respond_json_deferred(None, request, status_code=500))
return NOT_DONE_YET
class MailsReadResource(Resource):
isLeaf = True
def __init__(self, mail_service):
Resource.__init__(self)
self._mail_service = mail_service
def render_POST(self, request):
idents = json.load(request.content).get('idents')
deferreds = []
for ident in idents:
deferreds.append(self._mail_service.mark_as_read(ident))
d = defer.gatherResults(deferreds, consumeErrors=True)
d.addCallback(lambda _: respond_json_deferred(None, request))
d.addErrback(lambda _: respond_json_deferred(None, request, status_code=500))
return NOT_DONE_YET
class MailsDeleteResource(Resource):
isLeaf = True
def __init__(self, mail_service):
Resource.__init__(self)
self._mail_service = mail_service
def render_POST(self, request):
def response_failed(failure):
log.error('something failed: %s' % failure.getErrorMessage())
request.finish()
idents = json.loads(request.content.read())['idents']
deferreds = []
for ident in idents:
deferreds.append(self._mail_service.delete_mail(ident))
d = defer.gatherResults(deferreds, consumeErrors=True)
d.addCallback(lambda _: respond_json_deferred(None, request))
d.addErrback(response_failed)
return NOT_DONE_YET
class MailsRecoverResource(Resource):
isLeaf = True
def __init__(self, mail_service):
Resource.__init__(self)
self._mail_service = mail_service
def render_POST(self, request):
idents = json.loads(request.content.read())['idents']
deferreds = []
for ident in idents:
deferreds.append(self._mail_service.recover_mail(ident))
d = defer.gatherResults(deferreds, consumeErrors=True)
d.addCallback(lambda _: respond_json_deferred(None, request))
d.addErrback(lambda _: respond_json_deferred(None, request, status_code=500))
return NOT_DONE_YET
class MailsArchiveResource(Resource):
isLeaf = True
def __init__(self, mail_service):
Resource.__init__(self)
self._mail_service = mail_service
def render_POST(self, request):
idents = json.loads(request.content.read())['idents']
deferreds = []
for ident in idents:
deferreds.append(self._mail_service.archive_mail(ident))
d = defer.gatherResults(deferreds, consumeErrors=True)
d.addCallback(lambda _: respond_json_deferred({'successMessage': 'your-message-was-archived'}, request))
d.addErrback(lambda _: respond_json_deferred(None, request, status_code=500))
return NOT_DONE_YET
class MailsResource(BaseResource):
def _register_smtp_error_handler(self):
def on_error(event, content):
delivery_error_mail = InputMail.delivery_error_template(delivery_address=event.content)
self._mail_service.mailboxes.inbox.add(delivery_error_mail)
events.register(events.catalog.SMTP_SEND_MESSAGE_ERROR, callback=on_error)
def __init__(self, services_factory):
BaseResource.__init__(self, services_factory)
self._register_smtp_error_handler()
def getChild(self, action, request):
_mail_service = self.mail_service(request)
if action == 'delete':
return MailsDeleteResource(_mail_service)
if action == 'recover':
return MailsRecoverResource(_mail_service)
if action == 'archive':
return MailsArchiveResource(_mail_service)
if action == 'read':
return MailsReadResource(_mail_service)
if action == 'unread':
return MailsUnreadResource(_mail_service)
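    # Dispatch sketch, assuming this resource is mounted at a path such as
    # /mails (the mount point is configured outside this module):
    #   POST <mount>/delete  -> MailsDeleteResource
    #   POST <mount>/recover -> MailsRecoverResource
    #   POST <mount>/archive -> MailsArchiveResource
    #   POST <mount>/read    -> MailsReadResource
    #   POST <mount>/unread  -> MailsUnreadResource
    # Any other child path falls through and returns None.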
def _build_mails_response(self, (mails, total)):
return {
"stats": {
"total": total,
},
"mails": [mail.as_dict() for mail in mails]
}
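    # Example request handled by render_GET below (parameter names match the
    # request.args lookups): GET <mount>?q=<search terms>&w=25&p=1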
def render_GET(self, request):
_mail_service = self.mail_service(request)
query, window_size, page = request.args.get('q')[0], request.args.get('w')[0], request.args.get('p')[0]
unicode_query = to_unicode(query)
d = _mail_service.mails(unicode_query, window_size, page)
d.addCallback(self._build_mails_response)
d.addCallback(lambda res: respond_json_deferred(res, request))
def error_handler(error):
            log.error('failed to fetch mails: %s' % error.getErrorMessage())
d.addErrback(error_handler)
return NOT_DONE_YET
def render_POST(self, request):
def onError(error):
if isinstance(error.value, SMTPDownException):
respond_json_deferred({'message': str(error.value)}, request, status_code=503)
else:
log.error('error occurred while sending: %s' % error.getErrorMessage())
respond_json_deferred({'message': 'an error occurred while sending'}, request, status_code=422)
deferred = self._handle_post(request)
deferred.addErrback(onError)
return server.NOT_DONE_YET
def render_PUT(self, request):
def onError(error):
log.error('error saving draft: %s' % error.getErrorMessage())
respond_json_deferred("", request, status_code=422)
deferred = self._handle_put(request)
deferred.addErrback(onError)
return server.NOT_DONE_YET
@defer.inlineCallbacks
def _fetch_attachment_contents(self, content_dict, _mail_service):
attachments = content_dict.get('attachments', []) if content_dict else []
for attachment in attachments:
retrieved_attachment = yield _mail_service.attachment(attachment['ident'])
attachment['raw'] = retrieved_attachment['content']
content_dict['attachments'] = attachments
defer.returnValue(content_dict)
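    # Expected shape, inferred from the loop above rather than a documented
    # API: content_dict['attachments'] is a list of dicts carrying at least
    # an 'ident' key; each entry gains a 'raw' key with the fetched content.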
@defer.inlineCallbacks
def _handle_post(self, request):
_mail_service = self.mail_service(request)
content_dict = json.loads(request.content.read())
with_attachment_content = yield self._fetch_attachment_contents(content_dict, _mail_service)
sent_mail = yield _mail_service.send_mail(with_attachment_content)
respond_json_deferred(sent_mail.as_dict(), request, status_code=201)
@defer.inlineCallbacks
def _handle_put(self, request):
_draft_service = self.draft_service(request)
_mail_service = self.mail_service(request)
content_dict = json.loads(request.content.read())
with_attachment_content = yield self._fetch_attachment_contents(content_dict, _mail_service)
_mail = InputMail.from_dict(with_attachment_content, from_address=_mail_service.account_email)
draft_id = content_dict.get('ident')
pixelated_mail = yield _draft_service.process_draft(draft_id, _mail)
if not pixelated_mail:
respond_json_deferred("", request, status_code=422)
else:
respond_json_deferred({'ident': pixelated_mail.ident}, request)
| pixelated-project/pixelated-user-agent | service/pixelated/resources/mails_resource.py | Python | agpl-3.0 | 8,824 |
# -*- coding: utf-8 -*-
from openerp import fields, models, api, _
class account_account_interest(models.Model):
_name = "account.account.interest"
_description = 'Account Account Interest'
account_id = fields.Many2one(
'account.account',
'Account',
required=True,
ondelete="cascade")
interest_account_id = fields.Many2one(
'account.account',
'Interest Account',
required=True,
domain=[('type', '!=', 'view')])
analytic_account_id = fields.Many2one(
'account.analytic.account',
'Analytic account',
domain=[('type', '!=', 'view')])
interest_rate = fields.Float(
'Interest',
required=True,
digits=(7, 4))
date_from = fields.Date(
'Date From',
required=True)
date_to = fields.Date('Date To')
class account_account(models.Model):
_inherit = "account.account"
account_account_interest_ids = fields.One2many(
'account.account.interest',
'account_id',
'Interest Rates')
def get_active_interest_data(self, cr, uid, ids, dt_from, dt_to, context=None):
if context is None:
context = {}
interest_obj = self.pool.get('account.account.interest')
res = {}
for record_id in ids:
interest_domain = [
('account_id.id', '=', record_id),
('date_from', '<=', dt_from),
'|', ('date_to', '>=', dt_to),
('date_to', '=', False)]
interest_ids = interest_obj.search(
cr, uid, interest_domain, context=context)
if interest_ids:
res[record_id] = interest_obj.browse(
cr, uid, interest_ids[0], context=context)
return res
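# Usage sketch (hypothetical ids and date range; arguments follow the
# old-API signature above):
#     data = account_obj.get_active_interest_data(
#         cr, uid, [account_id], '2015-01-01', '2015-01-31')
# Each account id maps to the first account.account.interest record whose
# date range covers the requested period.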
| syci/ingadhoc-odoo-addons | account_interests/interest.py | Python | agpl-3.0 | 1,803 |
# Generated by Django 2.1.7 on 2019-05-12 16:08
from django.db import migrations, models
class Migration(migrations.Migration):
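    # CREATE INDEX CONCURRENTLY cannot run inside a transaction block, so
    # this migration must be non-atomic.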
atomic = False
dependencies = [
('publishers', '0007_publisher_romeo_parent_id'),
]
operations = [
migrations.AlterField(
model_name='journal',
name='title',
field=models.CharField(max_length=256),
),
migrations.AlterField(
model_name='publisher',
name='name',
field=models.CharField(max_length=256),
),
migrations.RunSQL(
sql="""
CREATE INDEX CONCURRENTLY papers_journal_title_upper ON public.papers_journal USING btree (UPPER(title));
""",
reverse_sql="""
DROP INDEX CONCURRENTLY papers_journal_title_upper;
""",
),
migrations.RunSQL(
sql="""
CREATE INDEX CONCURRENTLY papers_publisher_name_upper ON public.papers_publisher USING btree (UPPER(name));
""",
reverse_sql="""
DROP INDEX CONCURRENTLY papers_publisher_name_upper;
"""
),
]
| wetneb/dissemin | publishers/migrations/0008_fix_name_indices.py | Python | agpl-3.0 | 1,178 |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'UserFeeds'
db.create_table(u'core_userfeeds', (
('user', self.gf('django.db.models.fields.related.OneToOneField')(related_name='feeds', unique=True, primary_key=True, to=orm['base.User'])),
('imported_items', self.gf('django.db.models.fields.related.OneToOneField')(blank=True, related_name='imported_items', unique=True, null=True, to=orm['core.BaseFeed'])),
('sent_items', self.gf('django.db.models.fields.related.OneToOneField')(blank=True, related_name='sent_items', unique=True, null=True, to=orm['core.BaseFeed'])),
('received_items', self.gf('django.db.models.fields.related.OneToOneField')(blank=True, related_name='received_items', unique=True, null=True, to=orm['core.BaseFeed'])),
('written_items', self.gf('django.db.models.fields.related.OneToOneField')(blank=True, related_name='written_items', unique=True, null=True, to=orm['core.BaseFeed'])),
))
db.send_create_signal('core', ['UserFeeds'])
# Adding M2M table for field blogs on 'UserFeeds'
m2m_table_name = db.shorten_name(u'core_userfeeds_blogs')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('userfeeds', models.ForeignKey(orm['core.userfeeds'], null=False)),
('basefeed', models.ForeignKey(orm['core.basefeed'], null=False))
))
db.create_unique(m2m_table_name, ['userfeeds_id', 'basefeed_id'])
# Adding model 'UserSubscriptions'
db.create_table(u'core_usersubscriptions', (
('user', self.gf('django.db.models.fields.related.OneToOneField')(related_name='subscriptions', unique=True, primary_key=True, to=orm['base.User'])),
('imported_items', self.gf('django.db.models.fields.related.OneToOneField')(blank=True, related_name='imported_items', unique=True, null=True, to=orm['core.Subscription'])),
('sent_items', self.gf('django.db.models.fields.related.OneToOneField')(blank=True, related_name='sent_items', unique=True, null=True, to=orm['core.Subscription'])),
('received_items', self.gf('django.db.models.fields.related.OneToOneField')(blank=True, related_name='received_items', unique=True, null=True, to=orm['core.Subscription'])),
('written_items', self.gf('django.db.models.fields.related.OneToOneField')(blank=True, related_name='written_items', unique=True, null=True, to=orm['core.Subscription'])),
))
db.send_create_signal('core', ['UserSubscriptions'])
# Adding M2M table for field blogs on 'UserSubscriptions'
m2m_table_name = db.shorten_name(u'core_usersubscriptions_blogs')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('usersubscriptions', models.ForeignKey(orm['core.usersubscriptions'], null=False)),
('subscription', models.ForeignKey(orm['core.subscription'], null=False))
))
db.create_unique(m2m_table_name, ['usersubscriptions_id', 'subscription_id'])
def backwards(self, orm):
# Deleting model 'UserFeeds'
db.delete_table(u'core_userfeeds')
# Removing M2M table for field blogs on 'UserFeeds'
db.delete_table(db.shorten_name(u'core_userfeeds_blogs'))
# Deleting model 'UserSubscriptions'
db.delete_table(u'core_usersubscriptions')
# Removing M2M table for field blogs on 'UserSubscriptions'
db.delete_table(db.shorten_name(u'core_usersubscriptions_blogs'))
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'base.user': {
'Meta': {'object_name': 'User'},
'data': ('jsonfield.fields.JSONField', [], {'default': '{}', 'blank': 'True'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '254', 'db_index': 'True'}),
'email_announcements': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
'hash_codes': ('jsonfield.fields.JSONField', [], {'default': "{'unsubscribe': 'a2a11d045c484d9cb16448cca4075f1d'}", 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'register_data': ('jsonfield.fields.JSONField', [], {'default': '{}', 'blank': 'True'}),
'sent_emails': ('jsonfield.fields.JSONField', [], {'default': '{}', 'blank': 'True'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '254', 'db_index': 'True'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'core.article': {
'Meta': {'object_name': 'Article', '_ormbases': ['core.BaseItem']},
u'baseitem_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.BaseItem']", 'unique': 'True', 'primary_key': 'True'}),
'content': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'content_error': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'content_type': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'date_published': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'is_orphaned': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'pages_urls': ('jsonfield.fields.JSONField', [], {'default': "u'[]'", 'blank': 'True'}),
'publishers': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['base.User']", 'symmetrical': 'False', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '200'}),
'url_absolute': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'url_error': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'word_count': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
'core.basefeed': {
'Meta': {'object_name': 'BaseFeed'},
'closed_reason': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'date_closed': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'auto_now_add': 'True', 'blank': 'True'}),
'date_last_fetch': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'description_en': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'description_fr': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'description_nt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'duplicate_of': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.BaseFeed']", 'null': 'True', 'blank': 'True'}),
'errors': ('jsonfield.fields.JSONField', [], {'null': 'True', 'blank': 'True'}),
'fetch_interval': ('django.db.models.fields.IntegerField', [], {'default': '43200', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_good': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_internal': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_restricted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'items': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['core.BaseItem']", 'symmetrical': 'False', 'blank': 'True'}),
'languages': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['core.Language']", 'symmetrical': 'False', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'options': ('jsonfield.fields.JSONField', [], {'null': 'True', 'blank': 'True'}),
'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'polymorphic_core.basefeed_set'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}),
'short_description_en': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'short_description_fr': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'short_description_nt': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'thumbnail': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'thumbnail_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.User']", 'null': 'True', 'blank': 'True'})
},
'core.baseitem': {
'Meta': {'object_name': 'BaseItem'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'default_rating': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'blank': 'True'}),
'duplicate_of': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.BaseItem']"}),
'excerpt': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'is_restricted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '5'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'origin': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'polymorphic_core.baseitem_set'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'sources': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'sources_rel_+'", 'blank': 'True', 'to': "orm['core.BaseItem']"}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['core.SimpleTag']", 'symmetrical': 'False', 'blank': 'True'}),
'text_direction': ('django.db.models.fields.CharField', [], {'default': "u'ltr'", 'max_length': '3'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.User']", 'null': 'True', 'blank': 'True'})
},
'core.combinedfeed': {
'Meta': {'object_name': 'CombinedFeed'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_restricted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.User']"})
},
'core.combinedfeedrule': {
'Meta': {'ordering': "('position',)", 'object_name': 'CombinedFeedRule'},
'check_error': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255', 'blank': 'True'}),
'clone_of': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.MailFeedRule']", 'null': 'True', 'blank': 'True'}),
'combinedfeed': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.CombinedFeed']"}),
'feeds': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['core.BaseFeed']", 'symmetrical': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_valid': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'position': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'})
},
'core.corepermissions': {
'Meta': {'object_name': 'CorePermissions'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'core.folder': {
'Meta': {'unique_together': "(('name', 'user', 'parent'),)", 'object_name': 'Folder'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
u'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
u'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['core.Folder']"}),
u'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
u'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.User']"})
},
'core.helpcontent': {
'Meta': {'ordering': "['ordering', 'id']", 'object_name': 'HelpContent'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'content_en': ('django.db.models.fields.TextField', [], {}),
'content_fr': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'content_nt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}),
'name_en': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'name_fr': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'name_nt': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'ordering': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'core.historyentry': {
'Meta': {'object_name': 'HistoryEntry'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'polymorphic_core.historyentry_set'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.User']"})
},
'core.language': {
'Meta': {'object_name': 'Language'},
'dj_code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '16'}),
'duplicate_of': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Language']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
u'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
u'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['core.Language']"}),
u'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
u'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'core.mailaccount': {
'Meta': {'unique_together': "(('user', 'hostname', 'username'),)", 'object_name': 'MailAccount'},
'conn_error': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255', 'blank': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_last_conn': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2007, 1, 1, 0, 0)'}),
'hostname': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_usable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255', 'blank': 'True'}),
'port': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'use_ssl': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.User']"}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'})
},
'core.mailfeed': {
'Meta': {'object_name': 'MailFeed', '_ormbases': ['core.BaseFeed']},
'account': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['core.MailAccount']", 'null': 'True', 'blank': 'True'}),
u'basefeed_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.BaseFeed']", 'unique': 'True', 'primary_key': 'True'}),
'finish_action': ('django.db.models.fields.CharField', [], {'default': "u'markread'", 'max_length': '10'}),
'match_action': ('django.db.models.fields.CharField', [], {'default': "u'scrape'", 'max_length': '10'}),
'rules_operation': ('django.db.models.fields.CharField', [], {'default': "u'any'", 'max_length': '10'}),
'scrape_blacklist': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'scrape_whitelist': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'})
},
'core.mailfeedrule': {
'Meta': {'ordering': "('group', 'position')", 'object_name': 'MailFeedRule'},
'check_error': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255', 'blank': 'True'}),
'clone_of': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.MailFeedRule']", 'null': 'True', 'blank': 'True'}),
'group': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'group_operation': ('django.db.models.fields.CharField', [], {'default': "u'any'", 'max_length': '10', 'null': 'True', 'blank': 'True'}),
'header_field': ('django.db.models.fields.CharField', [], {'default': "u'any'", 'max_length': '10'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_valid': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'mailfeed': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.MailFeed']"}),
'match_case': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'match_type': ('django.db.models.fields.CharField', [], {'default': "u'contains'", 'max_length': '10'}),
'match_value': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '1024'}),
'other_header': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'position': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'})
},
'core.read': {
'Meta': {'unique_together': "(('user', 'item'),)", 'object_name': 'Read'},
'bookmark_type': ('django.db.models.fields.CharField', [], {'default': "u'U'", 'max_length': '2'}),
'check_set_subscriptions_131004_done': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'date_analysis': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_archived': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_auto_read': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_bookmarked': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'date_fact': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_fun': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_knowhow': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_knowledge': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_number': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_prospective': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_quote': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_read': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_rules': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_starred': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_analysis': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_archived': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_auto_read': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_bookmarked': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_fact': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_fun': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_good': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_knowhow': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_knowledge': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_number': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_prospective': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_quote': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_read': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_rules': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_starred': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'item': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.BaseItem']"}),
'knowledge_type': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'rating': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'senders': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'senders'", 'symmetrical': 'False', 'to': u"orm['base.User']"}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['core.SimpleTag']", 'symmetrical': 'False'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.User']"})
},
'core.rssatomfeed': {
'Meta': {'object_name': 'RssAtomFeed', '_ormbases': ['core.BaseFeed']},
u'basefeed_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.BaseFeed']", 'unique': 'True', 'primary_key': 'True'}),
'last_etag': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'last_modified': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['core.SimpleTag']", 'symmetrical': 'False', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '200'}),
'website': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.WebSite']", 'null': 'True', 'blank': 'True'})
},
'core.simpletag': {
'Meta': {'unique_together': "(('name', 'language'),)", 'object_name': 'SimpleTag'},
'duplicate_of': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.SimpleTag']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Language']", 'null': 'True'}),
u'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
u'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'origin_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'origin_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['core.SimpleTag']"}),
u'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
u'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'core.subscription': {
'Meta': {'unique_together': "(('feed', 'user'),)", 'object_name': 'Subscription'},
'feed': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.BaseFeed']"}),
'folders': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['core.Folder']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'items': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['core.Read']", 'symmetrical': 'False', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['core.SimpleTag']", 'symmetrical': 'False', 'blank': 'True'}),
'thumbnail': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'thumbnail_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.User']"})
},
'core.userfeeds': {
'Meta': {'object_name': 'UserFeeds'},
'blogs': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['core.BaseFeed']", 'null': 'True', 'blank': 'True'}),
'imported_items': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'imported_items'", 'unique': 'True', 'null': 'True', 'to': "orm['core.BaseFeed']"}),
'received_items': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'received_items'", 'unique': 'True', 'null': 'True', 'to': "orm['core.BaseFeed']"}),
'sent_items': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'sent_items'", 'unique': 'True', 'null': 'True', 'to': "orm['core.BaseFeed']"}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'feeds'", 'unique': 'True', 'primary_key': 'True', 'to': u"orm['base.User']"}),
'written_items': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'written_items'", 'unique': 'True', 'null': 'True', 'to': "orm['core.BaseFeed']"})
},
'core.userimport': {
'Meta': {'object_name': 'UserImport', '_ormbases': ['core.HistoryEntry']},
'date_finished': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_started': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'historyentry_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.HistoryEntry']", 'unique': 'True', 'primary_key': 'True'}),
'lines': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'results': ('jsonfield.fields.JSONField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'urls': ('django.db.models.fields.TextField', [], {})
},
'core.usersubscriptions': {
'Meta': {'object_name': 'UserSubscriptions'},
'blogs': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'blogs'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.Subscription']"}),
'imported_items': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'imported_items'", 'unique': 'True', 'null': 'True', 'to': "orm['core.Subscription']"}),
'received_items': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'received_items'", 'unique': 'True', 'null': 'True', 'to': "orm['core.Subscription']"}),
'sent_items': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'sent_items'", 'unique': 'True', 'null': 'True', 'to': "orm['core.Subscription']"}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'subscriptions'", 'unique': 'True', 'primary_key': 'True', 'to': u"orm['base.User']"}),
'written_items': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'written_items'", 'unique': 'True', 'null': 'True', 'to': "orm['core.Subscription']"})
},
'core.website': {
'Meta': {'object_name': 'WebSite'},
'description_en': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'description_fr': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'description_nt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'duplicate_of': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.WebSite']", 'null': 'True', 'blank': 'True'}),
'fetch_limit_nr': ('django.db.models.fields.IntegerField', [], {'default': '16', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'image_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
u'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
u'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'mail_warned': ('jsonfield.fields.JSONField', [], {'default': "u'[]'", 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['core.WebSite']"}),
u'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'short_description_en': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'short_description_fr': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'short_description_nt': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
u'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '200', 'blank': 'True'})
}
}
    complete_apps = ['core']
| 1flow/1flow | oneflow/core/migrations/0036_auto__add_userfeeds__add_usersubscriptions.py | Python | agpl-3.0 | 37,603 |
from nxdrive.tests.common_unit_test import UnitTestCase
from nxdrive.client import RemoteDocumentClient
from nxdrive.client import LocalClient
class TestSharedFolders(UnitTestCase):
def test_move_sync_root_child_to_user_workspace(self):
"""See https://jira.nuxeo.com/browse/NXP-14870"""
admin_remote_client = self.root_remote_client
user1_workspace_path = ('/default-domain/UserWorkspaces/'
'nuxeoDriveTestUser-user-1')
try:
# Get remote and local clients
remote_user1 = RemoteDocumentClient(
self.nuxeo_url, self.user_1, u'nxdrive-test-device-1',
self.version, password=self.password_1,
upload_tmp_dir=self.upload_tmp_dir)
remote_user2 = RemoteDocumentClient(
self.nuxeo_url, self.user_2, u'nxdrive-test-device-2',
self.version, password=self.password_2,
upload_tmp_dir=self.upload_tmp_dir)
local_user2 = LocalClient(self.local_nxdrive_folder_2)
# Make sure personal workspace is created for user1
remote_user1.make_file_in_user_workspace('File in user workspace',
filename='UWFile.txt')
# As user1 register personal workspace as a sync root
remote_user1.register_as_root(user1_workspace_path)
# As user1 create a parent folder in user1's personal workspace
remote_user1.make_folder(user1_workspace_path, 'Parent')
# As user1 grant Everything permission to user2 on parent folder
parent_folder_path = user1_workspace_path + '/Parent'
op_input = "doc:" + parent_folder_path
admin_remote_client.execute("Document.SetACE", op_input=op_input, user="nuxeoDriveTestUser_user_2",
permission="Everything", grant="true")
# As user1 create a child folder in parent folder
remote_user1.make_folder(parent_folder_path, 'Child')
# As user2 register parent folder as a sync root
remote_user2.register_as_root(parent_folder_path)
remote_user2.unregister_as_root(self.workspace)
# Start engine for user2
self.engine_2.start()
# Wait for synchronization
self.wait_sync(wait_for_async=True, wait_for_engine_1=False, wait_for_engine_2=True)
# Check locally synchronized content
self.assertEquals(len(local_user2.get_children_info('/')), 1)
self.assertTrue(local_user2.exists('/Parent'))
self.assertTrue(local_user2.exists('/Parent/Child'))
# As user1 move child folder to user1's personal workspace
remote_user1.move(parent_folder_path + '/Child',
user1_workspace_path)
# Wait for synchronization
self.wait_sync(wait_for_async=True, wait_for_engine_1=False, wait_for_engine_2=True)
# Check locally synchronized content
self.assertFalse(local_user2.exists('/Parent/Child'))
finally:
# Cleanup user1 personal workspace
if admin_remote_client.exists(user1_workspace_path):
admin_remote_client.delete(user1_workspace_path,
use_trash=False)
| loopingz/nuxeo-drive | nuxeo-drive-client/nxdrive/tests/test_shared_folders.py | Python | lgpl-2.1 | 3,409 |
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Kcov(CMakePackage):
"""Code coverage tool for compiled programs, Python and Bash which uses
debugging information to collect and report data without special
compilation options"""
homepage = "http://simonkagstrom.github.io/kcov/index.html"
url = "https://github.com/SimonKagstrom/kcov/archive/38.tar.gz"
version('38', sha256='b37af60d81a9b1e3b140f9473bdcb7975af12040feb24cc666f9bb2bb0be68b4')
depends_on('[email protected]:', type='build')
depends_on('zlib')
depends_on('curl')
def cmake_args(self):
# Necessary at least on macOS, fixes linking error to LLDB
# https://github.com/Homebrew/homebrew-core/blob/master/Formula/kcov.rb
return ['-DSPECIFY_RPATH=ON']
@run_after('install')
@on_package_attributes(run_tests=True)
def test_install(self):
# The help message exits with an exit code of 1
kcov = Executable(self.prefix.bin.kcov)
kcov('-h', ignore_errors=1)
| iulian787/spack | var/spack/repos/builtin/packages/kcov/package.py | Python | lgpl-2.1 | 1,192 |
#!/usr/bin/python2
# -*- coding: utf-8 -*-
#
# The Qubes OS Project, http://www.qubes-os.org
#
# Copyright (C) 2016 Marek Marczykowski-Górecki
# <[email protected]>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, see <https://www.gnu.org/licenses/>.
#
#
import os
import qubes.tests
import time
import subprocess
from unittest import expectedFailure
class TC_00_HVM(qubes.tests.SystemTestsMixin, qubes.tests.QubesTestCase):
def setUp(self):
super(TC_00_HVM, self).setUp()
self.vm = self.qc.add_new_vm("QubesHVm",
name=self.make_vm_name('vm1'))
self.vm.create_on_disk(verbose=False)
@expectedFailure
def test_000_pci_passthrough_presence(self):
pcidev = os.environ.get('QUBES_TEST_PCIDEV', None)
if pcidev is None:
self.skipTest('Specify PCI device with QUBES_TEST_PCIDEV '
'environment variable')
self.vm.pcidevs = [pcidev]
self.vm.pci_strictreset = False
self.qc.save()
self.qc.unlock_db()
init_script = (
"#!/bin/sh\n"
"set -e\n"
"lspci -n > /dev/xvdb\n"
"poweroff\n"
)
self.prepare_hvm_system_linux(self.vm, init_script,
['/usr/sbin/lspci'])
self.vm.start()
timeout = 60
while timeout > 0:
if not self.vm.is_running():
break
time.sleep(1)
timeout -= 1
if self.vm.is_running():
self.fail("Timeout while waiting for VM shutdown")
with open(self.vm.storage.private_img, 'r') as f:
lspci_vm = f.read(512).strip('\0')
p = subprocess.Popen(['lspci', '-ns', pcidev], stdout=subprocess.PIPE)
(lspci_host, _) = p.communicate()
# strip BDF, as it is different in VM
pcidev_desc = ' '.join(lspci_host.strip().split(' ')[1:])
self.assertIn(pcidev_desc, lspci_vm)
| woju/qubes-core-admin | tests/hardware.py | Python | lgpl-2.1 | 2,564 |
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
#
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
import datetime as dt
class Lammps(CMakePackage):
"""LAMMPS stands for Large-scale Atomic/Molecular Massively
Parallel Simulator. This package uses patch releases, not
stable release.
See https://github.com/spack/spack/pull/5342 for a detailed
discussion.
"""
homepage = "http://lammps.sandia.gov/"
url = "https://github.com/lammps/lammps/archive/patch_1Sep2017.tar.gz"
git = "https://github.com/lammps/lammps.git"
tags = ['ecp', 'ecp-apps']
version('develop', branch='master')
version('20180629', '6d5941863ee25ad2227ff3b7577d5e7c')
version('20180316', '25bad35679583e0dd8cb8753665bb84b')
version('20180222', '4d0513e3183bd57721814d217fdaf957')
version('20170922', '4306071f919ec7e759bda195c26cfd9a')
version('20170901', '767e7f07289663f033474dfe974974e7')
def url_for_version(self, version):
vdate = dt.datetime.strptime(str(version), "%Y%m%d")
return "https://github.com/lammps/lammps/archive/patch_{0}.tar.gz".format(
vdate.strftime("%d%b%Y").lstrip('0'))
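    # For example, following the strftime pattern above:
    #   '20170901' -> .../archive/patch_1Sep2017.tar.gz
    #   '20180629' -> .../archive/patch_29Jun2018.tar.gz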
supported_packages = ['asphere', 'body', 'class2', 'colloid', 'compress',
'coreshell', 'dipole', 'granular', 'kspace', 'latte',
'manybody', 'mc', 'meam', 'misc', 'molecule',
'mpiio', 'peri', 'poems', 'python', 'qeq', 'reax',
'replica', 'rigid', 'shock', 'snap', 'srd',
'user-atc', 'user-h5md', 'user-lb', 'user-misc',
'user-netcdf', 'user-omp', 'voronoi']
for pkg in supported_packages:
variant(pkg, default=False,
description='Activate the {0} package'.format(pkg))
variant('lib', default=True,
description='Build the liblammps in addition to the executable')
variant('mpi', default=True,
description='Build with mpi')
depends_on('mpi', when='+mpi')
depends_on('mpi', when='+mpiio')
depends_on('fftw', when='+kspace')
depends_on('voropp', when='+voronoi')
depends_on('netcdf+mpi', when='+user-netcdf')
depends_on('blas', when='+user-atc')
depends_on('lapack', when='+user-atc')
depends_on('[email protected]', when='@:20180222+latte')
depends_on('[email protected]:', when='@20180316:20180628+latte')
depends_on('[email protected]:', when='@20180629:+latte')
depends_on('blas', when='+latte')
depends_on('lapack', when='+latte')
depends_on('python', when='+python')
depends_on('mpi', when='+user-lb')
depends_on('mpi', when='+user-h5md')
depends_on('hdf5', when='+user-h5md')
conflicts('+body', when='+poems@:20180628')
conflicts('+latte', when='@:20170921')
conflicts('+python', when='~lib')
conflicts('+qeq', when='~manybody')
conflicts('+user-atc', when='~manybody')
conflicts('+user-misc', when='~manybody')
conflicts('+user-phonon', when='~kspace')
patch("lib.patch", when="@20170901")
patch("660.patch", when="@20170922")
root_cmakelists_dir = 'cmake'
def cmake_args(self):
spec = self.spec
mpi_prefix = 'ENABLE'
pkg_prefix = 'ENABLE'
if spec.satisfies('@20180629:'):
mpi_prefix = 'BUILD'
pkg_prefix = 'PKG'
args = [
'-DBUILD_SHARED_LIBS={0}'.format(
'ON' if '+lib' in spec else 'OFF'),
'-D{0}_MPI={1}'.format(
mpi_prefix,
'ON' if '+mpi' in spec else 'OFF')
]
if spec.satisfies('@20180629:+lib'):
args.append('-DBUILD_LIB=ON')
for pkg in self.supported_packages:
opt = '-D{0}_{1}'.format(pkg_prefix, pkg.upper())
if '+{0}'.format(pkg) in spec:
args.append('{0}=ON'.format(opt))
else:
args.append('{0}=OFF'.format(opt))
if '+kspace' in spec:
args.append('-DFFT=FFTW3')
return args
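# A possible install spec exercising these variants (illustrative only; any
# MPI provider satisfies the +mpi dependency):
#   spack install lammps@20180629 +mpi +kspace +molecule ^openmpi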
| krafczyk/spack | var/spack/repos/builtin/packages/lammps/package.py | Python | lgpl-2.1 | 5,249 |
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Iwyu(CMakePackage):
"""include-what-you-use: A tool for use with clang to analyze #includes in
C and C++ source files
"""
homepage = "https://include-what-you-use.org"
url = "https://include-what-you-use.org/downloads/include-what-you-use-0.13.src.tar.gz"
maintainers = ['sethrj']
version('0.14', sha256='43184397db57660c32e3298a6b1fd5ab82e808a1f5ab0591d6745f8d256200ef')
version('0.13', sha256='49294270aa64e8c04182369212cd919f3b3e0e47601b1f935f038c761c265bc9')
version('0.12', sha256='a5892fb0abccb820c394e4e245c00ef30fc94e4ae58a048b23f94047c0816025')
version('0.11', sha256='2d2877726c4aed9518cbb37673ffbc2b7da9c239bf8fe29432da35c1c0ec367a')
patch('iwyu-013-cmake.patch', when='@0.13:0.14')
depends_on('[email protected]:10.999', when='@0.14')
depends_on('[email protected]:9.999', when='@0.13')
depends_on('[email protected]:8.999', when='@0.12')
depends_on('[email protected]:7.999', when='@0.11')
# Non-X86 CPUs need the all_targets variant because iwyu uses the X86AsmParser
depends_on('llvm+all_targets', when='target=aarch64:')
depends_on('llvm+all_targets', when='target=arm:')
depends_on('llvm+all_targets', when='target=ppc:')
depends_on('llvm+all_targets', when='target=ppcle:')
depends_on('llvm+all_targets', when='target=ppc64:')
depends_on('llvm+all_targets', when='target=ppc64le:')
depends_on('llvm+all_targets', when='target=sparc:')
depends_on('llvm+all_targets', when='target=sparc64:')
@when('@0.14:')
def cmake_args(self):
return [self.define('CMAKE_CXX_STANDARD', 14),
self.define('CMAKE_CXX_EXTENSIONS', False)]
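# Note on the @when('@0.14:') guard above: only IWYU 0.14 and newer receive the
# C++14 flags; for older releases Spack falls back to the base CMakePackage
# implementation of cmake_args, which adds no extra defines.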
| iulian787/spack | var/spack/repos/builtin/packages/iwyu/package.py | Python | lgpl-2.1 | 1,882 |
import sys
from services.spawn import MobileTemplate
from services.spawn import WeaponTemplate
from resources.datatables import WeaponType
from resources.datatables import Difficulty
from resources.datatables import Options
from java.util import Vector
def addTemplate(core):
mobileTemplate = MobileTemplate()
mobileTemplate.setCreatureName('rabid_mott')
mobileTemplate.setLevel(6)
mobileTemplate.setDifficulty(Difficulty.NORMAL)
mobileTemplate.setMinSpawnDistance(4)
mobileTemplate.setMaxSpawnDistance(8)
mobileTemplate.setDeathblow(False)
mobileTemplate.setScale(1)
mobileTemplate.setMeatType("Carnivore Meat")
mobileTemplate.setMeatAmount(15)
mobileTemplate.setHideType("Bristly Hide")
mobileTemplate.setHideAmount(15)
mobileTemplate.setBoneType("Animal Bones")
mobileTemplate.setBoneAmount(5)
mobileTemplate.setSocialGroup("self")
mobileTemplate.setAssistRange(2)
mobileTemplate.setStalker(False)
mobileTemplate.setOptionsBitmask(Options.ATTACKABLE)
templates = Vector()
templates.add('object/mobile/shared_mott.iff')
mobileTemplate.setTemplates(templates)
weaponTemplates = Vector()
weapontemplate = WeaponTemplate('object/weapon/melee/unarmed/shared_unarmed_default.iff', WeaponType.UNARMED, 1.0, 6, 'kinetic')
weaponTemplates.add(weapontemplate)
mobileTemplate.setWeaponTemplateVector(weaponTemplates)
attacks = Vector()
attacks.add('bm_charge_2')
attacks.add('bm_slash_2')
mobileTemplate.setDefaultAttack('creatureMeleeAttack')
mobileTemplate.setAttacks(attacks)
core.spawnService.addMobileTemplate('rabid_mott', mobileTemplate)
return | agry/NGECore2 | scripts/mobiles/naboo/rabid_mott.py | Python | lgpl-3.0 | 1,588 |
# -*- coding: utf-8 -*-
# Copyright(C) 2017 Phyks (Lucas Verney)
#
# This file is part of a weboob module.
#
# This weboob module is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This weboob module is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this weboob module. If not, see <http://www.gnu.org/licenses/>.
from weboob.tools.test import BackendTest
class RATPTest(BackendTest):
MODULE = 'ratp'
def test_ratp_gauges(self):
l = list(self.backend.iter_gauges())
assert len(l) == 26
def test_ratp_gauges_filter(self):
l = list(self.backend.iter_gauges(pattern="T3A"))
assert len(l) == 1
def test_ratp_sensors(self):
l = list(self.backend.iter_sensors("ligne_metro_4"))
assert len(l) == 1
def test_ratp_status(self):
m = self.backend.get_last_measure("ligne_metro_4_sensor")
assert m.level <= 0.0
| vicnet/weboob | modules/ratp/test.py | Python | lgpl-3.0 | 1,363 |
import sys
from services.equipment import BonusSetTemplate
from java.util import Vector
def addBonusSet(core):
bonusSet = BonusSetTemplate("set_bonus_smuggler_utility_b")
bonusSet.addRequiredItem("item_band_set_smuggler_utility_b_01_01")
bonusSet.addRequiredItem("item_ring_set_smuggler_utility_b_01_01")
bonusSet.addRequiredItem("item_necklace_set_smuggler_utility_b_01_01")
bonusSet.addRequiredItem("item_bracelet_r_set_smuggler_utility_b_01_01")
bonusSet.addRequiredItem("item_bracelet_l_set_smuggler_utility_b_01_01")
core.equipmentService.addBonusSetTemplate(bonusSet)
def handleChange(core, creature, set):
wornItems = set.getWornTemplateCount(creature)
if wornItems == 3:
core.buffService.addBuffToCreature(creature, "set_bonus_smuggler_utility_b_1", creature)
creature.sendSystemMessage('@set_bonus:set_bonus_smuggler_utility_b_1_sys', 0)
elif wornItems == 4:
core.buffService.addBuffToCreature(creature, "set_bonus_smuggler_utility_b_2", creature)
creature.sendSystemMessage('@set_bonus:set_bonus_smuggler_utility_b_2_sys', 0)
elif wornItems == 5:
core.buffService.addBuffToCreature(creature, "set_bonus_smuggler_utility_b_3", creature)
creature.sendSystemMessage('@set_bonus:set_bonus_smuggler_utility_b_3_sys', 0)
else:
core.buffService.removeBuffFromCreatureByName(creature, "set_bonus_smuggler_utility_b_1")
core.buffService.removeBuffFromCreatureByName(creature, "set_bonus_smuggler_utility_b_2")
core.buffService.removeBuffFromCreatureByName(creature, "set_bonus_smuggler_utility_b_3") | agry/NGECore2 | scripts/equipment/bonus_sets/set_bonus_smuggler_utility_b.py | Python | lgpl-3.0 | 1,539 |
# Copyright (C) 2015 Kevin Ross
# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
# See the file 'docs/LICENSE' for copying permission.
from lib.cuckoo.common.abstracts import Signature
class DisablesWER(Signature):
name = "disables_wer"
description = "Attempts to disable Windows Error Reporting"
severity = 3
categories = ["stealth"]
authors = ["Kevin Ross"]
minimum = "1.2"
def run(self):
if self.check_write_key(pattern=".*\\\\SOFTWARE\\\\(Wow6432Node\\\\)?Microsoft\\\\Windows\\\\Windows\\ Error\\ Reporting\\\\Disabled$", regex=True):
return True
return False
| lixiangning888/whole_project | modules/signatures_orignal/disables_wer.py | Python | lgpl-3.0 | 645 |
# -*- coding: utf-8 -*-
import unittest
import warnings
from tcdb import adb
class TestADBSimple(unittest.TestCase):
def setUp(self):
self.adb = adb.ADBSimple()
self.adb.open('*')
def tearDown(self):
self.adb.close()
self.adb = None
def test_setgetitem(self):
self.adb['key'] = 'some string'
self.assertEqual(self.adb['key'], 'some string')
self.assertRaises(KeyError, self.adb.__getitem__, 'nonexistent key')
def test_put(self):
self.adb.put('key', 'some string')
self.assertEqual(self.adb.get('key'), 'some string')
self.assertEqual(self.adb.get('nonexistent key'), None)
self.assertEqual(self.adb.get('nonexistent key', 'def'), 'def')
def test_putkeep(self):
self.adb.putkeep('key', 'some string')
self.assertEqual(self.adb.get('key'), 'some string')
self.adb.putkeep('key', 'Never stored')
self.assertEqual(self.adb.get('key'), 'some string')
def test_putcat(self):
self.adb.putcat('key', 'some')
self.adb.putcat('key', ' text')
self.assertEquals(self.adb.get('key'), 'some text')
def test_out_and_contains(self):
self.assert_('key' not in self.adb)
self.adb.put('key', 'some text')
self.assert_('key' in self.adb)
self.adb.out('key')
self.assert_('key' not in self.adb)
self.adb.put('key', 'some text')
self.assert_('key' in self.adb)
del self.adb['key']
self.assert_('key' not in self.adb)
def test_vsiz(self):
self.adb.put('key', 'some text')
self.assertEqual(self.adb.vsiz('key'), len('some text'))
def test_iters(self):
keys = ['key1', 'key2', 'key3', 'key4', 'key5']
for key in keys:
self.adb.put(key, key)
self.assertEqual(len(self.adb.keys()), len(keys))
self.assertEqual(len(self.adb.values()), len(keys))
self.assertEqual(len(zip(keys, keys)), len(self.adb.items()))
for key in self.adb:
self.assert_(key in keys)
for value in self.adb.itervalues():
self.assert_(value in keys)
def test_fwmkeys(self):
objs = ['aa', 'ab', 'ac', 'xx', 'ad']
for obj in objs:
self.adb.put(obj, 'same value')
self.assertEqual(len(self.adb.fwmkeys('a')),
len(['aa', 'ab', 'ac', 'ad']))
self.assertEqual(self.adb.fwmkeys('x'), ['xx'])
self.assertEqual(self.adb.fwmkeys('nonexistent key'), [])
def test_admin_functions(self):
keys = ['key1', 'key2', 'key3', 'key4', 'key5']
for key in keys:
self.adb.put(key, key)
self.assertEquals(self.adb.path(), '*')
self.adb.sync()
self.assertEquals(len(self.adb), 5)
self.assertEquals(self.adb.size(), 525656)
self.adb.vanish()
self.assertEquals(self.adb.size(), 525376)
# def test_transaction(self):
# keys = ['key1', 'key2', 'key3', 'key4', 'key5']
# with self.adb as db:
# for key in keys:
# db.put(key, key)
# self.assertEquals(len(self.adb), 5)
# self.adb.vanish()
# try:
# with self.adb:
# for key in keys:
# self.adb.put(key, key)
# self.adb['bad key']
# except KeyError:
# pass
# self.assertEquals(len(self.adb), 0)
def test_foreach(self):
keys = ['key1', 'key2', 'key3', 'key4', 'key5']
def proc(key, value, op):
self.assertEquals(key, value)
self.assert_(key in keys)
self.assertEquals(op, 'test')
return True
for key in keys:
self.adb.put(key, key)
self.adb.foreach(proc, 'test')
class TestADB(unittest.TestCase):
def setUp(self):
self.adb = adb.ADB()
self.adb.open('*')
def tearDown(self):
self.adb.close()
self.adb = None
def test_setgetitem(self):
objs = [1+1j, 'some text [áéíóú]', u'unicode text [áéíóú]', 10, 10.0]
for obj1 in objs:
self.adb['obj'] = obj1
obj2 = self.adb['obj']
self.assertEqual(obj1, obj2)
self.adb[obj1] = obj1
obj2 = self.adb[obj1]
self.assertEqual(obj1, obj2)
self.assertRaises(KeyError, self.adb.__getitem__, 'nonexistent key')
def test_put(self):
objs = [1+1j, 'some text [áéíóú]', u'unicode text [áéíóú]', 10, 10.0]
for obj1 in objs:
self.adb.put(obj1, obj1)
obj2 = self.adb.get(obj1)
self.assertEqual(obj1, obj2)
self.adb.put(obj1, obj1, raw_key=True)
obj2 = self.adb.get(obj1, raw_key=True)
self.assertEqual(obj1, obj2)
self.assertEqual(self.adb.get('nonexistent key'), None)
self.assertEqual(self.adb.get('nonexistent key', 'def'), 'def')
def test_put_str(self):
str1 = 'some text [áéíóú]'
objs = [1+1j, 'some text [áéíóú]', u'unicode text [áéíóú]', 10, 10.0]
for obj in objs:
self.adb.put_str(obj, str1)
str2 = self.adb.get_str(obj)
self.assertEqual(str1, str2)
self.adb.put_str(obj, str1, as_raw=True)
str2 = self.adb.get_str(obj, as_raw=True)
self.assertEqual(str1, str2)
unicode1 = u'unicode text [áéíóú]'
for obj in objs:
self.adb.put_str(obj, unicode1.encode('utf8'))
unicode2 = unicode(self.adb.get_str(obj), 'utf8')
self.assertEqual(unicode1, unicode2)
self.assertRaises(AssertionError, self.adb.put_str, 'key', 10)
self.assertEqual(self.adb.get_str('nonexistent key'), None)
self.assertEqual(self.adb.get_str('nonexistent key', 'def'), 'def')
def test_put_unicode(self):
unicode1 = u'unicode text [áéíóú]'
objs = [1+1j, 'some text [áéíóú]', u'unicode text [áéíóú]', 10, 10.0]
for obj in objs:
self.adb.put_unicode(obj, unicode1)
unicode2 = self.adb.get_unicode(obj)
self.assertEqual(unicode1, unicode2)
self.adb.put_unicode(obj, unicode1, as_raw=True)
unicode2 = self.adb.get_unicode(obj, as_raw=True)
self.assertEqual(unicode1, unicode2)
self.assertRaises(AssertionError, self.adb.put_unicode, 'key', 10)
self.assertEqual(self.adb.get_unicode('nonexistent key'), None)
self.assertEqual(self.adb.get_unicode('nonexistent key', 'def'), 'def')
def test_put_int(self):
int1 = 10
objs = [1+1j, 'some text [áéíóú]', u'unicode text [áéíóú]', 10, 10.0]
for obj in objs:
self.adb.put_int(obj, int1)
int2 = self.adb.get_int(obj)
self.assertEqual(int1, int2)
self.adb.put_int(obj, int1, as_raw=True)
int2 = self.adb.get_int(obj, as_raw=True)
self.assertEqual(int1, int2)
self.assertRaises(AssertionError, self.adb.put_int, 'key', '10')
self.assertEqual(self.adb.get_int('nonexistent key'), None)
self.assertEqual(self.adb.get_int('nonexistent key', 'def'), 'def')
def test_put_float(self):
float1 = 10.10
objs = [1+1j, 'some text [áéíóú]', u'unicode text [áéíóú]', 10, 10.0]
for obj in objs:
self.adb.put_float(obj, float1)
float2 = self.adb.get_float(obj)
self.assertEqual(float1, float2)
self.adb.put_float(obj, float1, as_raw=True)
float2 = self.adb.get_float(obj, as_raw=True)
self.assertEqual(float1, float2)
self.assertRaises(AssertionError, self.adb.put_float, 'key', 10)
self.assertEqual(self.adb.get_float('nonexistent key'), None)
self.assertEqual(self.adb.get_float('nonexistent key', 'def'), 'def')
def test_putkeep(self):
objs = [1+1j, 'some text [áéíóú]', u'unicode text [áéíóú]', 10, 10.0]
for obj1 in objs:
self.adb.putkeep(obj1, obj1)
obj2 = self.adb.get(obj1)
self.assertEqual(obj1, obj2)
self.adb.putkeep(obj1, 'Never stored')
obj2 = self.adb.get(obj1)
self.assertEqual(obj1, obj2)
def test_putkeep_str(self):
str1 = 'some text [áéíóú]'
objs = [1+1j, 'some text [áéíóú]', u'unicode text [áéíóú]', 10, 10.0]
for obj in objs:
self.adb.putkeep_str(obj, str1)
str2 = self.adb.get_str(obj)
self.assertEqual(str1, str2)
self.adb.putkeep_str(obj, 'Never stored')
str2 = self.adb.get_str(obj)
self.assertEqual(str1, str2)
self.assertRaises(AssertionError, self.adb.putkeep_str, 'key', 10)
def test_putkeep_unicode(self):
unicode1 = u'unicode text [áéíóú]'
objs = [1+1j, 'some text [áéíóú]', u'unicode text [áéíóú]', 10, 10.0]
for obj in objs:
self.adb.putkeep_unicode(obj, unicode1)
unicode2 = self.adb.get_unicode(obj)
self.assertEqual(unicode1, unicode2)
self.adb.putkeep_unicode(obj, u'Never stored')
unicode2 = self.adb.get_unicode(obj)
self.assertEqual(unicode1, unicode2)
self.assertRaises(AssertionError, self.adb.putkeep_unicode, 'key', 10)
def test_putkeep_int(self):
int1 = 10
objs = [1+1j, 'some text [áéíóú]', u'unicode text [áéíóú]', 10, 10.0]
for obj in objs:
self.adb.putkeep_int(obj, int1)
int2 = self.adb.get_int(obj)
self.assertEqual(int1, int2)
self.adb.putkeep_int(obj, int1*10)
int2 = self.adb.get_int(obj)
self.assertEqual(int1, int2)
self.assertRaises(AssertionError, self.adb.putkeep_int, 'key', '10')
def test_putkeep_float(self):
float1 = 10.10
objs = [1+1j, 'some text [áéíóú]', u'unicode text [áéíóú]', 10, 10.0]
for obj in objs:
self.adb.putkeep_float(obj, float1)
float2 = self.adb.get_float(obj)
self.assertEqual(float1, float2)
self.adb.putkeep_float(obj, float1*10)
float2 = self.adb.get_float(obj)
self.assertEqual(float1, float2)
self.assertRaises(AssertionError, self.adb.put_float, 'key', 10)
def test_putcat_str(self):
objs = [1+1j, 'some text [áéíóú]', u'unicode text [áéíóú]', 10, 10.0]
for obj in objs:
self.adb.putcat_str(obj, 'some')
for obj in objs:
self.adb.putcat_str(obj, ' text')
for obj in objs:
self.assertEquals(self.adb.get_str(obj), 'some text')
def test_putcat_unicode(self):
objs = [1+1j, 'some text [áéíóú]', u'unicode text [áéíóú]', 10, 10.0]
for obj in objs:
self.adb.putcat_unicode(obj, u'some')
for obj in objs:
self.adb.putcat_unicode(obj, u' text')
for obj in objs:
self.assertEquals(self.adb.get_unicode(obj), u'some text')
def test_out_and_contains(self):
objs = [1+1j, 'some text [áéíóú]', u'unicode text [áéíóú]', 10, 10.0]
for obj in objs:
self.adb.put(obj, obj)
self.assert_(obj in self.adb)
self.adb.out(obj)
self.assert_(obj not in self.adb)
for obj in objs:
self.adb.put(obj, obj)
self.assert_(obj in self.adb)
del self.adb[obj]
self.assert_(obj not in self.adb)
def test_vsiz(self):
obj = 1+1j
self.adb.put(obj, obj)
vsiz = self.adb.vsiz(obj)
self.assertEqual(vsiz, 48)
obj = 'some text [áéíóú]'
self.adb.put_str(obj, obj)
vsiz = self.adb.vsiz(obj)
self.assertEqual(vsiz, 22)
obj = u'unicode text [áéíóú]'
self.adb.put_str(obj, obj.encode('utf8'))
vsiz = self.adb.vsiz(obj)
self.assertEqual(vsiz, 25)
obj = 10
self.adb.put_int(obj, obj)
vsiz = self.adb.vsiz(obj)
self.assertEqual(vsiz, 4)
obj = 10.10
self.adb.put_float(obj, obj)
vsiz = self.adb.vsiz(obj)
self.assertEqual(vsiz, 8)
def test_iters(self):
objs = [1+1j, 'some text [áéíóú]', u'unicode text [áéíóú]', 10, 10.0]
for obj in objs:
self.adb.put(obj, obj)
self.assertEqual(len(self.adb.keys()), len(objs))
self.assertEqual(len(self.adb.values()), len(objs))
for key in self.adb:
with warnings.catch_warnings():
warnings.simplefilter("ignore")
self.assert_(key in objs)
for value in self.adb.itervalues():
with warnings.catch_warnings():
warnings.simplefilter("ignore")
self.assert_(value in objs)
def test_fwmkeys(self):
objs = ['aa', 'ab', 'ac', 'xx', 'ad']
for obj in objs:
self.adb.put(obj, 'same value', raw_key=True)
self.assertEqual(len(self.adb.fwmkeys('a')),
len(['aa', 'ab', 'ac', 'ad']))
self.assertEqual(self.adb.fwmkeys('x'), ['xx'])
self.assertEqual(self.adb.fwmkeys('nonexistent key'), [])
def test_add_int(self):
objs = [1+1j, 'some text [áéíóú]', u'unicode text [áéíóú]', 10, 10.0]
for obj in objs:
self.adb.put_int(obj, 10)
for key in self.adb:
self.adb.add_int(key, 2)
for key in self.adb:
self.assertEqual(self.adb.get_int(key), 12)
def test_add_float(self):
objs = [1+1j, 'some text [áéíóú]', u'unicode text [áéíóú]', 10, 10.0]
for obj in objs:
self.adb.put_float(obj, 10.0)
for key in self.adb:
self.adb.add_float(key, 2.0)
for key in self.adb:
self.assertEqual(self.adb.get_float(key), 12.0)
def test_admin_functions(self):
objs = [1+1j, 'some text [áéíóú]', u'unicode text [áéíóú]', 10, 10.0]
for obj in objs:
self.adb.put(obj, obj)
self.assertEquals(self.adb.path(), '*')
self.adb.sync()
self.assertEquals(len(self.adb), 5)
self.assertEquals(self.adb.size(), 525874)
self.adb.vanish()
self.assertEquals(self.adb.size(), 525376)
# def test_transaction(self):
# objs = [1+1j, 'some text [áéíóú]', u'unicode text [áéíóú]', 10, 10.0]
# with self.adb as db:
# for obj in objs:
# db.put(obj, obj)
# self.assertEquals(len(self.adb), 5)
# self.adb.vanish()
# try:
# with self.adb:
# for obj in objs:
# self.adb.put(obj, obj)
# self.adb.get('Not exist key')
# except KeyError:
# pass
# self.assertEquals(len(self.adb), 0)
def test_foreach(self):
objs = [1+1j, 'some text [áéíóú]', u'unicode text [áéíóú]', 10, 10.0]
def proc(key, value, op):
self.assertEquals(key, value)
with warnings.catch_warnings():
warnings.simplefilter("ignore")
self.assert_(key in objs)
self.assertEquals(op, 'test')
return True
for obj in objs:
self.adb.put(obj, obj)
self.adb.foreach(proc, 'test')
if __name__ == '__main__':
unittest.main()
| aplanas/py-tcdb | test/test-adb.py | Python | lgpl-3.0 | 15,634 |
from sys import argv
from xml.dom import minidom
import csv
stem = argv[1][:-4] if argv[1].endswith('.xml') else argv[1]
xmldoc = minidom.parse('%s.xml'%stem)
labellist = xmldoc.getElementsByTagName('label')
labels = [l.attributes['name'].value for l in labellist]
labelset = set(labels)
for split in 'train','test':
with open('%s-%s.csv'%(stem,split), 'rb') as csvfile:
reader = csv.DictReader(csvfile)
features = [f for f in reader.fieldnames if f not in labelset]
x = open('%s-%s.x.txt'%(stem,split), 'w')
y = open('%s-%s.y.txt'%(stem,split), 'w')
for row in reader:
xbuf = ' '.join([row[f] for f in features])
ybuf = ' '.join([row[l] for l in labels])
x.write('%s\n'%xbuf)
y.write("%s\n"%ybuf)
x.close()
y.close()
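# Hypothetical invocation (names are made up): given MULAN-style inputs
# emotions.xml, emotions-train.csv and emotions-test.csv,
#   python xmlcsv2xy.py emotions
# writes emotions-{train,test}.x.txt and .y.txt with one space-separated row of
# feature values (x) or label values (y) per CSV record.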
| pmineiro/randembed | mulan/xmlcsv2xy.py | Python | unlicense | 829 |
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'ProductType'
db.create_table('inventory_producttype', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
))
db.send_create_signal('inventory', ['ProductType'])
# Adding model 'Product'
db.create_table('inventory_product', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('shop', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['shops.Shop'])),
('title', self.gf('django.db.models.fields.CharField')(max_length=200)),
('description', self.gf('django.db.models.fields.TextField')()),
('category', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['market.MarketCategory'])),
('subcategory', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['market.MarketSubCategory'])),
('date_time', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('weight', self.gf('django.db.models.fields.DecimalField')(default='0', max_digits=11, decimal_places=2)),
('type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['inventory.ProductType'], null=True, blank=True)),
))
db.send_create_signal('inventory', ['Product'])
# Adding model 'Coin'
db.create_table('inventory_coin', (
('producttype_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['inventory.ProductType'], unique=True, primary_key=True)),
('category', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['market.MarketCategory'], null=True, blank=True)),
('subcategory', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['market.MarketSubCategory'], null=True, blank=True)),
('country_code', self.gf('django.db.models.fields.CharField')(default='us', max_length=2)),
('pcgs_number', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
('description', self.gf('django.db.models.fields.TextField')(default='', blank='')),
('year_issued', self.gf('django.db.models.fields.CharField')(default='', max_length=24, blank='')),
('actual_year', self.gf('django.db.models.fields.CharField')(default='', max_length=24, blank='')),
('denomination', self.gf('django.db.models.fields.CharField')(default='', max_length=60, blank='')),
('major_variety', self.gf('django.db.models.fields.CharField')(default='', max_length=60, blank='')),
('die_variety', self.gf('django.db.models.fields.CharField')(default='', max_length=60, blank='')),
('prefix', self.gf('django.db.models.fields.CharField')(default='', max_length=60, blank='')),
('suffix', self.gf('django.db.models.fields.CharField')(default='', max_length=60, blank='')),
('sort_order', self.gf('django.db.models.fields.CharField')(default='', max_length=60, blank='')),
('heading', self.gf('django.db.models.fields.CharField')(default='', max_length=60, blank='')),
('holder_variety', self.gf('django.db.models.fields.CharField')(default='', max_length=60, blank='')),
('holder_variety_2', self.gf('django.db.models.fields.CharField')(default='', max_length=60, blank='')),
('additional_data', self.gf('django.db.models.fields.TextField')(default='', blank='')),
('last_update', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
))
db.send_create_signal('inventory', ['Coin'])
def backwards(self, orm):
# Deleting model 'ProductType'
db.delete_table('inventory_producttype')
# Deleting model 'Product'
db.delete_table('inventory_product')
# Deleting model 'Coin'
db.delete_table('inventory_coin')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
},
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'inventory.coin': {
'Meta': {'object_name': 'Coin', '_ormbases': ['inventory.ProductType']},
'actual_year': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '24', 'blank': "''"}),
'additional_data': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': "''"}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['market.MarketCategory']", 'null': 'True', 'blank': 'True'}),
'country_code': ('django.db.models.fields.CharField', [], {'default': "'us'", 'max_length': '2'}),
'denomination': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '60', 'blank': "''"}),
'description': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': "''"}),
'die_variety': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '60', 'blank': "''"}),
'heading': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '60', 'blank': "''"}),
'holder_variety': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '60', 'blank': "''"}),
'holder_variety_2': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '60', 'blank': "''"}),
'last_update': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'major_variety': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '60', 'blank': "''"}),
'pcgs_number': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'prefix': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '60', 'blank': "''"}),
'producttype_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['inventory.ProductType']", 'unique': 'True', 'primary_key': 'True'}),
'sort_order': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '60', 'blank': "''"}),
'subcategory': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['market.MarketSubCategory']", 'null': 'True', 'blank': 'True'}),
'suffix': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '60', 'blank': "''"}),
'year_issued': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '24', 'blank': "''"})
},
'inventory.product': {
'Meta': {'object_name': 'Product'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['market.MarketCategory']"}),
'date_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shops.Shop']"}),
'subcategory': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['market.MarketSubCategory']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['inventory.ProductType']", 'null': 'True', 'blank': 'True'}),
'weight': ('django.db.models.fields.DecimalField', [], {'default': "'0'", 'max_digits': '11', 'decimal_places': '2'})
},
'inventory.producttype': {
'Meta': {'object_name': 'ProductType'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'market.marketcategory': {
'Meta': {'object_name': 'MarketCategory'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'marketplace': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['market.MarketPlace']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '60', 'db_index': 'True'})
},
'market.marketplace': {
'Meta': {'object_name': 'MarketPlace'},
'base_domain': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '92'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '92', 'db_index': 'True'}),
'template_prefix': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '92', 'db_index': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '92'})
},
'market.marketsubcategory': {
'Meta': {'unique_together': "(('parent', 'slug'),)", 'object_name': 'MarketSubCategory'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'marketplace': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['market.MarketPlace']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '255'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'subcategories'", 'null': 'True', 'to': "orm['market.MarketCategory']"}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '60', 'db_index': 'True'})
},
'shops.shop': {
'Meta': {'object_name': 'Shop'},
'admin': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'bids': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'date_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'default': "'39.29038,-76.61219'", 'max_length': '255'}),
'marketplace': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['market.MarketPlace']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
'views': ('django.db.models.fields.IntegerField', [], {'default': '0'})
}
}
complete_apps = ['inventory']
| codepython/CollectorCity-Market-Place | stores/apps/inventory/migrations/0001_initial.py | Python | apache-2.0 | 13,952 |
import json
from decimal import Decimal
class CustomEncoder(json.JSONEncoder):
def default(self, object):
if isinstance(object, set):
return list(object)
if isinstance(object, Decimal):
if object % 1 > 0:
return float(object)
else:
return int(object)
return super(CustomEncoder, self).default(object)
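if __name__ == '__main__':
    # Minimal usage sketch (illustrative, not part of the upstream module):
    # sets are serialized as lists and Decimals as int or float, inputs that
    # the stock json.JSONEncoder would reject.
    sample = {'tags': {'a', 'b'}, 'price': Decimal('9.99'), 'qty': Decimal('3')}
    print(json.dumps(sample, cls=CustomEncoder, sort_keys=True))
    # e.g. {"price": 9.99, "qty": 3, "tags": ["a", "b"]} (set order may vary)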
| apoclyps/pyrestful | pyrestful/encoders.py | Python | apache-2.0 | 399 |
from copy import copy
import pytest
from plenum.common.stacks import nodeStackClass
from plenum.common.util import randomString
from stp_core.loop.eventually import eventually
from stp_core.network.auth_mode import AuthMode
from stp_core.network.port_dispenser import genHa
from stp_core.test.helper import Printer, prepStacks, checkStacksConnected
from stp_zmq.kit_zstack import KITZStack
from stp_zmq.test.helper import genKeys
from stp_zmq.zstack import Quota
@pytest.fixture()
def registry():
return {
'Alpha': genHa(),
'Beta': genHa(),
'Gamma': genHa(),
'Delta': genHa()
}
@pytest.fixture()
def connection_timeout(tconf):
# TODO: the connection may not be established for the first try because
# some of the stacks may not have had a remote yet (that is they haven't had yet called connect)
return 2 * tconf.RETRY_TIMEOUT_RESTRICTED + 1
def create_fake_nodestack(tdir, tconf, registry, name='Node1'):
def msgHandler(msg):
pass
stackParams = {
"name": name,
"ha": genHa(),
"auto": 2,
"basedirpath": tdir
}
stack = nodeStackClass(stackParams, msgHandler, registry, randomString(32), config=tconf)
return stack
@pytest.fixture()
def connected_nodestacks(registry, tdir, looper, connection_timeout, tconf):
genKeys(tdir, registry.keys())
stacks = []
for name, ha in registry.items():
printer = Printer(name)
stackParams = dict(name=name, ha=ha, basedirpath=tdir,
auth_mode=AuthMode.RESTRICTED.value)
reg = copy(registry)
reg.pop(name)
stack = KITZStack(stackParams, printer.print, reg)
stack.listenerQuota = tconf.NODE_TO_NODE_STACK_QUOTA
stack.listenerSize = tconf.NODE_TO_NODE_STACK_SIZE
stacks.append(stack)
motors = prepStacks(looper, *stacks, connect=False, useKeys=True)
looper.run(eventually(
checkStacksConnected, stacks, retryWait=1, timeout=connection_timeout))
return stacks, motors
def test_set_quota(tdir, tconf, registry):
changed_val = 100000
tconf.NODE_TO_NODE_STACK_QUOTA = changed_val
stack = create_fake_nodestack(tdir, tconf, registry)
assert stack.listenerQuota == tconf.NODE_TO_NODE_STACK_QUOTA
def test_set_size(tdir, tconf, registry):
changed_val = 100000
tconf.NODE_TO_NODE_STACK_SIZE = changed_val
stack = create_fake_nodestack(tdir, tconf, registry)
assert stack.listenerSize == tconf.NODE_TO_NODE_STACK_SIZE
def test_limit_by_msg_count(looper, tdir, tconf, connected_nodestacks):
stacks, motors = connected_nodestacks
stackA = stacks[0]
stackB = stacks[1]
msg = 'some test messages'
for i in range(tconf.NODE_TO_NODE_STACK_QUOTA + 10):
stackA.send(msg, 'Beta')
received_msgs = stackB._receiveFromListener(Quota(count=stackA.listenerQuota, size=stackA.listenerSize))
assert received_msgs <= tconf.NODE_TO_NODE_STACK_QUOTA
def test_limit_by_msg_size(looper, tdir, tconf, connected_nodestacks):
stacks, motors = connected_nodestacks
stackA = stacks[0]
stackB = stacks[1]
msg = 'some test messages'
limit_size = (tconf.NODE_TO_NODE_STACK_QUOTA - 10) * len(msg)
stackB.listenerSize = limit_size
for i in range(tconf.NODE_TO_NODE_STACK_QUOTA + 10):
stackA.send(msg, 'Beta')
received_msgs = stackB._receiveFromListener(Quota(count=stackA.listenerQuota, size=stackA.listenerSize))
assert received_msgs < tconf.NODE_TO_NODE_STACK_QUOTA
| evernym/zeno | stp_zmq/test/test_node_to_node_quota.py | Python | apache-2.0 | 3,525 |
# Copyright 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import logging
import oslo_messaging
from oslo_messaging._drivers import common as rpc_common
from oslo_messaging._drivers.zmq_driver.client.publishers\
import zmq_publisher_base
from oslo_messaging._drivers.zmq_driver import zmq_address
from oslo_messaging._drivers.zmq_driver import zmq_async
from oslo_messaging._drivers.zmq_driver import zmq_names
from oslo_messaging._i18n import _LE, _LI
LOG = logging.getLogger(__name__)
zmq = zmq_async.import_zmq()
class ReqPublisher(zmq_publisher_base.PublisherBase):
def send_request(self, request):
if request.msg_type != zmq_names.CALL_TYPE:
raise zmq_publisher_base.UnsupportedSendPattern(request.msg_type)
socket = self._connect_to_host(request.target)
self._send_request(socket, request)
return self._receive_reply(socket, request)
def _connect_to_host(self, target):
try:
self.zmq_context = zmq.Context()
socket = self.zmq_context.socket(zmq.REQ)
host = self.matchmaker.get_single_host(target)
connect_address = zmq_address.get_tcp_direct_address(host)
LOG.info(_LI("Connecting REQ to %s") % connect_address)
socket.connect(connect_address)
self.outbound_sockets[str(target)] = socket
return socket
except zmq.ZMQError as e:
errmsg = _LE("Error connecting to socket: %s") % str(e)
LOG.error(_LE("Error connecting to socket: %s") % str(e))
raise rpc_common.RPCException(errmsg)
@staticmethod
def _receive_reply(socket, request):
def _receive_method(socket):
return socket.recv_pyobj()
# NOTE(ozamiatin): Check for retry here (no retries now)
with contextlib.closing(zmq_async.get_reply_poller()) as poller:
poller.register(socket, recv_method=_receive_method)
reply, socket = poller.poll(timeout=request.timeout)
if reply is None:
raise oslo_messaging.MessagingTimeout(
"Timeout %s seconds was reached" % request.timeout)
if reply[zmq_names.FIELD_FAILURE]:
raise rpc_common.deserialize_remote_exception(
reply[zmq_names.FIELD_FAILURE],
request.allowed_remote_exmods)
else:
return reply[zmq_names.FIELD_REPLY]
def close(self):
# For contextlib compatibility
self.cleanup()
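# Call-flow note (descriptive): send_request() resolves the target host via the
# matchmaker, connects a REQ socket to it, sends the CALL message and then
# blocks in _receive_reply() until a reply arrives or request.timeout expires;
# a failure carried in FIELD_FAILURE is re-raised as the original remote
# exception.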
| magic0704/oslo.messaging | oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_req_publisher.py | Python | apache-2.0 | 3,103 |
# Copyright 2014 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Class to represent a Static Virtual Machine object.
All static VMs provided in a given group will be used before any non-static
VMs are provisioned. For example, in a test that uses 4 VMs, if 3 static VMs
are provided, all of them will be used and one additional non-static VM
will be provisioned. The VM's should be set up with passwordless ssh and
passwordless sudo (neither sshing nor running a sudo command should prompt
the user for a password).
All VM specifics are self-contained and the class provides methods to
operate on the VM: boot, shutdown, etc.
"""
import collections
import json
import logging
import threading
from perfkitbenchmarker import disk
from perfkitbenchmarker import flags
from perfkitbenchmarker import linux_virtual_machine
from perfkitbenchmarker import virtual_machine
from perfkitbenchmarker import windows_virtual_machine
WINDOWS = 'windows'
DEBIAN = 'debian'
RHEL = 'rhel'
UBUNTU_CONTAINER = 'ubuntu_container'
FLAGS = flags.FLAGS
class StaticVmSpec(virtual_machine.BaseVmSpec):
"""Object containing all info needed to create a Static VM."""
def __init__(self, ip_address=None, user_name=None, ssh_private_key=None,
internal_ip=None, ssh_port=22, install_packages=True,
password=None, disk_specs=None, os_type=None, **kwargs):
"""Initialize the StaticVmSpec object.
Args:
ip_address: The public ip address of the VM.
user_name: The username of the VM that the keyfile corresponds to.
ssh_private_key: The absolute path to the private keyfile to use to ssh
to the VM.
internal_ip: The internal ip address of the VM.
ssh_port: The port number to use for SSH and SCP commands.
install_packages: If false, no packages will be installed. This is
useful if benchmark dependencies have already been installed.
password: The password used to log into the VM (Windows Only).
disk_specs: A list of dictionaries containing kwargs used to create
disk.BaseDiskSpecs.
os_type: The OS type of the VM. See the flag of the same name for more
information.
"""
super(StaticVmSpec, self).__init__(**kwargs)
self.ip_address = ip_address
self.user_name = user_name
self.ssh_private_key = ssh_private_key
self.internal_ip = internal_ip
self.ssh_port = ssh_port
self.install_packages = install_packages
self.password = password
self.os_type = os_type
self.disk_specs = disk_specs
class StaticDisk(disk.BaseDisk):
"""Object representing a static Disk."""
def _Create(self):
"""StaticDisks don't implement _Create()."""
pass
def _Delete(self):
"""StaticDisks don't implement _Delete()."""
pass
def Attach(self):
"""StaticDisks don't implement Attach()."""
pass
def Detach(self):
"""StaticDisks don't implement Detach()."""
pass
class StaticVirtualMachine(virtual_machine.BaseVirtualMachine):
"""Object representing a Static Virtual Machine."""
is_static = True
vm_pool = collections.deque()
vm_pool_lock = threading.Lock()
def __init__(self, vm_spec):
"""Initialize a static virtual machine.
Args:
vm_spec: A StaticVmSpec object containing arguments.
"""
super(StaticVirtualMachine, self).__init__(vm_spec, None, None)
self.ip_address = vm_spec.ip_address
self.user_name = vm_spec.user_name
self.ssh_private_key = vm_spec.ssh_private_key
self.internal_ip = vm_spec.internal_ip
self.zone = self.zone or ('Static - %s@%s' % (self.user_name,
self.ip_address))
self.ssh_port = vm_spec.ssh_port
self.install_packages = vm_spec.install_packages
self.password = vm_spec.password
if vm_spec.disk_specs:
for spec in vm_spec.disk_specs:
self.disk_specs.append(disk.BaseDiskSpec(**spec))
self.from_pool = False
def _Create(self):
"""StaticVirtualMachines do not implement _Create()."""
pass
def _Delete(self):
"""Returns the virtual machine to the pool."""
if self.from_pool:
with self.vm_pool_lock:
self.vm_pool.appendleft(self)
def CreateScratchDisk(self, disk_spec):
"""Create a VM's scratch disk.
Args:
disk_spec: virtual_machine.BaseDiskSpec object of the disk.
"""
spec = self.disk_specs[len(self.scratch_disks)]
self.scratch_disks.append(StaticDisk(spec))
def DeleteScratchDisks(self):
"""StaticVirtualMachines do not delete scratch disks."""
pass
def GetLocalDisks(self):
"""Returns a list of local disks on the VM."""
return [disk_spec.device_path
for disk_spec in self.disk_specs if disk_spec.device_path]
@classmethod
def ReadStaticVirtualMachineFile(cls, file_obj):
"""Read a file describing the static VMs to use.
This function will read the static VM information from the provided file,
instantiate VMs corresponding to the info, and add the VMs to the static
VM pool. The provided file should contain a single array in JSON format.
Each element in the array must be an object with the following fields:
ip_address: string.
user_name: string.
keyfile_path: string.
ssh_port: integer, optional. Default 22
internal_ip: string, optional.
zone: string, optional.
local_disks: array of strings, optional.
scratch_disk_mountpoints: array of strings, optional
os_type: string, optional (see package_managers)
install_packages: bool, optional
Args:
file_obj: An open handle to a file containing the static VM info.
Raises:
ValueError: On missing required keys, or invalid keys.
"""
vm_arr = json.load(file_obj)
if not isinstance(vm_arr, list):
raise ValueError('Invalid static VM file. Expected array, got: %s.' %
type(vm_arr))
required_keys = frozenset(['ip_address', 'user_name'])
linux_required_keys = required_keys | frozenset(['keyfile_path'])
required_keys_by_os = {
WINDOWS: required_keys | frozenset(['password']),
DEBIAN: linux_required_keys,
RHEL: linux_required_keys,
UBUNTU_CONTAINER: linux_required_keys,
}
required_keys = required_keys_by_os[FLAGS.os_type]
optional_keys = frozenset(['internal_ip', 'zone', 'local_disks',
'scratch_disk_mountpoints', 'os_type',
'ssh_port', 'install_packages'])
allowed_keys = required_keys | optional_keys
def VerifyItemFormat(item):
"""Verify that the decoded JSON object matches the required schema."""
item_keys = frozenset(item)
extra_keys = sorted(item_keys - allowed_keys)
missing_keys = required_keys - item_keys
if extra_keys:
raise ValueError('Unexpected keys: {0}'.format(', '.join(extra_keys)))
elif missing_keys:
raise ValueError('Missing required keys: {0}'.format(
', '.join(missing_keys)))
for item in vm_arr:
VerifyItemFormat(item)
ip_address = item['ip_address']
user_name = item['user_name']
keyfile_path = item.get('keyfile_path')
internal_ip = item.get('internal_ip')
zone = item.get('zone')
local_disks = item.get('local_disks', [])
password = item.get('password')
if not isinstance(local_disks, list):
raise ValueError('Expected a list of local disks, got: {0}'.format(
local_disks))
scratch_disk_mountpoints = item.get('scratch_disk_mountpoints', [])
if not isinstance(scratch_disk_mountpoints, list):
raise ValueError(
'Expected a list of disk mount points, got: {0}'.format(
scratch_disk_mountpoints))
ssh_port = item.get('ssh_port', 22)
os_type = item.get('os_type')
install_packages = item.get('install_packages', True)
if ((os_type == WINDOWS and FLAGS.os_type != WINDOWS) or
(os_type != WINDOWS and FLAGS.os_type == WINDOWS)):
raise ValueError('Please only use Windows VMs when using '
'--os_type=windows and vice versa.')
disk_kwargs_list = []
for path in scratch_disk_mountpoints:
disk_kwargs_list.append({'mount_point': path})
for local_disk in local_disks:
disk_kwargs_list.append({'device_path': local_disk})
vm_spec = StaticVmSpec(
ip_address=ip_address, user_name=user_name, ssh_port=ssh_port,
install_packages=install_packages, ssh_private_key=keyfile_path,
internal_ip=internal_ip, zone=zone, disk_specs=disk_kwargs_list,
password=password)
vm_class = GetStaticVmClass(os_type)
vm = vm_class(vm_spec)
cls.vm_pool.append(vm)
@classmethod
def GetStaticVirtualMachine(cls):
"""Pull a Static VM from the pool of static VMs.
If there are no VMs left in the pool, the method will return None.
Returns:
A static VM from the pool, or None if there are no static VMs left.
"""
with cls.vm_pool_lock:
if cls.vm_pool:
vm = cls.vm_pool.popleft()
vm.from_pool = True
return vm
else:
return None
def GetStaticVmClass(os_type):
"""Returns the static VM class that corresponds to the os_type."""
class_dict = {
DEBIAN: DebianBasedStaticVirtualMachine,
RHEL: RhelBasedStaticVirtualMachine,
WINDOWS: WindowsBasedStaticVirtualMachine,
UBUNTU_CONTAINER: ContainerizedStaticVirtualMachine,
}
if os_type in class_dict:
return class_dict[os_type]
else:
logging.warning('Could not find os type for VM. Defaulting to debian.')
return DebianBasedStaticVirtualMachine
class ContainerizedStaticVirtualMachine(
StaticVirtualMachine, linux_virtual_machine.ContainerizedDebianMixin):
pass
class DebianBasedStaticVirtualMachine(StaticVirtualMachine,
linux_virtual_machine.DebianMixin):
pass
class RhelBasedStaticVirtualMachine(StaticVirtualMachine,
linux_virtual_machine.RhelMixin):
pass
class WindowsBasedStaticVirtualMachine(StaticVirtualMachine,
windows_virtual_machine.WindowsMixin):
pass
| syed/PerfKitBenchmarker | perfkitbenchmarker/static_virtual_machine.py | Python | apache-2.0 | 10,858 |
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_middleware import request_id
import webob
from tacker import auth
from tacker.tests import base
class TackerKeystoneContextTestCase(base.BaseTestCase):
def setUp(self):
super(TackerKeystoneContextTestCase, self).setUp()
@webob.dec.wsgify
def fake_app(req):
self.context = req.environ['tacker.context']
return webob.Response()
self.context = None
self.middleware = auth.TackerKeystoneContext(fake_app)
self.request = webob.Request.blank('/')
self.request.headers['X_AUTH_TOKEN'] = 'testauthtoken'
def test_no_user_id(self):
self.request.headers['X_PROJECT_ID'] = 'testtenantid'
response = self.request.get_response(self.middleware)
self.assertEqual(response.status, '401 Unauthorized')
def test_with_user_id(self):
self.request.headers['X_PROJECT_ID'] = 'testtenantid'
self.request.headers['X_USER_ID'] = 'testuserid'
response = self.request.get_response(self.middleware)
self.assertEqual(response.status, '200 OK')
self.assertEqual(self.context.user_id, 'testuserid')
self.assertEqual(self.context.user, 'testuserid')
def test_with_tenant_id(self):
self.request.headers['X_PROJECT_ID'] = 'testtenantid'
self.request.headers['X_USER_ID'] = 'test_user_id'
response = self.request.get_response(self.middleware)
self.assertEqual(response.status, '200 OK')
self.assertEqual(self.context.tenant_id, 'testtenantid')
self.assertEqual(self.context.tenant, 'testtenantid')
def test_roles_no_admin(self):
self.request.headers['X_PROJECT_ID'] = 'testtenantid'
self.request.headers['X_USER_ID'] = 'testuserid'
self.request.headers['X_ROLES'] = 'role1, role2 , role3,role4,role5'
response = self.request.get_response(self.middleware)
self.assertEqual(response.status, '200 OK')
self.assertEqual(self.context.roles, ['role1', 'role2', 'role3',
'role4', 'role5'])
self.assertEqual(self.context.is_admin, False)
def test_roles_with_admin(self):
self.request.headers['X_PROJECT_ID'] = 'testtenantid'
self.request.headers['X_USER_ID'] = 'testuserid'
self.request.headers['X_ROLES'] = ('role1, role2 , role3,role4,role5,'
'AdMiN')
response = self.request.get_response(self.middleware)
self.assertEqual(response.status, '200 OK')
self.assertEqual(self.context.roles, ['role1', 'role2', 'role3',
'role4', 'role5', 'AdMiN'])
self.assertEqual(self.context.is_admin, True)
def test_with_user_tenant_name(self):
self.request.headers['X_PROJECT_ID'] = 'testtenantid'
self.request.headers['X_USER_ID'] = 'testuserid'
self.request.headers['X_PROJECT_NAME'] = 'testtenantname'
self.request.headers['X_USER_NAME'] = 'testusername'
response = self.request.get_response(self.middleware)
self.assertEqual(response.status, '200 OK')
self.assertEqual(self.context.user_id, 'testuserid')
self.assertEqual(self.context.user_name, 'testusername')
self.assertEqual(self.context.tenant_id, 'testtenantid')
self.assertEqual(self.context.tenant_name, 'testtenantname')
def test_request_id_extracted_from_env(self):
req_id = 'dummy-request-id'
self.request.headers['X_PROJECT_ID'] = 'testtenantid'
self.request.headers['X_USER_ID'] = 'testuserid'
self.request.environ[request_id.ENV_REQUEST_ID] = req_id
self.request.get_response(self.middleware)
self.assertEqual(req_id, self.context.request_id)
| priya-pp/Tacker | tacker/tests/unit/test_auth.py | Python | apache-2.0 | 4,418 |
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test conversion of graphs involving INT32 tensors and operations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.compiler.tensorrt.test import tf_trt_integration_test_base as trt_test
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.platform import test
class ExcludeUnsupportedInt32Test(trt_test.TfTrtIntegrationTestBase):
"""Test exclusion of ops which are not supported in INT32 mode by TF-TRT"""
def _ConstOp(self, shape, dtype):
return constant_op.constant(np.random.randn(*shape), dtype=dtype)
def GraphFn(self, x):
dtype = x.dtype
b = self._ConstOp((4, 10), dtype)
x = math_ops.matmul(x, b)
b = self._ConstOp((10,), dtype)
x = nn.bias_add(x, b)
return array_ops.identity(x, name='output_0')
def GetParams(self):
return self.BuildParams(self.GraphFn, dtypes.int32, [[100, 4]], [[100, 10]])
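# Shape note (descriptive): the [100, 4] int32 input is multiplied by a
# [4, 10] constant and bias-added with a [10] vector, giving the [100, 10]
# output shape declared above; batch size 100 matches max_batch_size below.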
def GetConversionParams(self, run_params):
"""Return a ConversionParams for test."""
conversion_params = super(ExcludeUnsupportedInt32Test,
self).GetConversionParams(run_params)
return conversion_params._replace(
max_batch_size=100,
maximum_cached_engines=1,
# Disable layout optimizer, since it will convert BiasAdd with NHWC
# format to NCHW format under four-dimensional input.
rewriter_config_template=trt_test.OptimizerDisabledRewriterConfig())
def ExpectedEnginesToBuild(self, run_params):
"""Return the expected engines to build."""
return []
if __name__ == '__main__':
test.main()
| alsrgv/tensorflow | tensorflow/python/compiler/tensorrt/test/int32_test.py | Python | apache-2.0 | 2,529 |
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
scale = 1.0
def sleep(secs):
import time
time.sleep(secs*scale)
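# Usage sketch (illustrative): callers can stretch or shrink every wait in one
# place, e.g. `import sleep; sleep.scale = 0.5` makes `sleep.sleep(10)` pause
# for only 5 seconds.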
| phrocker/accumulo | test/system/auto/sleep.py | Python | apache-2.0 | 856 |
# Copyright 2014 - Rackspace Hosting.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from barbicanclient import client as barbicanclient
from glanceclient.v2 import client as glanceclient
from heatclient.v1 import client as heatclient
from novaclient.v2 import client as novaclient
from oslo_config import cfg
from oslo_log import log as logging
from magnum.common import exception
from magnum.common import magnum_keystoneclient
from magnum.i18n import _
LOG = logging.getLogger(__name__)
magnum_client_opts = [
cfg.StrOpt('region_name',
default=None,
help=_('Region in Identity service catalog to use for '
'communication with the OpenStack service.')),
cfg.StrOpt('endpoint_type',
default='publicURL',
help=_(
'Type of endpoint in Identity service catalog to use '
'for communication with the OpenStack service.'))]
heat_client_opts = [
cfg.StrOpt('region_name',
default=None,
help=_('Region in Identity service catalog to use for '
'communication with the OpenStack service.')),
cfg.StrOpt('endpoint_type',
default='publicURL',
help=_(
'Type of endpoint in Identity service catalog to use '
'for communication with the OpenStack service.')),
cfg.StrOpt('ca_file',
help=_('Optional CA cert file to use in SSL connections.')),
cfg.StrOpt('cert_file',
help=_('Optional PEM-formatted certificate chain file.')),
cfg.StrOpt('key_file',
help=_('Optional PEM-formatted file that contains the '
'private key.')),
cfg.BoolOpt('insecure',
default=False,
help=_("If set, then the server's certificate will not "
"be verified."))]
glance_client_opts = [
cfg.StrOpt('region_name',
default=None,
help=_('Region in Identity service catalog to use for '
'communication with the OpenStack service.')),
cfg.StrOpt('endpoint_type',
default='publicURL',
help=_(
'Type of endpoint in Identity service catalog to use '
'for communication with the OpenStack service.'))]
barbican_client_opts = [
cfg.StrOpt('region_name',
default=None,
help=_('Region in Identity service catalog to use for '
'communication with the OpenStack service.')),
cfg.StrOpt('endpoint_type',
default='publicURL',
help=_(
'Type of endpoint in Identity service catalog to use '
'for communication with the OpenStack service.'))]
nova_client_opts = [
cfg.StrOpt('region_name',
default=None,
help=_('Region in Identity service catalog to use for '
'communication with the OpenStack service.')),
cfg.StrOpt('endpoint_type',
default='publicURL',
help=_(
'Type of endpoint in Identity service catalog to use '
'for communication with the OpenStack service.'))]
cfg.CONF.register_opts(magnum_client_opts, group='magnum_client')
cfg.CONF.register_opts(heat_client_opts, group='heat_client')
cfg.CONF.register_opts(glance_client_opts, group='glance_client')
cfg.CONF.register_opts(barbican_client_opts, group='barbican_client')
cfg.CONF.register_opts(nova_client_opts, group='nova_client')
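# Hedged illustration (not part of the original module): because the option
# groups above are registered with oslo.config, deployers would typically
# override them per service in magnum.conf, along these lines (the region name
# and CA path are placeholder values):
#
#     [heat_client]
#     region_name = RegionOne
#     endpoint_type = internalURL
#     ca_file = /etc/ssl/certs/ca.pem
#
#     [nova_client]
#     endpoint_type = publicURL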
class OpenStackClients(object):
"""Convenience class to create and cache client instances."""
def __init__(self, context):
self.context = context
self._keystone = None
self._heat = None
self._glance = None
self._barbican = None
self._nova = None
def url_for(self, **kwargs):
return self.keystone().client.service_catalog.url_for(**kwargs)
def magnum_url(self):
endpoint_type = self._get_client_option('magnum', 'endpoint_type')
region_name = self._get_client_option('magnum', 'region_name')
return self.url_for(service_type='container',
endpoint_type=endpoint_type,
region_name=region_name)
@property
def auth_url(self):
return self.keystone().v3_endpoint
@property
def auth_token(self):
return self.context.auth_token or self.keystone().auth_token
def keystone(self):
if self._keystone:
return self._keystone
self._keystone = magnum_keystoneclient.KeystoneClientV3(self.context)
return self._keystone
def _get_client_option(self, client, option):
return getattr(getattr(cfg.CONF, '%s_client' % client), option)
@exception.wrap_keystone_exception
def heat(self):
if self._heat:
return self._heat
endpoint_type = self._get_client_option('heat', 'endpoint_type')
region_name = self._get_client_option('heat', 'region_name')
endpoint = self.url_for(service_type='orchestration',
endpoint_type=endpoint_type,
region_name=region_name)
args = {
'endpoint': endpoint,
'auth_url': self.auth_url,
'token': self.auth_token,
'username': None,
'password': None,
'ca_file': self._get_client_option('heat', 'ca_file'),
'cert_file': self._get_client_option('heat', 'cert_file'),
'key_file': self._get_client_option('heat', 'key_file'),
'insecure': self._get_client_option('heat', 'insecure')
}
self._heat = heatclient.Client(**args)
return self._heat
@exception.wrap_keystone_exception
def glance(self):
if self._glance:
return self._glance
endpoint_type = self._get_client_option('glance', 'endpoint_type')
region_name = self._get_client_option('glance', 'region_name')
endpoint = self.url_for(service_type='image',
endpoint_type=endpoint_type,
region_name=region_name)
args = {
'endpoint': endpoint,
'auth_url': self.auth_url,
'token': self.auth_token,
'username': None,
'password': None,
}
self._glance = glanceclient.Client(**args)
return self._glance
@exception.wrap_keystone_exception
def barbican(self):
if self._barbican:
return self._barbican
endpoint_type = self._get_client_option('barbican', 'endpoint_type')
region_name = self._get_client_option('barbican', 'region_name')
endpoint = self.url_for(service_type='key-manager',
endpoint_type=endpoint_type,
region_name=region_name)
session = self.keystone().client.session
self._barbican = barbicanclient.Client(session=session,
endpoint=endpoint)
return self._barbican
@exception.wrap_keystone_exception
def nova(self):
if self._nova:
return self._nova
endpoint_type = self._get_client_option('nova', 'endpoint_type')
region_name = self._get_client_option('nova', 'region_name')
endpoint = self.url_for(service_type='compute',
endpoint_type=endpoint_type,
region_name=region_name)
self._nova = novaclient.Client(auth_token=self.auth_token)
self._nova.client.management_url = endpoint
return self._nova
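# Hedged usage sketch (not part of the original module): each accessor above
# memoizes its client on first use, so repeated calls on one OpenStackClients
# instance reuse the same client object. The `context` argument is an assumed
# request context carrying an auth token, as expected by __init__ above.
def _example_list_stacks(context):
    clients = OpenStackClients(context)
    heat = clients.heat()           # heatclient is built on first access
    assert clients.heat() is heat   # ...and cached for subsequent calls
    return [stack.stack_name for stack in heat.stacks.list()]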
| eshijia/magnum | magnum/common/clients.py | Python | apache-2.0 | 8,321 |
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
import textwrap
from contextlib import closing
from xml.etree import ElementTree
from pants.backend.jvm.subsystems.scala_platform import ScalaPlatform
from pants.backend.jvm.subsystems.shader import Shader
from pants.backend.jvm.targets.jar_dependency import JarDependency
from pants.backend.jvm.tasks.jvm_compile.analysis_tools import AnalysisTools
from pants.backend.jvm.tasks.jvm_compile.jvm_compile import JvmCompile
from pants.backend.jvm.tasks.jvm_compile.scala.zinc_analysis import ZincAnalysis
from pants.backend.jvm.tasks.jvm_compile.scala.zinc_analysis_parser import ZincAnalysisParser
from pants.base.build_environment import get_buildroot
from pants.base.exceptions import TaskError
from pants.base.hash_utils import hash_file
from pants.base.workunit import WorkUnitLabel
from pants.java.distribution.distribution import DistributionLocator
from pants.option.custom_types import dict_option
from pants.util.contextutil import open_zip
from pants.util.dirutil import relativize_paths, safe_open
# Well known metadata file required to register scalac plugins with nsc.
_PLUGIN_INFO_FILE = 'scalac-plugin.xml'
class ZincCompile(JvmCompile):
"""Compile Scala and Java code using Zinc."""
_ZINC_MAIN = 'org.pantsbuild.zinc.Main'
_name = 'zinc'
_supports_concurrent_execution = True
@staticmethod
def write_plugin_info(resources_dir, target):
root = os.path.join(resources_dir, target.id)
plugin_info_file = os.path.join(root, _PLUGIN_INFO_FILE)
with safe_open(plugin_info_file, 'w') as f:
f.write(textwrap.dedent("""
<plugin>
<name>{}</name>
<classname>{}</classname>
</plugin>
""".format(target.plugin, target.classname)).strip())
return root, plugin_info_file
@classmethod
def subsystem_dependencies(cls):
return super(ZincCompile, cls).subsystem_dependencies() + (ScalaPlatform, DistributionLocator)
@classmethod
def get_args_default(cls, bootstrap_option_values):
return ('-S-encoding', '-SUTF-8', '-S-g:vars')
@classmethod
def get_warning_args_default(cls):
return ('-S-deprecation', '-S-unchecked')
@classmethod
def get_no_warning_args_default(cls):
return ('-S-nowarn',)
@classmethod
def register_options(cls, register):
super(ZincCompile, cls).register_options(register)
register('--plugins', advanced=True, action='append', fingerprint=True,
help='Use these scalac plugins.')
register('--plugin-args', advanced=True, type=dict_option, default={}, fingerprint=True,
help='Map from plugin name to list of arguments for that plugin.')
register('--name-hashing', advanced=True, action='store_true', default=False, fingerprint=True,
help='Use zinc name hashing.')
cls.register_jvm_tool(register,
'zinc',
classpath=[
JarDependency('org.pantsbuild', 'zinc', '1.0.8')
],
main=cls._ZINC_MAIN,
custom_rules=[
# The compiler-interface and sbt-interface tool jars carry xsbt and
# xsbti interfaces that are used across the shaded tool jar boundary so
# we preserve these root packages wholesale along with the core scala
# APIs.
Shader.exclude_package('scala', recursive=True),
Shader.exclude_package('xsbt', recursive=True),
Shader.exclude_package('xsbti', recursive=True),
])
def sbt_jar(name, **kwargs):
return JarDependency(org='com.typesafe.sbt', name=name, rev='0.13.9', **kwargs)
cls.register_jvm_tool(register,
'compiler-interface',
classpath=[
sbt_jar(name='compiler-interface',
classifier='sources',
# We just want the single compiler-interface jar and not its
# dep on scala-lang
intransitive=True)
])
cls.register_jvm_tool(register,
'sbt-interface',
classpath=[
sbt_jar(name='sbt-interface',
# We just want the single sbt-interface jar and not its dep
# on scala-lang
intransitive=True)
])
    # By default we expect that no plugin-jars classpath_spec is filled in by the user, so we
    # accept an empty classpath.
cls.register_jvm_tool(register, 'plugin-jars', classpath=[])
def select(self, target):
return target.has_sources('.java') or target.has_sources('.scala')
def select_source(self, source_file_path):
return source_file_path.endswith('.java') or source_file_path.endswith('.scala')
def __init__(self, *args, **kwargs):
super(ZincCompile, self).__init__(*args, **kwargs)
# A directory independent of any other classpath which can contain per-target
# plugin resource files.
self._plugin_info_dir = os.path.join(self.workdir, 'scalac-plugin-info')
self._lazy_plugin_args = None
def create_analysis_tools(self):
return AnalysisTools(DistributionLocator.cached().real_home, ZincAnalysisParser(), ZincAnalysis)
def zinc_classpath(self):
    # Zinc takes advantage of tools.jar if it's present on the classpath.
    # For example, com.sun.tools.javac.Main is used for in-process Java compilation.
def locate_tools_jar():
try:
return DistributionLocator.cached(jdk=True).find_libs(['tools.jar'])
except DistributionLocator.Error:
self.context.log.info('Failed to locate tools.jar. '
'Install a JDK to increase performance of Zinc.')
return []
return self.tool_classpath('zinc') + locate_tools_jar()
def compiler_classpath(self):
return ScalaPlatform.global_instance().compiler_classpath(self.context.products)
def extra_compile_time_classpath_elements(self):
# Classpath entries necessary for our compiler plugins.
return self.plugin_jars()
def plugin_jars(self):
"""The classpath entries for jars containing code for enabled plugins."""
if self.get_options().plugins:
return self.tool_classpath('plugin-jars')
else:
return []
def plugin_args(self):
if self._lazy_plugin_args is None:
self._lazy_plugin_args = self._create_plugin_args()
return self._lazy_plugin_args
def _create_plugin_args(self):
if not self.get_options().plugins:
return []
plugin_args = self.get_options().plugin_args
active_plugins = self._find_plugins()
ret = []
for name, jar in active_plugins.items():
ret.append('-S-Xplugin:{}'.format(jar))
for arg in plugin_args.get(name, []):
ret.append('-S-P:{}:{}'.format(name, arg))
return ret
def _find_plugins(self):
"""Returns a map from plugin name to plugin jar."""
# Allow multiple flags and also comma-separated values in a single flag.
plugin_names = set([p for val in self.get_options().plugins for p in val.split(',')])
plugins = {}
buildroot = get_buildroot()
for jar in self.plugin_jars():
with open_zip(jar, 'r') as jarfile:
try:
with closing(jarfile.open(_PLUGIN_INFO_FILE, 'r')) as plugin_info_file:
plugin_info = ElementTree.parse(plugin_info_file).getroot()
if plugin_info.tag != 'plugin':
raise TaskError(
'File {} in {} is not a valid scalac plugin descriptor'.format(_PLUGIN_INFO_FILE,
jar))
name = plugin_info.find('name').text
if name in plugin_names:
if name in plugins:
raise TaskError('Plugin {} defined in {} and in {}'.format(name, plugins[name], jar))
# It's important to use relative paths, as the compiler flags get embedded in the zinc
# analysis file, and we port those between systems via the artifact cache.
plugins[name] = os.path.relpath(jar, buildroot)
except KeyError:
pass
unresolved_plugins = plugin_names - set(plugins.keys())
if unresolved_plugins:
raise TaskError('Could not find requested plugins: {}'.format(list(unresolved_plugins)))
return plugins
def extra_products(self, target):
"""Override extra_products to produce a plugin information file."""
ret = []
if target.is_scalac_plugin and target.classname:
# NB: We don't yet support explicit in-line compilation of scala compiler plugins from
# the workspace to be used in subsequent compile rounds like we do for annotation processors
# with javac. This would require another GroupTask similar to AptCompile, but for scala.
root, plugin_info_file = self.write_plugin_info(self._plugin_info_dir, target)
ret.append((root, [plugin_info_file]))
return ret
def compile(self, args, classpath, sources, classes_output_dir, upstream_analysis, analysis_file,
log_file, settings):
# We add compiler_classpath to ensure the scala-library jar is on the classpath.
# TODO: This also adds the compiler jar to the classpath, which compiled code shouldn't
# usually need. Be more selective?
# TODO(John Sirois): Do we need to do this at all? If adding scala-library to the classpath is
# only intended to allow target authors to omit a scala-library dependency, then ScalaLibrary
# already overrides traversable_dependency_specs to achieve the same end; arguably at a more
# appropriate level and certainly at a more appropriate granularity.
relativized_classpath = relativize_paths(self.compiler_classpath() + classpath, get_buildroot())
zinc_args = []
zinc_args.extend([
'-log-level', self.get_options().level,
'-analysis-cache', analysis_file,
'-classpath', ':'.join(relativized_classpath),
'-d', classes_output_dir
])
if not self.get_options().colors:
zinc_args.append('-no-color')
if not self.get_options().name_hashing:
zinc_args.append('-no-name-hashing')
if log_file:
zinc_args.extend(['-capture-log', log_file])
zinc_args.extend(['-compiler-interface', self.tool_jar('compiler-interface')])
zinc_args.extend(['-sbt-interface', self.tool_jar('sbt-interface')])
zinc_args.extend(['-scala-path', ':'.join(self.compiler_classpath())])
zinc_args += self.plugin_args()
if upstream_analysis:
zinc_args.extend(['-analysis-map',
','.join('{}:{}'.format(*kv) for kv in upstream_analysis.items())])
zinc_args += args
zinc_args.extend([
'-C-source', '-C{}'.format(settings.source_level),
'-C-target', '-C{}'.format(settings.target_level),
])
zinc_args.extend(settings.args)
jvm_options = list(self._jvm_options)
zinc_args.extend(sources)
self.log_zinc_file(analysis_file)
if self.runjava(classpath=self.zinc_classpath(),
main=self._ZINC_MAIN,
jvm_options=jvm_options,
args=zinc_args,
workunit_name='zinc',
workunit_labels=[WorkUnitLabel.COMPILER]):
raise TaskError('Zinc compile failed.')
def log_zinc_file(self, analysis_file):
self.context.log.debug('Calling zinc on: {} ({})'
.format(analysis_file,
hash_file(analysis_file).upper()
if os.path.exists(analysis_file)
else 'nonexistent'))
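# Hedged illustration (not part of the original module): for a typical Scala
# target, compile() above assembles a zinc invocation roughly like the following
# (paths, levels and source names are placeholders, not real output):
#
#   org.pantsbuild.zinc.Main
#     -log-level info
#     -analysis-cache .pants.d/compile/zinc/<target>/analysis
#     -classpath scala-library.jar:3rdparty-dep.jar:upstream/classes
#     -d .pants.d/compile/zinc/<target>/classes
#     -no-name-hashing
#     -compiler-interface compiler-interface-sources.jar
#     -sbt-interface sbt-interface.jar
#     -scala-path scala-compiler.jar:scala-library.jar
#     -C-source -C1.7 -C-target -C1.7
#     src/scala/com/example/Example.scala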
| scode/pants | src/python/pants/backend/jvm/tasks/jvm_compile/scala/zinc_compile.py | Python | apache-2.0 | 12,212 |
"""
FormWizard class -- implements a multi-page form, validating between each
step and storing the form's state as HTML hidden fields so that no state is
stored on the server side.
"""
import cPickle as pickle
from django import forms
from django.conf import settings
from django.http import Http404
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from django.utils.hashcompat import md5_constructor
from django.utils.translation import ugettext_lazy as _
from django.contrib.formtools.utils import security_hash
class FormWizard(object):
# The HTML (and POST data) field name for the "step" variable.
    step_field_name = "wizard_step"
# METHODS SUBCLASSES SHOULDN'T OVERRIDE ###################################
def __init__(self, form_list, initial=None):
"""
Start a new wizard with a list of forms.
form_list should be a list of Form classes (not instances).
"""
self.form_list = form_list[:]
self.initial = initial or {}
# Dictionary of extra template context variables.
self.extra_context = {}
# A zero-based counter keeping track of which step we're in.
self.step = 0
def __repr__(self):
return "step: %d\nform_list: %s\ninitial_data: %s" % (self.step, self.form_list, self.initial)
def get_form(self, step, data=None):
"Helper method that returns the Form instance for the given step."
return self.form_list[step](data, prefix=self.prefix_for_step(step), initial=self.initial.get(step, None))
def num_steps(self):
"Helper method that returns the number of steps."
        # You might think we should just set "self.num_steps = len(form_list)"
        # in __init__(), but this calculation needs to be dynamic, because some
        # hook methods might alter self.form_list.
return len(self.form_list)
def __call__(self, request, *args, **kwargs):
"""
Main method that does all the hard work, conforming to the Django view
interface.
"""
if 'extra_context' in kwargs:
self.extra_context.update(kwargs['extra_context'])
current_step = self.determine_step(request, *args, **kwargs)
self.parse_params(request, *args, **kwargs)
# Sanity check.
if current_step >= self.num_steps():
raise Http404('Step %s does not exist' % current_step)
# For each previous step, verify the hash and process.
# TODO: Move "hash_%d" to a method to make it configurable.
for i in range(current_step):
form = self.get_form(i, request.POST)
if request.POST.get("hash_%d" % i, '') != self.security_hash(request, form):
return self.render_hash_failure(request, i)
self.process_step(request, form, i)
# Process the current step. If it's valid, go to the next step or call
# done(), depending on whether any steps remain.
if request.method == 'POST':
form = self.get_form(current_step, request.POST)
else:
form = self.get_form(current_step)
if form.is_valid():
self.process_step(request, form, current_step)
next_step = current_step + 1
# If this was the last step, validate all of the forms one more
# time, as a sanity check, and call done().
num = self.num_steps()
if next_step == num:
final_form_list = [self.get_form(i, request.POST) for i in range(num)]
# Validate all the forms. If any of them fail validation, that
# must mean the validator relied on some other input, such as
# an external Web site.
for i, f in enumerate(final_form_list):
if not f.is_valid():
return self.render_revalidation_failure(request, i, f)
return self.done(request, final_form_list)
# Otherwise, move along to the next step.
else:
form = self.get_form(next_step)
self.step = current_step = next_step
return self.render(form, request, current_step)
def render(self, form, request, step, context=None):
"Renders the given Form object, returning an HttpResponse."
old_data = request.POST
prev_fields = []
if old_data:
hidden = forms.HiddenInput()
# Collect all data from previous steps and render it as HTML hidden fields.
for i in range(step):
old_form = self.get_form(i, old_data)
hash_name = 'hash_%s' % i
prev_fields.extend([bf.as_hidden() for bf in old_form])
prev_fields.append(hidden.render(hash_name, old_data.get(hash_name, self.security_hash(request, old_form))))
return self.render_template(request, form, ''.join(prev_fields), step, context)
# METHODS SUBCLASSES MIGHT OVERRIDE IF APPROPRIATE ########################
def prefix_for_step(self, step):
"Given the step, returns a Form prefix to use."
return str(step)
def render_hash_failure(self, request, step):
"""
Hook for rendering a template if a hash check failed.
step is the step that failed. Any previous step is guaranteed to be
valid.
This default implementation simply renders the form for the given step,
but subclasses may want to display an error message, etc.
"""
return self.render(self.get_form(step), request, step, context={'wizard_error': _('We apologize, but your form has expired. Please continue filling out the form from this page.')})
def render_revalidation_failure(self, request, step, form):
"""
Hook for rendering a template if final revalidation failed.
        It is highly unlikely that this point would ever be reached, but see
        the comment in __call__() for an explanation.
"""
return self.render(form, request, step)
def security_hash(self, request, form):
"""
Calculates the security hash for the given HttpRequest and Form instances.
Subclasses may want to take into account request-specific information,
such as the IP address.
"""
return security_hash(request, form)
def determine_step(self, request, *args, **kwargs):
"""
Given the request object and whatever *args and **kwargs were passed to
__call__(), returns the current step (which is zero-based).
Note that the result should not be trusted. It may even be a completely
invalid number. It's not the job of this method to validate it.
"""
if not request.POST:
return 0
try:
step = int(request.POST.get(self.step_field_name, 0))
except ValueError:
return 0
return step
def parse_params(self, request, *args, **kwargs):
"""
Hook for setting some state, given the request object and whatever
*args and **kwargs were passed to __call__(), sets some state.
This is called at the beginning of __call__().
"""
pass
def get_template(self, step):
"""
Hook for specifying the name of the template to use for a given step.
Note that this can return a tuple of template names if you'd like to
use the template system's select_template() hook.
"""
return 'forms/wizard.html'
def render_template(self, request, form, previous_fields, step, context=None):
"""
Renders the template for the given step, returning an HttpResponse object.
Override this method if you want to add a custom context, return a
different MIME type, etc. If you only need to override the template
name, use get_template() instead.
The template will be rendered with the following context:
step_field -- The name of the hidden field containing the step.
step0 -- The current step (zero-based).
step -- The current step (one-based).
step_count -- The total number of steps.
form -- The Form instance for the current step (either empty
or with errors).
previous_fields -- A string representing every previous data field,
plus hashes for completed forms, all in the form of
hidden fields. Note that you'll need to run this
through the "safe" template filter, to prevent
auto-escaping, because it's raw HTML.
"""
context = context or {}
context.update(self.extra_context)
return render_to_response(self.get_template(step), dict(context,
step_field=self.step_field_name,
step0=step,
step=step + 1,
step_count=self.num_steps(),
form=form,
previous_fields=previous_fields
), context_instance=RequestContext(request))
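    # Hedged illustration (not part of the original module): a minimal
    # 'forms/wizard.html' using the context documented above might look like
    # this (markup and submit label are illustrative only; note previous_fields
    # must go through the "safe" filter, as the docstring explains):
    #
    #   <form action="" method="post">
    #     <p>Step {{ step }} of {{ step_count }}</p>
    #     {{ previous_fields|safe }}
    #     {{ form.as_p }}
    #     <input type="hidden" name="{{ step_field }}" value="{{ step0 }}">
    #     <input type="submit" value="Continue">
    #   </form>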
def process_step(self, request, form, step):
"""
Hook for modifying the FormWizard's internal state, given a fully
validated Form object. The Form is guaranteed to have clean, valid
data.
This method should *not* modify any of that data. Rather, it might want
to set self.extra_context or dynamically alter self.form_list, based on
previously submitted forms.
Note that this method is called every time a page is rendered for *all*
submitted steps.
"""
pass
# METHODS SUBCLASSES MUST OVERRIDE ########################################
def done(self, request, form_list):
"""
Hook for doing something with the validated data. This is responsible
for the final processing.
form_list is a list of Form instances, each containing clean, valid
data.
"""
raise NotImplementedError("Your %s class has not defined a done() method, which is required." % self.__class__.__name__)
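# Hedged usage sketch (not part of the original module): a wizard subclass that
# overrides the documented hooks. The template names and the use of REMOTE_ADDR
# are illustrative assumptions only, not part of the FormWizard contract.
class _ExampleContactWizard(FormWizard):
    def get_template(self, step):
        return 'forms/contact_wizard_%s.html' % step
    def security_hash(self, request, form):
        # Mix the client address into the hash so hidden fields captured on one
        # host are not replayable from another (REMOTE_ADDR availability is an
        # assumption of this sketch).
        base = security_hash(request, form)
        return md5_constructor(base + request.META.get('REMOTE_ADDR', '')).hexdigest()
    def done(self, request, form_list):
        data = {}
        for form in form_list:
            data.update(form.cleaned_data)
        # ... persist or e-mail `data` here ...
        return render_to_response('forms/contact_done.html', {'data': data})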
| sanjuro/RCJK | vendor/django/contrib/formtools/wizard.py | Python | apache-2.0 | 10,209 |
# Copyright The Cloud Custodian Authors.
# SPDX-License-Identifier: Apache-2.0
import json
from .common import BaseTest, functional
from c7n.resources.aws import shape_validate
from c7n.utils import yaml_load
class TestSNS(BaseTest):
@functional
def test_sns_remove_matched(self):
session_factory = self.replay_flight_data("test_sns_remove_matched")
client = session_factory().client("sns")
name = "test-sns-remove-matched"
topic_arn = client.create_topic(Name=name)["TopicArn"]
self.addCleanup(client.delete_topic, TopicArn=topic_arn)
client.set_topic_attributes(
TopicArn=topic_arn,
AttributeName="Policy",
AttributeValue=json.dumps(
{
"Version": "2012-10-17",
"Statement": [
{
"Sid": "SpecificAllow",
"Effect": "Allow",
"Principal": {"AWS": "arn:aws:iam::644160558196:root"},
"Action": ["SNS:Subscribe"],
"Resource": topic_arn,
},
{
"Sid": "Public",
"Effect": "Allow",
"Principal": "*",
"Action": ["SNS:GetTopicAttributes"],
"Resource": topic_arn,
},
],
}
),
)
p = self.load_policy(
{
"name": "sns-rm-matched",
"resource": "sns",
"filters": [
{"TopicArn": topic_arn},
{"type": "cross-account", "whitelist": ["123456789012"]},
],
"actions": [{"type": "remove-statements", "statement_ids": "matched"}],
},
session_factory=session_factory,
)
resources = p.run()
self.assertEqual([r["TopicArn"] for r in resources], [topic_arn])
data = json.loads(
client.get_topic_attributes(TopicArn=resources[0]["TopicArn"])[
"Attributes"
][
"Policy"
]
)
self.assertEqual(
[s["Sid"] for s in data.get("Statement", ())], ["SpecificAllow"]
)
@functional
def test_sns_remove_named(self):
session_factory = self.replay_flight_data("test_sns_remove_named")
client = session_factory().client("sns")
name = "test-sns-remove-named"
topic_arn = client.create_topic(Name=name)["TopicArn"]
self.addCleanup(client.delete_topic, TopicArn=topic_arn)
client.set_topic_attributes(
TopicArn=topic_arn,
AttributeName="Policy",
AttributeValue=json.dumps(
{
"Version": "2012-10-17",
"Statement": [
{
"Sid": "SpecificAllow",
"Effect": "Allow",
"Principal": "*",
"Action": ["SNS:Subscribe"],
"Resource": topic_arn,
},
{
"Sid": "RemoveMe",
"Effect": "Allow",
"Principal": "*",
"Action": ["SNS:GetTopicAttributes"],
"Resource": topic_arn,
},
],
}
),
)
p = self.load_policy(
{
"name": "sns-rm-named",
"resource": "sns",
"filters": [{"TopicArn": topic_arn}],
"actions": [
{"type": "remove-statements", "statement_ids": ["RemoveMe"]}
],
},
session_factory=session_factory,
)
resources = p.run()
self.assertEqual(len(resources), 1)
data = json.loads(
client.get_topic_attributes(TopicArn=resources[0]["TopicArn"])[
"Attributes"
][
"Policy"
]
)
self.assertTrue("RemoveMe" not in [s["Sid"] for s in data.get("Statement", ())])
@functional
def test_sns_modify_replace_policy(self):
session_factory = self.replay_flight_data("test_sns_modify_replace_policy")
client = session_factory().client("sns")
name = "test_sns_modify_replace_policy"
topic_arn = client.create_topic(Name=name)["TopicArn"]
self.addCleanup(client.delete_topic, TopicArn=topic_arn)
client.set_topic_attributes(
TopicArn=topic_arn,
AttributeName="Policy",
AttributeValue=json.dumps(
{
"Version": "2012-10-17",
"Statement": [
{
"Sid": "SpecificAllow",
"Effect": "Allow",
"Principal": "*",
"Action": ["SNS:Subscribe"],
"Resource": topic_arn,
}
],
}
),
)
p = self.load_policy(
{
"name": "sns-modify-replace-policy",
"resource": "sns",
"filters": [{"TopicArn": topic_arn}],
"actions": [
{
"type": "modify-policy",
"add-statements": [
{
"Sid": "ReplaceWithMe",
"Effect": "Allow",
"Principal": "*",
"Action": ["SNS:GetTopicAttributes"],
"Resource": topic_arn,
}
],
"remove-statements": "*",
}
],
},
session_factory=session_factory,
)
resources = p.run()
self.assertEqual(len(resources), 1)
data = json.loads(
client.get_topic_attributes(TopicArn=resources[0]["TopicArn"])[
"Attributes"
][
"Policy"
]
)
self.assertTrue(
"ReplaceWithMe" in [s["Sid"] for s in data.get("Statement", ())]
)
@functional
def test_sns_account_id_template(self):
session_factory = self.replay_flight_data("test_sns_account_id_template")
client = session_factory().client("sns")
name = "test_sns_account_id_template"
topic_arn = client.create_topic(Name=name)["TopicArn"]
self.addCleanup(client.delete_topic, TopicArn=topic_arn)
client.set_topic_attributes(
TopicArn=topic_arn,
AttributeName="Policy",
AttributeValue=json.dumps(
{
"Version": "2012-10-17",
"Statement": [
{
"Sid": "SpecificAllow",
"Effect": "Allow",
"Principal": "*",
"Action": ["SNS:Subscribe"],
"Resource": topic_arn,
}
],
}
),
)
p = self.load_policy(
{
"name": "sns-modify-replace-policy",
"resource": "sns",
"filters": [{"TopicArn": topic_arn}],
"actions": [
{
"type": "modify-policy",
"add-statements": [
{
"Sid": "__default_statement_ID_{account_id}",
"Effect": "Allow",
"Principal": {"Service": "s3.amazonaws.com"},
"Action": "SNS:Publish",
"Resource": topic_arn,
"Condition": {
"StringEquals": {
"AWS:SourceAccount": "{account_id}"
},
"ArnLike": {"aws:SourceArn": "arn:aws:s3:*:*:*"},
},
}
],
"remove-statements": "*",
}
],
},
session_factory=session_factory,
)
resources = p.run()
self.assertEqual(len(resources), 1)
data = json.loads(
client.get_topic_attributes(TopicArn=resources[0]["TopicArn"])[
"Attributes"
][
"Policy"
]
)
self.assertTrue(
"__default_statement_ID_" +
self.account_id in [s["Sid"] for s in data.get("Statement", ())]
)
@functional
def test_sns_modify_remove_policy(self):
session_factory = self.replay_flight_data("test_sns_modify_remove_policy")
client = session_factory().client("sns")
name = "test_sns_modify_remove_policy"
topic_arn = client.create_topic(Name=name)["TopicArn"]
self.addCleanup(client.delete_topic, TopicArn=topic_arn)
client.set_topic_attributes(
TopicArn=topic_arn,
AttributeName="Policy",
AttributeValue=json.dumps(
{
"Version": "2012-10-17",
"Statement": [
{
"Sid": "SpecificAllow",
"Effect": "Allow",
"Principal": "*",
"Action": ["SNS:Subscribe"],
"Resource": topic_arn,
},
{
"Sid": "RemoveMe",
"Effect": "Allow",
"Principal": "*",
"Action": ["SNS:GetTopicAttributes"],
"Resource": topic_arn,
},
],
}
),
)
p = self.load_policy(
{
"name": "sns-modify-remove-policy",
"resource": "sns",
"filters": [{"TopicArn": topic_arn}],
"actions": [
{
"type": "modify-policy",
"add-statements": [],
"remove-statements": ["RemoveMe"],
}
],
},
session_factory=session_factory,
)
resources = p.run()
self.assertEqual(len(resources), 1)
data = json.loads(
client.get_topic_attributes(TopicArn=resources[0]["TopicArn"])[
"Attributes"
][
"Policy"
]
)
self.assertTrue("RemoveMe" not in [s["Sid"] for s in data.get("Statement", ())])
@functional
def test_sns_modify_add_policy(self):
session_factory = self.replay_flight_data("test_sns_modify_add_policy")
client = session_factory().client("sns")
name = "test_sns_modify_add_policy"
topic_arn = client.create_topic(Name=name)["TopicArn"]
self.addCleanup(client.delete_topic, TopicArn=topic_arn)
client.set_topic_attributes(
TopicArn=topic_arn,
AttributeName="Policy",
AttributeValue=json.dumps(
{
"Version": "2012-10-17",
"Statement": [
{
"Sid": "SpecificAllow",
"Effect": "Allow",
"Principal": "*",
"Action": ["SNS:Subscribe"],
"Resource": topic_arn,
}
],
}
),
)
p = self.load_policy(
{
"name": "sns-modify-add-policy",
"resource": "sns",
"filters": [{"TopicArn": topic_arn}],
"actions": [
{
"type": "modify-policy",
"add-statements": [
{
"Sid": "AddMe",
"Effect": "Allow",
"Principal": "*",
"Action": ["SNS:GetTopicAttributes"],
"Resource": topic_arn,
}
],
"remove-statements": [],
}
],
},
session_factory=session_factory,
)
resources = p.run()
self.assertEqual(len(resources), 1)
data = json.loads(
client.get_topic_attributes(TopicArn=resources[0]["TopicArn"])[
"Attributes"
][
"Policy"
]
)
self.assertTrue("AddMe" in [s["Sid"] for s in data.get("Statement", ())])
@functional
def test_sns_modify_add_and_remove_policy(self):
session_factory = self.replay_flight_data(
"test_sns_modify_add_and_remove_policy"
)
client = session_factory().client("sns")
name = "test_sns_modify_add_and_remove_policy"
topic_arn = client.create_topic(Name=name)["TopicArn"]
self.addCleanup(client.delete_topic, TopicArn=topic_arn)
client.set_topic_attributes(
TopicArn=topic_arn,
AttributeName="Policy",
AttributeValue=json.dumps(
{
"Version": "2012-10-17",
"Statement": [
{
"Sid": "SpecificAllow",
"Effect": "Allow",
"Principal": "*",
"Action": ["SNS:Subscribe"],
"Resource": topic_arn,
},
{
"Sid": "RemoveMe",
"Effect": "Allow",
"Principal": "*",
"Action": ["SNS:GetTopicAttributes"],
"Resource": topic_arn,
},
],
}
),
)
p = self.load_policy(
{
"name": "sns-modify-add-and-remove-policy",
"resource": "sns",
"filters": [{"TopicArn": topic_arn}],
"actions": [
{
"type": "modify-policy",
"add-statements": [
{
"Sid": "AddMe",
"Effect": "Allow",
"Principal": "*",
"Action": ["SNS:GetTopicAttributes"],
"Resource": topic_arn,
}
],
"remove-statements": ["RemoveMe"],
}
],
},
session_factory=session_factory,
)
resources = p.run()
self.assertEqual(len(resources), 1)
data = json.loads(
client.get_topic_attributes(TopicArn=resources[0]["TopicArn"])[
"Attributes"
][
"Policy"
]
)
statement_ids = {s["Sid"] for s in data.get("Statement", ())}
self.assertTrue("AddMe" in statement_ids)
self.assertTrue("RemoveMe" not in statement_ids)
self.assertTrue("SpecificAllow" in statement_ids)
def test_sns_topic_encryption(self):
session_factory = self.replay_flight_data('test_sns_kms_related_filter_test')
kms = session_factory().client('kms')
p = self.load_policy(
{
'name': 'test-sns-kms-related-filter',
'resource': 'sns',
'filters': [
{
'TopicArn': 'arn:aws:sns:us-east-1:644160558196:test'
},
{
'type': 'kms-key',
'key': 'c7n:AliasName',
'value': 'alias/skunk/trails'
}
]
},
session_factory=session_factory
)
resources = p.run()
        self.assertEqual(len(resources), 1)
aliases = kms.list_aliases(KeyId=resources[0]['KmsMasterKeyId'])
self.assertEqual(aliases['Aliases'][0]['AliasName'], 'alias/skunk/trails')
def test_set_sns_topic_encryption(self):
session_factory = self.replay_flight_data('test_sns_set_encryption')
topic = 'arn:aws:sns:us-west-1:644160558196:test'
p = self.load_policy(
{
'name': 'test-sns-kms-related-filter',
'resource': 'sns',
'filters': [
{
'TopicArn': topic
},
{
'KmsMasterKeyId': 'absent'
}
],
'actions': [
{
'type': 'set-encryption'
}
]
},
session_factory=session_factory
)
resources = p.run()
self.assertEqual(len(resources), 1)
sns = session_factory().client('sns')
attributes = sns.get_topic_attributes(TopicArn=topic)
        self.assertEqual(attributes['Attributes']['KmsMasterKeyId'], 'alias/aws/sns')
def test_sns_disable_encryption(self):
session_factory = self.replay_flight_data('test_sns_unset_encryption')
topic = 'arn:aws:sns:us-west-1:644160558196:test'
p = self.load_policy(
{
'name': 'test-sns-kms-related-filter',
'resource': 'sns',
'filters': [
{
'TopicArn': topic
},
{
'KmsMasterKeyId': 'alias/aws/sns'
}
],
'actions': [
{
'type': 'set-encryption',
'enabled': False
}
]
},
session_factory=session_factory
)
resources = p.run()
self.assertEqual(len(resources), 1)
sns = session_factory().client('sns')
attributes = sns.get_topic_attributes(TopicArn=topic)['Attributes']
self.assertFalse(attributes.get('KmsMasterKeyId'))
def test_sns_set_encryption_custom_key(self):
session_factory = self.replay_flight_data('test_sns_set_encryption_custom_key')
topic = 'arn:aws:sns:us-west-1:644160558196:test'
key_alias = 'alias/alias/test/key'
sns = session_factory().client('sns')
p = self.load_policy(
{
'name': 'test-sns-kms-related-filter-alias',
'resource': 'sns',
'filters': [
{
'TopicArn': topic
},
{
'KmsMasterKeyId': 'absent'
}
],
'actions': [
{
'type': 'set-encryption',
'key': key_alias
}
]
},
session_factory=session_factory
)
resources = p.run()
self.assertEqual(len(resources), 1)
attributes = sns.get_topic_attributes(TopicArn=topic)['Attributes']
self.assertEqual(attributes.get('KmsMasterKeyId'), key_alias)
def test_sns_delete(self):
session_factory = self.replay_flight_data('test_sns_delete_topic')
policy = """
name: delete-sns
resource: aws.sns
filters:
- TopicArn: arn:aws:sns:us-west-1:644160558196:test
actions:
- type: delete
"""
p = self.load_policy(yaml_load(policy), session_factory=session_factory)
resources = p.run()
self.assertEqual(len(resources), 1)
client = session_factory().client('sns')
resources = client.list_topics()['Topics']
self.assertEqual(len(resources), 0)
def test_sns_tag(self):
session_factory = self.replay_flight_data("test_sns_tag")
p = self.load_policy(
{
"name": "tag-sns",
"resource": "sns",
"filters": [{"tag:Tagging": "absent"}],
"actions": [{"type": "tag", "key": "Tagging", "value": "added"}],
},
session_factory=session_factory,
)
resources = p.run()
self.assertEqual(len(resources), 1)
client = session_factory().client("sns")
tags = client.list_tags_for_resource(ResourceArn=resources[0]["TopicArn"])["Tags"]
self.assertEqual(tags[0]["Value"], "added")
def test_sns_remove_tag(self):
session_factory = self.replay_flight_data(
"test_sns_remove_tag")
p = self.load_policy(
{
"name": "untag-sns",
"resource": "sns",
"filters": [
{
"type": "marked-for-op",
"tag": "custodian_cleanup",
"op": "delete",
}
],
"actions": [{"type": "remove-tag", "tags": ["custodian_cleanup"]}],
},
session_factory=session_factory,
)
resources = p.run()
self.assertEqual(len(resources), 1)
client = session_factory().client("sns")
tags = client.list_tags_for_resource(ResourceArn=resources[0]["TopicArn"])["Tags"]
self.assertEqual(len(tags), 0)
def test_sns_mark_for_op(self):
session_factory = self.replay_flight_data(
"test_sns_mark_for_op"
)
p = self.load_policy(
{
"name": "sns-untagged-delete",
"resource": "sns",
"filters": [
{"tag:Tagging": "absent"},
{"tag:custodian_cleanup": "absent"},
],
"actions": [
{
"type": "mark-for-op",
"tag": "custodian_cleanup",
"op": "delete",
"days": 1,
}
],
},
session_factory=session_factory,
)
resources = p.run()
self.assertEqual(len(resources), 1)
client = session_factory().client("sns")
tags = client.list_tags_for_resource(ResourceArn=resources[0]["TopicArn"])["Tags"]
        self.assertEqual(tags[0]["Key"], "custodian_cleanup")
def test_sns_post_finding(self):
factory = self.replay_flight_data('test_sns_post_finding')
p = self.load_policy({
'name': 'sns',
'resource': 'aws.sns',
'actions': [
{'type': 'post-finding',
'types': [
'Software and Configuration Checks/OrgStandard/abc-123']}]},
session_factory=factory, config={'region': 'us-west-2'})
resources = p.resource_manager.get_resources([
'arn:aws:sns:us-west-2:644160558196:config-topic'])
rfinding = p.resource_manager.actions[0].format_resource(
resources[0])
self.assertEqual(
rfinding,
{'Details': {'AwsSnsTopic': {
'Owner': '644160558196',
'TopicName': 'config-topic'}},
'Id': 'arn:aws:sns:us-west-2:644160558196:config-topic',
'Partition': 'aws',
'Region': 'us-west-2',
'Type': 'AwsSnsTopic'})
shape_validate(
rfinding['Details']['AwsSnsTopic'],
'AwsSnsTopicDetails', 'securityhub')
def test_sns_config(self):
session_factory = self.replay_flight_data("test_sns_config")
p = self.load_policy(
{"name": "sns-config",
"source": "config",
"resource": "sns"},
session_factory=session_factory,
)
resources = p.run()
self.assertEqual(len(resources), 2)
self.assertEqual(resources[0]['Tags'][0]['Value'], 'false')
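# Hedged illustration (not part of the original tests): outside of tests these
# policies are written as YAML. A policy equivalent to the filter/action pair in
# test_set_sns_topic_encryption above might look like this (the key alias simply
# names the SNS-managed default key):
#
#   policies:
#     - name: sns-enforce-encryption
#       resource: sns
#       filters:
#         - KmsMasterKeyId: absent
#       actions:
#         - type: set-encryption
#           key: alias/aws/sns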
class TestSubscription(BaseTest):
def test_subscription_delete(self):
factory = self.replay_flight_data("test_subscription_delete")
p = self.load_policy(
{
"name": "external-owner-delete",
"resource": "sns-subscription",
"filters": [
{
"type": "value",
"key": "Owner",
"value": "123456789099",
"op": "ne",
}
],
"actions": [{"type": "delete"}],
},
session_factory=factory,
)
resources = p.run()
self.assertEqual(len(resources), 1)
self.assertNotEqual(resources[0]["Owner"], "123456789099")
client = factory().client("sns")
subs = client.list_subscriptions()
for s in subs.get("Subscriptions", []):
self.assertTrue("123456789099" == s.get("Owner"))
| thisisshi/cloud-custodian | tests/test_sns.py | Python | apache-2.0 | 26,225 |
# Copyright (c) 2014 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import functools
import os.path
import mock
import netaddr
from oslo_config import cfg
from oslo_log import log as logging
import testtools
import webob
import webob.dec
import webob.exc
from neutron.agent.common import config as agent_config
from neutron.agent.common import ovs_lib
from neutron.agent.l3 import agent as neutron_l3_agent
from neutron.agent.l3 import dvr_snat_ns
from neutron.agent.l3 import namespace_manager
from neutron.agent.l3 import namespaces
from neutron.agent import l3_agent as l3_agent_main
from neutron.agent.linux import dhcp
from neutron.agent.linux import external_process
from neutron.agent.linux import ip_lib
from neutron.agent.linux import utils
from neutron.callbacks import events
from neutron.callbacks import manager
from neutron.callbacks import registry
from neutron.callbacks import resources
from neutron.common import config as common_config
from neutron.common import constants as l3_constants
from neutron.common import utils as common_utils
from neutron.openstack.common import uuidutils
from neutron.tests.common import net_helpers
from neutron.tests.functional.agent.linux import base
from neutron.tests.functional.agent.linux import helpers
from neutron.tests.unit.agent.l3 import test_agent as test_l3_agent
LOG = logging.getLogger(__name__)
_uuid = uuidutils.generate_uuid
METADATA_REQUEST_TIMEOUT = 60
def get_ovs_bridge(br_name):
return ovs_lib.OVSBridge(br_name)
class L3AgentTestFramework(base.BaseLinuxTestCase):
def setUp(self):
super(L3AgentTestFramework, self).setUp()
mock.patch('neutron.agent.l3.agent.L3PluginApi').start()
# TODO(pcm): Move this to BaseTestCase, if we find that more tests
# use this mechanism.
self._callback_manager = manager.CallbacksManager()
mock.patch.object(registry, '_get_callback_manager',
return_value=self._callback_manager).start()
self.agent = self._configure_agent('agent1')
def _get_config_opts(self):
config = cfg.ConfigOpts()
config.register_opts(common_config.core_opts)
config.register_opts(common_config.core_cli_opts)
logging.register_options(config)
agent_config.register_process_monitor_opts(config)
return config
def _configure_agent(self, host):
conf = self._get_config_opts()
l3_agent_main.register_opts(conf)
cfg.CONF.set_override('debug', False)
agent_config.setup_logging()
conf.set_override(
'interface_driver',
'neutron.agent.linux.interface.OVSInterfaceDriver')
conf.set_override('router_delete_namespaces', True)
br_int = self.useFixture(net_helpers.OVSBridgeFixture()).bridge
br_ex = self.useFixture(net_helpers.OVSBridgeFixture()).bridge
conf.set_override('ovs_integration_bridge', br_int.br_name)
conf.set_override('external_network_bridge', br_ex.br_name)
temp_dir = self.get_new_temp_dir()
get_temp_file_path = functools.partial(self.get_temp_file_path,
root=temp_dir)
conf.set_override('state_path', temp_dir.path)
conf.set_override('metadata_proxy_socket',
get_temp_file_path('metadata_proxy'))
conf.set_override('ha_confs_path',
get_temp_file_path('ha_confs'))
conf.set_override('external_pids',
get_temp_file_path('external/pids'))
conf.set_override('host', host)
agent = neutron_l3_agent.L3NATAgentWithStateReport(host, conf)
mock.patch.object(ip_lib, '_arping').start()
return agent
def generate_router_info(self, enable_ha, ip_version=4, extra_routes=True,
enable_fip=True, enable_snat=True,
dual_stack=False, v6_ext_gw_with_sub=True):
if ip_version == 6 and not dual_stack:
enable_snat = False
enable_fip = False
extra_routes = False
if not v6_ext_gw_with_sub:
self.agent.conf.set_override('ipv6_gateway',
'fe80::f816:3eff:fe2e:1')
return test_l3_agent.prepare_router_data(ip_version=ip_version,
enable_snat=enable_snat,
enable_floating_ip=enable_fip,
enable_ha=enable_ha,
extra_routes=extra_routes,
dual_stack=dual_stack,
v6_ext_gw_with_sub=(
v6_ext_gw_with_sub))
def manage_router(self, agent, router):
self.addCleanup(self._delete_router, agent, router['id'])
ri = self._create_router(agent, router)
return ri
def _create_router(self, agent, router):
agent._process_added_router(router)
return agent.router_info[router['id']]
def _delete_router(self, agent, router_id):
agent._router_removed(router_id)
def _add_fip(self, router, fip_address, fixed_address='10.0.0.2',
host=None):
fip = {'id': _uuid(),
'port_id': _uuid(),
'floating_ip_address': fip_address,
'fixed_ip_address': fixed_address,
'host': host}
router.router[l3_constants.FLOATINGIP_KEY].append(fip)
def _add_internal_interface_by_subnet(self, router, count=1,
ip_version=4,
ipv6_subnet_modes=None,
interface_id=None):
return test_l3_agent.router_append_subnet(router, count,
ip_version, ipv6_subnet_modes, interface_id)
def _namespace_exists(self, namespace):
ip = ip_lib.IPWrapper(namespace=namespace)
return ip.netns.exists(namespace)
def _metadata_proxy_exists(self, conf, router):
pm = external_process.ProcessManager(
conf,
router.router_id,
router.ns_name)
return pm.active
def device_exists_with_ips_and_mac(self, expected_device, name_getter,
namespace):
ip_cidrs = common_utils.fixed_ip_cidrs(expected_device['fixed_ips'])
return ip_lib.device_exists_with_ips_and_mac(
name_getter(expected_device['id']), ip_cidrs,
expected_device['mac_address'], namespace)
@staticmethod
def _port_first_ip_cidr(port):
fixed_ip = port['fixed_ips'][0]
return common_utils.ip_to_cidr(fixed_ip['ip_address'],
fixed_ip['prefixlen'])
def get_device_mtu(self, target_device, name_getter, namespace):
device = ip_lib.IPDevice(name_getter(target_device), namespace)
return device.link.mtu
def get_expected_keepalive_configuration(self, router):
router_id = router.router_id
ha_device_name = router.get_ha_device_name()
ha_device_cidr = self._port_first_ip_cidr(router.ha_port)
external_port = router.get_ex_gw_port()
ex_port_ipv6 = ip_lib.get_ipv6_lladdr(external_port['mac_address'])
external_device_name = router.get_external_device_name(
external_port['id'])
external_device_cidr = self._port_first_ip_cidr(external_port)
internal_port = router.router[l3_constants.INTERFACE_KEY][0]
int_port_ipv6 = ip_lib.get_ipv6_lladdr(internal_port['mac_address'])
internal_device_name = router.get_internal_device_name(
internal_port['id'])
internal_device_cidr = self._port_first_ip_cidr(internal_port)
floating_ip_cidr = common_utils.ip_to_cidr(
router.get_floating_ips()[0]['floating_ip_address'])
default_gateway_ip = external_port['subnets'][0].get('gateway_ip')
return """vrrp_instance VR_1 {
state BACKUP
interface %(ha_device_name)s
virtual_router_id 1
priority 50
nopreempt
advert_int 2
track_interface {
%(ha_device_name)s
}
virtual_ipaddress {
169.254.0.1/24 dev %(ha_device_name)s
}
virtual_ipaddress_excluded {
%(floating_ip_cidr)s dev %(external_device_name)s
%(external_device_cidr)s dev %(external_device_name)s
%(internal_device_cidr)s dev %(internal_device_name)s
%(ex_port_ipv6)s dev %(external_device_name)s scope link
%(int_port_ipv6)s dev %(internal_device_name)s scope link
}
virtual_routes {
0.0.0.0/0 via %(default_gateway_ip)s dev %(external_device_name)s
8.8.8.0/24 via 19.4.4.4
}
}""" % {
'router_id': router_id,
'ha_device_name': ha_device_name,
'ha_device_cidr': ha_device_cidr,
'external_device_name': external_device_name,
'external_device_cidr': external_device_cidr,
'internal_device_name': internal_device_name,
'internal_device_cidr': internal_device_cidr,
'floating_ip_cidr': floating_ip_cidr,
'default_gateway_ip': default_gateway_ip,
'int_port_ipv6': int_port_ipv6,
'ex_port_ipv6': ex_port_ipv6
}
def _get_rule(self, iptables_manager, table, chain, predicate):
rules = iptables_manager.get_chain(table, chain)
result = next(rule for rule in rules if predicate(rule))
return result
def _assert_router_does_not_exist(self, router):
# If the namespace assertion succeeds
        # then the devices and iptables rules have also been deleted,
# so there's no need to check that explicitly.
self.assertFalse(self._namespace_exists(router.ns_name))
utils.wait_until_true(
lambda: not self._metadata_proxy_exists(self.agent.conf, router))
def _assert_snat_chains(self, router):
self.assertFalse(router.iptables_manager.is_chain_empty(
'nat', 'snat'))
self.assertFalse(router.iptables_manager.is_chain_empty(
'nat', 'POSTROUTING'))
def _assert_floating_ip_chains(self, router):
self.assertFalse(router.iptables_manager.is_chain_empty(
'nat', 'float-snat'))
def _assert_metadata_chains(self, router):
metadata_port_filter = lambda rule: (
str(self.agent.conf.metadata_port) in rule.rule)
self.assertTrue(self._get_rule(router.iptables_manager,
'nat',
'PREROUTING',
metadata_port_filter))
self.assertTrue(self._get_rule(router.iptables_manager,
'filter',
'INPUT',
metadata_port_filter))
def _assert_internal_devices(self, router):
internal_devices = router.router[l3_constants.INTERFACE_KEY]
self.assertTrue(len(internal_devices))
for device in internal_devices:
self.assertTrue(self.device_exists_with_ips_and_mac(
device, router.get_internal_device_name, router.ns_name))
def _assert_extra_routes(self, router):
routes = ip_lib.get_routing_table(namespace=router.ns_name)
routes = [{'nexthop': route['nexthop'],
'destination': route['destination']} for route in routes]
for extra_route in router.router['routes']:
self.assertIn(extra_route, routes)
def _assert_interfaces_deleted_from_ovs(self):
def assert_ovs_bridge_empty(bridge_name):
bridge = ovs_lib.OVSBridge(bridge_name)
self.assertFalse(bridge.get_port_name_list())
assert_ovs_bridge_empty(self.agent.conf.ovs_integration_bridge)
assert_ovs_bridge_empty(self.agent.conf.external_network_bridge)
def floating_ips_configured(self, router):
floating_ips = router.router[l3_constants.FLOATINGIP_KEY]
external_port = router.get_ex_gw_port()
return len(floating_ips) and all(
ip_lib.device_exists_with_ips_and_mac(
router.get_external_device_name(external_port['id']),
['%s/32' % fip['floating_ip_address']],
external_port['mac_address'],
namespace=router.ns_name) for fip in floating_ips)
def fail_ha_router(self, router):
device_name = router.get_ha_device_name()
ha_device = ip_lib.IPDevice(device_name, router.ns_name)
ha_device.link.set_down()
class L3AgentTestCase(L3AgentTestFramework):
def test_keepalived_state_change_notification(self):
enqueue_mock = mock.patch.object(
self.agent, 'enqueue_state_change').start()
router_info = self.generate_router_info(enable_ha=True)
router = self.manage_router(self.agent, router_info)
utils.wait_until_true(lambda: router.ha_state == 'master')
self.fail_ha_router(router)
utils.wait_until_true(lambda: router.ha_state == 'backup')
utils.wait_until_true(lambda: enqueue_mock.call_count == 3)
calls = [args[0] for args in enqueue_mock.call_args_list]
self.assertEqual((router.router_id, 'backup'), calls[0])
self.assertEqual((router.router_id, 'master'), calls[1])
self.assertEqual((router.router_id, 'backup'), calls[2])
def _expected_rpc_report(self, expected):
calls = (args[0][1] for args in
self.agent.plugin_rpc.update_ha_routers_states.call_args_list)
# Get the last state reported for each router
actual_router_states = {}
for call in calls:
for router_id, state in call.iteritems():
actual_router_states[router_id] = state
return actual_router_states == expected
def test_keepalived_state_change_bulk_rpc(self):
router_info = self.generate_router_info(enable_ha=True)
router1 = self.manage_router(self.agent, router_info)
self.fail_ha_router(router1)
router_info = self.generate_router_info(enable_ha=True)
router2 = self.manage_router(self.agent, router_info)
utils.wait_until_true(lambda: router1.ha_state == 'backup')
utils.wait_until_true(lambda: router2.ha_state == 'master')
utils.wait_until_true(
lambda: self._expected_rpc_report(
{router1.router_id: 'standby', router2.router_id: 'active'}))
def test_agent_notifications_for_router_events(self):
"""Test notifications for router create, update, and delete.
Make sure that when the agent sends notifications of router events
for router create, update, and delete, that the correct handler is
called with the right resource, event, and router information.
"""
event_handler = mock.Mock()
registry.subscribe(event_handler,
resources.ROUTER, events.BEFORE_CREATE)
registry.subscribe(event_handler,
resources.ROUTER, events.AFTER_CREATE)
registry.subscribe(event_handler,
resources.ROUTER, events.BEFORE_UPDATE)
registry.subscribe(event_handler,
resources.ROUTER, events.AFTER_UPDATE)
registry.subscribe(event_handler,
resources.ROUTER, events.BEFORE_DELETE)
registry.subscribe(event_handler,
resources.ROUTER, events.AFTER_DELETE)
router_info = self.generate_router_info(enable_ha=False)
router = self.manage_router(self.agent, router_info)
self.agent._process_updated_router(router.router)
self._delete_router(self.agent, router.router_id)
expected_calls = [
mock.call('router', 'before_create', self.agent, router=router),
mock.call('router', 'after_create', self.agent, router=router),
mock.call('router', 'before_update', self.agent, router=router),
mock.call('router', 'after_update', self.agent, router=router),
mock.call('router', 'before_delete', self.agent, router=router),
mock.call('router', 'after_delete', self.agent, router=router)]
event_handler.assert_has_calls(expected_calls)
def test_legacy_router_lifecycle(self):
self._router_lifecycle(enable_ha=False, dual_stack=True)
def test_legacy_router_lifecycle_with_no_gateway_subnet(self):
self._router_lifecycle(enable_ha=False, dual_stack=True,
v6_ext_gw_with_sub=False)
def test_ha_router_lifecycle(self):
self._router_lifecycle(enable_ha=True)
def test_conntrack_disassociate_fip(self):
        '''Test that conntrack immediately drops a stateful connection
        that uses a floating IP once it is disassociated.
'''
router_info = self.generate_router_info(enable_ha=False)
router = self.manage_router(self.agent, router_info)
port = helpers.get_free_namespace_port(router.ns_name)
client_address = '19.4.4.3'
server_address = '35.4.0.4'
def clean_fips(router):
router.router[l3_constants.FLOATINGIP_KEY] = []
clean_fips(router)
self._add_fip(router, client_address, fixed_address=server_address)
router.process(self.agent)
router_ns = ip_lib.IPWrapper(namespace=router.ns_name)
netcat = helpers.NetcatTester(router_ns, router_ns,
server_address, port,
client_address=client_address,
run_as_root=True,
udp=False)
self.addCleanup(netcat.stop_processes)
def assert_num_of_conntrack_rules(n):
out = router_ns.netns.execute(["conntrack", "-L",
"--orig-src", client_address])
self.assertEqual(
n, len([line for line in out.strip().split('\n') if line]))
with self.assert_max_execution_time(100):
assert_num_of_conntrack_rules(0)
self.assertTrue(netcat.test_connectivity())
assert_num_of_conntrack_rules(1)
clean_fips(router)
router.process(self.agent)
assert_num_of_conntrack_rules(0)
with testtools.ExpectedException(RuntimeError):
netcat.test_connectivity()
def test_ipv6_ha_router_lifecycle(self):
self._router_lifecycle(enable_ha=True, ip_version=6)
def test_keepalived_configuration(self):
router_info = self.generate_router_info(enable_ha=True)
router = self.manage_router(self.agent, router_info)
expected = self.get_expected_keepalive_configuration(router)
self.assertEqual(expected,
router.keepalived_manager.get_conf_on_disk())
# Add a new FIP and change the GW IP address
router.router = copy.deepcopy(router.router)
existing_fip = '19.4.4.2'
new_fip = '19.4.4.3'
self._add_fip(router, new_fip)
subnet_id = _uuid()
fixed_ips = [{'ip_address': '19.4.4.10',
'prefixlen': 24,
'subnet_id': subnet_id}]
subnets = [{'id': subnet_id,
'cidr': '19.4.4.0/24',
'gateway_ip': '19.4.4.5'}]
router.router['gw_port']['subnets'] = subnets
router.router['gw_port']['fixed_ips'] = fixed_ips
router.process(self.agent)
# Get the updated configuration and assert that both FIPs are in,
# and that the GW IP address was updated.
new_config = router.keepalived_manager.config.get_config_str()
old_gw = '0.0.0.0/0 via 19.4.4.1'
new_gw = '0.0.0.0/0 via 19.4.4.5'
old_external_device_ip = '19.4.4.4'
new_external_device_ip = '19.4.4.10'
self.assertIn(existing_fip, new_config)
self.assertIn(new_fip, new_config)
self.assertNotIn(old_gw, new_config)
self.assertIn(new_gw, new_config)
external_port = router.get_ex_gw_port()
external_device_name = router.get_external_device_name(
external_port['id'])
self.assertNotIn('%s/24 dev %s' %
(old_external_device_ip, external_device_name),
new_config)
self.assertIn('%s/24 dev %s' %
(new_external_device_ip, external_device_name),
new_config)
def test_periodic_sync_routers_task(self):
routers_to_keep = []
routers_to_delete = []
ns_names_to_retrieve = set()
for i in range(2):
routers_to_keep.append(self.generate_router_info(False))
self.manage_router(self.agent, routers_to_keep[i])
ns_names_to_retrieve.add(namespaces.NS_PREFIX +
routers_to_keep[i]['id'])
for i in range(2):
routers_to_delete.append(self.generate_router_info(False))
self.manage_router(self.agent, routers_to_delete[i])
ns_names_to_retrieve.add(namespaces.NS_PREFIX +
routers_to_delete[i]['id'])
        # Mock the plugin RPC API to simulate a situation where the agent
        # was handling the 4 routers created above, went down, and after
        # starting up again found that two of the routers had been deleted via the API.
mocked_get_routers = (
neutron_l3_agent.L3PluginApi.return_value.get_routers)
mocked_get_routers.return_value = routers_to_keep
        # Synchronize the agent with the plug-in
with mock.patch.object(namespace_manager.NamespaceManager, 'list_all',
return_value=ns_names_to_retrieve):
self.agent.periodic_sync_routers_task(self.agent.context)
# Mock the plugin RPC API so a known external network id is returned
# when the router updates are processed by the agent
external_network_id = _uuid()
mocked_get_external_network_id = (
neutron_l3_agent.L3PluginApi.return_value.get_external_network_id)
mocked_get_external_network_id.return_value = external_network_id
# Plug external_gateway_info in the routers that are not going to be
# deleted by the agent when it processes the updates. Otherwise,
# _process_router_if_compatible in the agent fails
for i in range(2):
routers_to_keep[i]['external_gateway_info'] = {'network_id':
external_network_id}
# Have the agent process the update from the plug-in and verify
# expected behavior
for _ in routers_to_keep + routers_to_delete:
self.agent._process_router_update()
for i in range(2):
self.assertIn(routers_to_keep[i]['id'], self.agent.router_info)
self.assertTrue(self._namespace_exists(namespaces.NS_PREFIX +
routers_to_keep[i]['id']))
for i in range(2):
self.assertNotIn(routers_to_delete[i]['id'],
self.agent.router_info)
self.assertFalse(self._namespace_exists(
namespaces.NS_PREFIX + routers_to_delete[i]['id']))
def _router_lifecycle(self, enable_ha, ip_version=4,
dual_stack=False, v6_ext_gw_with_sub=True):
router_info = self.generate_router_info(enable_ha, ip_version,
dual_stack=dual_stack,
v6_ext_gw_with_sub=(
v6_ext_gw_with_sub))
router = self.manage_router(self.agent, router_info)
# Add multiple-IPv6-prefix internal router port
slaac = l3_constants.IPV6_SLAAC
slaac_mode = {'ra_mode': slaac, 'address_mode': slaac}
subnet_modes = [slaac_mode] * 2
self._add_internal_interface_by_subnet(router.router, count=2,
ip_version=6, ipv6_subnet_modes=subnet_modes)
router.process(self.agent)
if enable_ha:
port = router.get_ex_gw_port()
interface_name = router.get_external_device_name(port['id'])
self._assert_no_ip_addresses_on_interface(router.ns_name,
interface_name)
utils.wait_until_true(lambda: router.ha_state == 'master')
# Keepalived notifies of a state transition when it starts,
# not when it ends. Thus, we have to wait until keepalived finishes
# configuring everything. We verify this by waiting until the last
# device has an IP address.
device = router.router[l3_constants.INTERFACE_KEY][-1]
device_exists = functools.partial(
self.device_exists_with_ips_and_mac,
device,
router.get_internal_device_name,
router.ns_name)
utils.wait_until_true(device_exists)
self.assertTrue(self._namespace_exists(router.ns_name))
utils.wait_until_true(
lambda: self._metadata_proxy_exists(self.agent.conf, router))
self._assert_internal_devices(router)
self._assert_external_device(router)
if not (enable_ha and (ip_version == 6 or dual_stack)):
# Note(SridharG): enable the assert_gateway for IPv6 once
# keepalived on Ubuntu14.04 (i.e., check-neutron-dsvm-functional
# platform) is updated to 1.2.10 (or above).
# For more details: https://review.openstack.org/#/c/151284/
self._assert_gateway(router, v6_ext_gw_with_sub)
self.assertTrue(self.floating_ips_configured(router))
self._assert_snat_chains(router)
self._assert_floating_ip_chains(router)
self._assert_extra_routes(router)
self._assert_metadata_chains(router)
if enable_ha:
self._assert_ha_device(router)
self.assertTrue(router.keepalived_manager.get_process().active)
self._delete_router(self.agent, router.router_id)
self._assert_interfaces_deleted_from_ovs()
self._assert_router_does_not_exist(router)
if enable_ha:
self.assertFalse(router.keepalived_manager.get_process().active)
def _assert_external_device(self, router):
external_port = router.get_ex_gw_port()
self.assertTrue(self.device_exists_with_ips_and_mac(
external_port, router.get_external_device_name,
router.ns_name))
def _assert_gateway(self, router, v6_ext_gw_with_sub=True):
external_port = router.get_ex_gw_port()
external_device_name = router.get_external_device_name(
external_port['id'])
external_device = ip_lib.IPDevice(external_device_name,
namespace=router.ns_name)
for subnet in external_port['subnets']:
self._gateway_check(subnet['gateway_ip'], external_device)
if not v6_ext_gw_with_sub:
self._gateway_check(self.agent.conf.ipv6_gateway,
external_device)
def _gateway_check(self, gateway_ip, external_device):
expected_gateway = gateway_ip
ip_vers = netaddr.IPAddress(expected_gateway).version
existing_gateway = (external_device.route.get_gateway(
ip_version=ip_vers).get('gateway'))
self.assertEqual(expected_gateway, existing_gateway)
def _assert_ha_device(self, router):
def ha_router_dev_name_getter(not_used):
return router.get_ha_device_name()
self.assertTrue(self.device_exists_with_ips_and_mac(
router.router[l3_constants.HA_INTERFACE_KEY],
ha_router_dev_name_getter, router.ns_name))
@classmethod
def _get_addresses_on_device(cls, namespace, interface):
return [address['cidr'] for address in
ip_lib.IPDevice(interface, namespace=namespace).addr.list()]
def _assert_no_ip_addresses_on_interface(self, namespace, interface):
self.assertEqual(
[], self._get_addresses_on_device(namespace, interface))
def test_ha_router_conf_on_restarted_agent(self):
router_info = self.generate_router_info(enable_ha=True)
router1 = self.manage_router(self.agent, router_info)
self._add_fip(router1, '192.168.111.12')
restarted_agent = neutron_l3_agent.L3NATAgentWithStateReport(
self.agent.host, self.agent.conf)
self._create_router(restarted_agent, router1.router)
utils.wait_until_true(lambda: self.floating_ips_configured(router1))
self.assertIn(
router1._get_primary_vip(),
self._get_addresses_on_device(
router1.ns_name,
router1.get_ha_device_name()))
def test_fip_connection_from_same_subnet(self):
        '''Test connection to a floating IP that is associated with a
        fixed_ip on the same subnet as the source fixed_ip.
        In other words, it confirms that return packets actually
        go through the router.
        '''
router_info = self.generate_router_info(enable_ha=False)
router = self.manage_router(self.agent, router_info)
router_ip_cidr = self._port_first_ip_cidr(router.internal_ports[0])
router_ip = router_ip_cidr.partition('/')[0]
src_ip_cidr = net_helpers.increment_ip_cidr(router_ip_cidr)
dst_ip_cidr = net_helpers.increment_ip_cidr(src_ip_cidr)
dst_ip = dst_ip_cidr.partition('/')[0]
dst_fip = '19.4.4.10'
router.router[l3_constants.FLOATINGIP_KEY] = []
self._add_fip(router, dst_fip, fixed_address=dst_ip)
router.process(self.agent)
br_int = get_ovs_bridge(self.agent.conf.ovs_integration_bridge)
# FIXME(cbrandily): temporary, will be replaced by fake machines
src_ns = self._create_namespace(prefix='test-src-')
src_port = self.useFixture(
net_helpers.OVSPortFixture(br_int, src_ns.namespace)).port
src_port.addr.add(src_ip_cidr)
net_helpers.set_namespace_gateway(src_port, router_ip)
dst_ns = self._create_namespace(prefix='test-dst-')
dst_port = self.useFixture(
net_helpers.OVSPortFixture(br_int, dst_ns.namespace)).port
dst_port.addr.add(dst_ip_cidr)
net_helpers.set_namespace_gateway(dst_port, router_ip)
protocol_port = helpers.get_free_namespace_port(dst_ns)
# client sends to fip
netcat = helpers.NetcatTester(src_ns, dst_ns, dst_ip,
protocol_port,
client_address=dst_fip,
run_as_root=True,
udp=False)
self.addCleanup(netcat.stop_processes)
self.assertTrue(netcat.test_connectivity())
class L3HATestFramework(L3AgentTestFramework):
NESTED_NAMESPACE_SEPARATOR = '@'
def setUp(self):
super(L3HATestFramework, self).setUp()
self.failover_agent = self._configure_agent('agent2')
br_int_1 = get_ovs_bridge(self.agent.conf.ovs_integration_bridge)
br_int_2 = get_ovs_bridge(
self.failover_agent.conf.ovs_integration_bridge)
veth1, veth2 = self.create_veth()
br_int_1.add_port(veth1.name)
br_int_2.add_port(veth2.name)
def test_ha_router_failover(self):
router_info = self.generate_router_info(enable_ha=True)
ns_name = "%s%s%s" % (
namespaces.RouterNamespace._get_ns_name(router_info['id']),
self.NESTED_NAMESPACE_SEPARATOR, self.agent.host)
mock.patch.object(namespaces.RouterNamespace, '_get_ns_name',
return_value=ns_name).start()
router1 = self.manage_router(self.agent, router_info)
router_info_2 = copy.deepcopy(router_info)
router_info_2[l3_constants.HA_INTERFACE_KEY] = (
test_l3_agent.get_ha_interface(ip='169.254.192.2',
mac='22:22:22:22:22:22'))
ns_name = "%s%s%s" % (
namespaces.RouterNamespace._get_ns_name(router_info_2['id']),
self.NESTED_NAMESPACE_SEPARATOR, self.failover_agent.host)
mock.patch.object(namespaces.RouterNamespace, '_get_ns_name',
return_value=ns_name).start()
router2 = self.manage_router(self.failover_agent, router_info_2)
utils.wait_until_true(lambda: router1.ha_state == 'master')
utils.wait_until_true(lambda: router2.ha_state == 'backup')
device_name = router1.get_ha_device_name()
ha_device = ip_lib.IPDevice(device_name, namespace=router1.ns_name)
ha_device.link.set_down()
utils.wait_until_true(lambda: router2.ha_state == 'master')
utils.wait_until_true(lambda: router1.ha_state == 'backup')
class MetadataFakeProxyHandler(object):
def __init__(self, status):
self.status = status
@webob.dec.wsgify()
def __call__(self, req):
return webob.Response(status=self.status)
class MetadataL3AgentTestCase(L3AgentTestFramework):
SOCKET_MODE = 0o644
def _create_metadata_fake_server(self, status):
server = utils.UnixDomainWSGIServer('metadata-fake-server')
self.addCleanup(server.stop)
# NOTE(cbrandily): TempDir fixture creates a folder with 0o700
# permissions but metadata_proxy_socket folder must be readable by all
# users
self.useFixture(
helpers.RecursivePermDirFixture(
os.path.dirname(self.agent.conf.metadata_proxy_socket), 0o555))
server.start(MetadataFakeProxyHandler(status),
self.agent.conf.metadata_proxy_socket,
workers=0, backlog=4096, mode=self.SOCKET_MODE)
def test_access_to_metadata_proxy(self):
"""Test access to the l3-agent metadata proxy.
The test creates:
        * An l3-agent metadata service:
          * A router (which creates a metadata proxy in the router namespace),
          * A fake metadata server
        * A "client" namespace (simulating a vm) with a port on the router
          internal subnet.
The test queries from the "client" namespace the metadata proxy on
http://169.254.169.254 and asserts that the metadata proxy added
the X-Forwarded-For and X-Neutron-Router-Id headers to the request
and forwarded the http request to the fake metadata server and the
response to the "client" namespace.
"""
router_info = self.generate_router_info(enable_ha=False)
router = self.manage_router(self.agent, router_info)
self._create_metadata_fake_server(webob.exc.HTTPOk.code)
# Create and configure client namespace
client_ns = self._create_namespace()
router_ip_cidr = self._port_first_ip_cidr(router.internal_ports[0])
ip_cidr = net_helpers.increment_ip_cidr(router_ip_cidr)
br_int = get_ovs_bridge(self.agent.conf.ovs_integration_bridge)
# FIXME(cbrandily): temporary, will be replaced by a fake machine
port = self.useFixture(
net_helpers.OVSPortFixture(br_int, client_ns.namespace)).port
port.addr.add(ip_cidr)
net_helpers.set_namespace_gateway(port,
router_ip_cidr.partition('/')[0])
# Query metadata proxy
url = 'http://%(host)s:%(port)s' % {'host': dhcp.METADATA_DEFAULT_IP,
'port': dhcp.METADATA_PORT}
cmd = 'curl', '--max-time', METADATA_REQUEST_TIMEOUT, '-D-', url
try:
raw_headers = client_ns.netns.execute(cmd)
except RuntimeError:
self.fail('metadata proxy unreachable on %s before timeout' % url)
# Check status code
firstline = raw_headers.splitlines()[0]
self.assertIn(str(webob.exc.HTTPOk.code), firstline.split())
class UnprivilegedUserMetadataL3AgentTestCase(MetadataL3AgentTestCase):
"""Test metadata proxy with least privileged user.
    The least privileged user has uid=65534 and is commonly named 'nobody' but
    not always, which is why we use its uid.
"""
SOCKET_MODE = 0o664
def setUp(self):
super(UnprivilegedUserMetadataL3AgentTestCase, self).setUp()
self.agent.conf.set_override('metadata_proxy_user', '65534')
self.agent.conf.set_override('metadata_proxy_watch_log', False)
class UnprivilegedUserGroupMetadataL3AgentTestCase(MetadataL3AgentTestCase):
"""Test metadata proxy with least privileged user/group.
    The least privileged user has uid=65534 and is commonly named 'nobody' but
    not always, which is why we use its uid.
    Its group has gid=65534 and is commonly named 'nobody' or 'nogroup', which
    is why we use its gid.
"""
SOCKET_MODE = 0o666
def setUp(self):
super(UnprivilegedUserGroupMetadataL3AgentTestCase, self).setUp()
self.agent.conf.set_override('metadata_proxy_user', '65534')
self.agent.conf.set_override('metadata_proxy_group', '65534')
self.agent.conf.set_override('metadata_proxy_watch_log', False)
class TestDvrRouter(L3AgentTestFramework):
def test_dvr_router_lifecycle_without_ha_without_snat_with_fips(self):
self._dvr_router_lifecycle(enable_ha=False, enable_snat=False)
def test_dvr_router_lifecycle_without_ha_with_snat_with_fips(self):
self._dvr_router_lifecycle(enable_ha=False, enable_snat=True)
def _helper_create_dvr_router_fips_for_ext_network(
self, agent_mode, **dvr_router_kwargs):
self.agent.conf.agent_mode = agent_mode
router_info = self.generate_dvr_router_info(**dvr_router_kwargs)
mocked_ext_net_id = (
neutron_l3_agent.L3PluginApi.return_value.get_external_network_id)
mocked_ext_net_id.return_value = (
router_info['_floatingips'][0]['floating_network_id'])
router = self.manage_router(self.agent, router_info)
fip_ns = router.fip_ns.get_name()
return router, fip_ns
def _validate_fips_for_external_network(self, router, fip_ns):
self.assertTrue(self._namespace_exists(router.ns_name))
self.assertTrue(self._namespace_exists(fip_ns))
self._assert_dvr_floating_ips(router)
self._assert_snat_namespace_does_not_exist(router)
def test_dvr_router_fips_for_multiple_ext_networks(self):
agent_mode = 'dvr'
# Create the first router fip with external net1
dvr_router1_kwargs = {'ip_address': '19.4.4.3',
'subnet_cidr': '19.4.4.0/24',
'gateway_ip': '19.4.4.1',
'gateway_mac': 'ca:fe:de:ab:cd:ef'}
router1, fip1_ns = (
self._helper_create_dvr_router_fips_for_ext_network(
agent_mode, **dvr_router1_kwargs))
# Validate the fip with external net1
self._validate_fips_for_external_network(router1, fip1_ns)
# Create the second router fip with external net2
dvr_router2_kwargs = {'ip_address': '19.4.5.3',
'subnet_cidr': '19.4.5.0/24',
'gateway_ip': '19.4.5.1',
'gateway_mac': 'ca:fe:de:ab:cd:fe'}
router2, fip2_ns = (
self._helper_create_dvr_router_fips_for_ext_network(
agent_mode, **dvr_router2_kwargs))
# Validate the fip with external net2
self._validate_fips_for_external_network(router2, fip2_ns)
def _dvr_router_lifecycle(self, enable_ha=False, enable_snat=False,
custom_mtu=2000):
        '''Test dvr router lifecycle.
        :param enable_ha: sets the ha value for the router.
        :param enable_snat: the value of enable_snat is used
            to set the agent_mode.
        :param custom_mtu: MTU applied as network_device_mtu and verified
            on the rfp/fpr devices.
        '''
# The value of agent_mode can be dvr, dvr_snat, or legacy.
# Since by definition this is a dvr (distributed = true)
# only dvr and dvr_snat are applicable
self.agent.conf.agent_mode = 'dvr_snat' if enable_snat else 'dvr'
self.agent.conf.network_device_mtu = custom_mtu
# We get the router info particular to a dvr router
router_info = self.generate_dvr_router_info(
enable_ha, enable_snat)
# We need to mock the get_agent_gateway_port return value
# because the whole L3PluginApi is mocked and we need the port
        # gateway_port information before the l3_agent creates it.
# The port returned needs to have the same information as
# router_info['gw_port']
mocked_gw_port = (
neutron_l3_agent.L3PluginApi.return_value.get_agent_gateway_port)
mocked_gw_port.return_value = router_info['gw_port']
# We also need to mock the get_external_network_id method to
# get the correct fip namespace.
mocked_ext_net_id = (
neutron_l3_agent.L3PluginApi.return_value.get_external_network_id)
mocked_ext_net_id.return_value = (
router_info['_floatingips'][0]['floating_network_id'])
# With all that set we can now ask the l3_agent to
# manage the router (create it, create namespaces,
# attach interfaces, etc...)
router = self.manage_router(self.agent, router_info)
self.assertTrue(self._namespace_exists(router.ns_name))
self.assertTrue(self._metadata_proxy_exists(self.agent.conf, router))
self._assert_internal_devices(router)
self._assert_dvr_external_device(router)
self._assert_dvr_gateway(router)
self._assert_dvr_floating_ips(router)
self._assert_snat_chains(router)
self._assert_floating_ip_chains(router)
self._assert_metadata_chains(router)
self._assert_extra_routes(router)
self._assert_rfp_fpr_mtu(router, custom_mtu)
self._delete_router(self.agent, router.router_id)
self._assert_interfaces_deleted_from_ovs()
self._assert_router_does_not_exist(router)
def generate_dvr_router_info(
self, enable_ha=False, enable_snat=False, **kwargs):
router = test_l3_agent.prepare_router_data(
enable_snat=enable_snat,
enable_floating_ip=True,
enable_ha=enable_ha,
**kwargs)
internal_ports = router.get(l3_constants.INTERFACE_KEY, [])
router['distributed'] = True
router['gw_port_host'] = self.agent.conf.host
router['gw_port']['binding:host_id'] = self.agent.conf.host
floating_ip = router['_floatingips'][0]
floating_ip['floating_network_id'] = router['gw_port']['network_id']
floating_ip['host'] = self.agent.conf.host
floating_ip['port_id'] = internal_ports[0]['id']
floating_ip['status'] = 'ACTIVE'
self._add_snat_port_info_to_router(router, internal_ports)
# FIP has a dependency on external gateway. So we need to create
# the snat_port info and fip_agent_gw_port_info irrespective of
# the agent type the dvr supports. The namespace creation is
# dependent on the agent_type.
external_gw_port = router['gw_port']
self._add_fip_agent_gw_port_info_to_router(router, external_gw_port)
return router
def _add_fip_agent_gw_port_info_to_router(self, router, external_gw_port):
# Add fip agent gateway port information to the router_info
fip_gw_port_list = router.get(
l3_constants.FLOATINGIP_AGENT_INTF_KEY, [])
if not fip_gw_port_list and external_gw_port:
# Get values from external gateway port
fixed_ip = external_gw_port['fixed_ips'][0]
float_subnet = external_gw_port['subnets'][0]
port_ip = fixed_ip['ip_address']
# Pick an ip address which is not the same as port_ip
fip_gw_port_ip = str(netaddr.IPAddress(port_ip) + 5)
# Add floatingip agent gateway port info to router
prefixlen = netaddr.IPNetwork(float_subnet['cidr']).prefixlen
router[l3_constants.FLOATINGIP_AGENT_INTF_KEY] = [
{'subnets': [
{'cidr': float_subnet['cidr'],
'gateway_ip': float_subnet['gateway_ip'],
'id': fixed_ip['subnet_id']}],
'network_id': external_gw_port['network_id'],
'device_owner': 'network:floatingip_agent_gateway',
'mac_address': 'fa:16:3e:80:8d:89',
'binding:host_id': self.agent.conf.host,
'fixed_ips': [{'subnet_id': fixed_ip['subnet_id'],
'ip_address': fip_gw_port_ip,
'prefixlen': prefixlen}],
'id': _uuid(),
'device_id': _uuid()}
]
def _add_snat_port_info_to_router(self, router, internal_ports):
# Add snat port information to the router
snat_port_list = router.get(l3_constants.SNAT_ROUTER_INTF_KEY, [])
if not snat_port_list and internal_ports:
# Get values from internal port
port = internal_ports[0]
fixed_ip = port['fixed_ips'][0]
snat_subnet = port['subnets'][0]
port_ip = fixed_ip['ip_address']
# Pick an ip address which is not the same as port_ip
snat_ip = str(netaddr.IPAddress(port_ip) + 5)
# Add the info to router as the first snat port
# in the list of snat ports
prefixlen = netaddr.IPNetwork(snat_subnet['cidr']).prefixlen
router[l3_constants.SNAT_ROUTER_INTF_KEY] = [
{'subnets': [
{'cidr': snat_subnet['cidr'],
'gateway_ip': snat_subnet['gateway_ip'],
'id': fixed_ip['subnet_id']}],
'network_id': port['network_id'],
'device_owner': 'network:router_centralized_snat',
'mac_address': 'fa:16:3e:80:8d:89',
'fixed_ips': [{'subnet_id': fixed_ip['subnet_id'],
'ip_address': snat_ip,
'prefixlen': prefixlen}],
'id': _uuid(),
'device_id': _uuid()}
]
def _assert_dvr_external_device(self, router):
external_port = router.get_ex_gw_port()
snat_ns_name = dvr_snat_ns.SnatNamespace.get_snat_ns_name(
router.router_id)
# if the agent is in dvr_snat mode, then we have to check
# that the correct ports and ip addresses exist in the
# snat_ns_name namespace
if self.agent.conf.agent_mode == 'dvr_snat':
self.assertTrue(self.device_exists_with_ips_and_mac(
external_port, router.get_external_device_name,
snat_ns_name))
# if the agent is in dvr mode then the snat_ns_name namespace
# should not be present at all:
elif self.agent.conf.agent_mode == 'dvr':
self.assertFalse(
self._namespace_exists(snat_ns_name),
"namespace %s was found but agent is in dvr mode not dvr_snat"
% (str(snat_ns_name))
)
# if the agent is anything else the test is misconfigured
# we force a test failure with message
else:
            self.fail("agent not configured for dvr or dvr_snat")
def _assert_dvr_gateway(self, router):
gateway_expected_in_snat_namespace = (
self.agent.conf.agent_mode == 'dvr_snat'
)
if gateway_expected_in_snat_namespace:
self._assert_dvr_snat_gateway(router)
snat_namespace_should_not_exist = (
self.agent.conf.agent_mode == 'dvr'
)
if snat_namespace_should_not_exist:
self._assert_snat_namespace_does_not_exist(router)
def _assert_dvr_snat_gateway(self, router):
namespace = dvr_snat_ns.SnatNamespace.get_snat_ns_name(
router.router_id)
external_port = router.get_ex_gw_port()
external_device_name = router.get_external_device_name(
external_port['id'])
external_device = ip_lib.IPDevice(external_device_name,
namespace=namespace)
existing_gateway = (
external_device.route.get_gateway().get('gateway'))
expected_gateway = external_port['subnets'][0]['gateway_ip']
self.assertEqual(expected_gateway, existing_gateway)
def _assert_snat_namespace_does_not_exist(self, router):
namespace = dvr_snat_ns.SnatNamespace.get_snat_ns_name(
router.router_id)
self.assertFalse(self._namespace_exists(namespace))
def _assert_dvr_floating_ips(self, router):
# in the fip namespace:
# Check that the fg-<port-id> (floatingip_agent_gateway)
# is created with the ip address of the external gateway port
floating_ips = router.router[l3_constants.FLOATINGIP_KEY]
self.assertTrue(floating_ips)
# We need to fetch the floatingip agent gateway port info
# from the router_info
floating_agent_gw_port = (
router.router[l3_constants.FLOATINGIP_AGENT_INTF_KEY])
self.assertTrue(floating_agent_gw_port)
external_gw_port = floating_agent_gw_port[0]
fip_ns = self.agent.get_fip_ns(floating_ips[0]['floating_network_id'])
fip_ns_name = fip_ns.get_name()
fg_port_created_successfully = ip_lib.device_exists_with_ips_and_mac(
fip_ns.get_ext_device_name(external_gw_port['id']),
[self._port_first_ip_cidr(external_gw_port)],
external_gw_port['mac_address'],
namespace=fip_ns_name)
self.assertTrue(fg_port_created_successfully)
# Check fpr-router device has been created
device_name = fip_ns.get_int_device_name(router.router_id)
fpr_router_device_created_successfully = ip_lib.device_exists(
device_name, namespace=fip_ns_name)
self.assertTrue(fpr_router_device_created_successfully)
# In the router namespace
# Check rfp-<router-id> is created correctly
for fip in floating_ips:
device_name = fip_ns.get_rtr_ext_device_name(router.router_id)
self.assertTrue(ip_lib.device_exists(
device_name, namespace=router.ns_name))
def test_dvr_router_rem_fips_on_restarted_agent(self):
self.agent.conf.agent_mode = 'dvr_snat'
router_info = self.generate_dvr_router_info()
router1 = self._create_router(self.agent, router_info)
self._add_fip(router1, '192.168.111.12', self.agent.conf.host)
fip_ns = router1.fip_ns.get_name()
restarted_agent = neutron_l3_agent.L3NATAgentWithStateReport(
self.agent.host, self.agent.conf)
router1.router[l3_constants.FLOATINGIP_KEY] = []
self._create_router(restarted_agent, router1.router)
self._assert_dvr_snat_gateway(router1)
self.assertFalse(self._namespace_exists(fip_ns))
def test_dvr_router_add_internal_network_set_arp_cache(self):
# Check that, when the router is set up and there are
        # existing ports on the uplinked subnet, the ARP
# cache is properly populated.
self.agent.conf.agent_mode = 'dvr_snat'
router_info = test_l3_agent.prepare_router_data()
router_info['distributed'] = True
expected_neighbor = '35.4.1.10'
port_data = {
'fixed_ips': [{'ip_address': expected_neighbor}],
'mac_address': 'fa:3e:aa:bb:cc:dd',
'device_owner': 'compute:None'
}
self.agent.plugin_rpc.get_ports_by_subnet.return_value = [port_data]
router1 = self._create_router(self.agent, router_info)
internal_device = router1.get_internal_device_name(
router_info['_interfaces'][0]['id'])
neighbors = ip_lib.IPDevice(internal_device, router1.ns_name).neigh
self.assertEqual(expected_neighbor, neighbors.show().split()[0])
def _assert_rfp_fpr_mtu(self, router, expected_mtu=1500):
dev_mtu = self.get_device_mtu(
router.router_id, router.fip_ns.get_rtr_ext_device_name,
router.ns_name)
self.assertEqual(expected_mtu, dev_mtu)
dev_mtu = self.get_device_mtu(
router.router_id, router.fip_ns.get_int_device_name,
router.fip_ns.get_name())
self.assertEqual(expected_mtu, dev_mtu)
| pnavarro/neutron | neutron/tests/functional/agent/test_l3_agent.py | Python | apache-2.0 | 53,684 |
# ===============================================================================
# Copyright 2011 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# =============enthought library imports=======================
import os
from datetime import datetime, timedelta
from threading import Lock
import six
from sqlalchemy import create_engine, distinct, MetaData
from sqlalchemy.exc import (
SQLAlchemyError,
InvalidRequestError,
StatementError,
DBAPIError,
OperationalError,
)
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound
from traits.api import (
Password,
Bool,
Str,
on_trait_change,
Any,
Property,
cached_property,
Int,
)
from pychron.database.core.base_orm import AlembicVersionTable
from pychron.database.core.query import compile_query
from pychron.loggable import Loggable
from pychron.regex import IPREGEX
def obscure_host(h):
if IPREGEX.match(h):
h = "x.x.x.{}".format(h.split(".")[-1])
return h
def binfunc(ds, hours):
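    # Bin a time-ordered sequence of records (each exposing a ``timestamp``
    # attribute) into (start, end) windows: a window is closed whenever two
    # consecutive timestamps are more than ``hours`` apart, and every window
    # is padded on both sides by 25% of the bin width.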
ds = [dx.timestamp for dx in ds]
p1 = ds[0]
delta_seconds = hours * 3600
td = timedelta(seconds=delta_seconds * 0.25)
for i, di in enumerate(ds):
i = max(0, i - 1)
dd = ds[i]
if (di - dd).total_seconds() > delta_seconds:
yield p1 - td, dd + td
p1 = di
yield p1 - td, di + td
class SessionCTX(object):
def __init__(self, parent, use_parent_session=True):
self._use_parent_session = use_parent_session
self._parent = parent
self._session = None
self._psession = None
def __enter__(self):
if self._use_parent_session:
self._parent.create_session()
return self._parent.session
else:
self._psession = self._parent.session
self._session = self._parent.session_factory()
self._parent.session = self._session
return self._session
def __exit__(self, exc_type, exc_val, exc_tb):
if self._session:
self._session.close()
else:
self._parent.close_session()
if self._psession:
self._parent.session = self._psession
self._psession = None
class MockQuery:
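    # Stand-in query object used when no real database connection is
    # available: every chained call returns the query itself and ``all``
    # returns an empty list, so callers see "no results" instead of an error.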
def join(self, *args, **kw):
return self
def filter(self, *args, **kw):
# type: (object, object) -> object
return self
def all(self, *args, **kw):
return []
def order_by(self, *args, **kw):
return self
class MockSession:
def query(self, *args, **kw):
return MockQuery()
# def __getattr__(self, item):
# return
class DatabaseAdapter(Loggable):
"""
The DatabaseAdapter is a base class for interacting with a SQLAlchemy database.
    Two main subclasses are used by pychron: IsotopeAdapter and MassSpecDatabaseAdapter.
    This class provides attributes for describing the database url, i.e. host, user, password, etc.,
    and methods for connecting and opening database sessions.
    It also provides some helper functions used extensively by the subclasses, e.g. ``_add_item``
    and ``_retrieve_items``.
"""
session = None
sess_stack = 0
reraise = False
connected = Bool(False)
kind = Str
prev_kind = Str
username = Str
host = Str
password = Password
timeout = Int
session_factory = None
application = Any
test_func = "get_versions"
version_func = "get_versions"
autoflush = True
autocommit = False
commit_on_add = True
# name used when writing to database
# save_username = Str
connection_parameters_changed = Bool
url = Property(depends_on="connection_parameters_changed")
datasource_url = Property(depends_on="connection_parameters_changed")
path = Str
echo = False
verbose_retrieve_query = False
verbose = True
connection_error = Str
_session_lock = None
modified = False
_trying_to_add = False
_test_connection_enabled = True
def __init__(self, *args, **kw):
super(DatabaseAdapter, self).__init__(*args, **kw)
self._session_lock = Lock()
def create_all(self, metadata):
"""
Build a database schema with the current connection
        :param metadata: SQLAlchemy MetaData object
"""
# if self.kind == 'sqlite':
metadata.create_all(self.session.bind)
# def session_ctx(self, sess=None, commit=True, rollback=True):
# """
# Make a new session context.
#
# :return: ``SessionCTX``
# """
# with self._session_lock:
# if sess is None:
# sess = self.sess
# return SessionCTX(sess, parent=self, commit=commit, rollback=rollback)
_session_cnt = 0
def session_ctx(self, use_parent_session=True):
with self._session_lock:
return SessionCTX(self, use_parent_session)
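    # Illustrative usage sketch (not from the original source; the table name
    # is hypothetical)::
    #
    #     with adapter.session_ctx() as sess:
    #         rows = sess.query(SomeTable).all()
    #
    # By default the adapter's own session is (re)used; passing
    # use_parent_session=False hands back a fresh session from
    # session_factory that is closed when the block exits.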
def create_session(self, force=False):
if self.connect(test=False):
if self.session_factory:
if force:
self.debug("force create new session {}".format(id(self)))
if self.session:
self.session.close()
self.session = self.session_factory()
self._session_cnt = 1
else:
if not self.session:
# self.debug('create new session {}'.format(id(self)))
self.session = self.session_factory()
self._session_cnt += 1
else:
self.warning("no session factory")
else:
self.session = MockSession()
def close_session(self):
if self.session and not isinstance(self.session, MockSession):
self.session.flush()
self._session_cnt -= 1
if not self._session_cnt:
self.debug("close session {}".format(id(self)))
self.session.close()
self.session = None
@property
def enabled(self):
return self.kind in ["mysql", "sqlite", "postgresql", "mssql"]
@property
def save_username(self):
from pychron.globals import globalv
return globalv.username
@on_trait_change("username,host,password,name,kind,path")
def reset_connection(self):
"""
        Trip the ``connection_parameters_changed`` flag. The next ``connect`` call will use the new values.
"""
self.connection_parameters_changed = True
self.session_factory = None
self.session = None
# @caller
def connect(
self, test=True, force=False, warn=True, version_warn=True, attribute_warn=False
):
"""
Connect to the database
:param test: Test the connection by running ``test_func``
:param force: Test connection even if connection parameters haven't changed
:param warn: Warn if the connection test fails
:param version_warn: Warn if database/pychron versions don't match
:return: True if connected else False
:rtype: bool
"""
self.connection_error = ""
if force:
self.debug("forcing database connection")
if self.connection_parameters_changed:
self._test_connection_enabled = True
force = True
if not self.connected or force:
# self.connected = True if self.kind == 'sqlite' else False
self.connected = False
pool_recycle = 600
if self.kind == "sqlite":
self.connected = True
test = False
pool_recycle = -1
self.connection_error = (
'Database "{}" kind not set. '
'Set in Preferences. current kind="{}"'.format(self.name, self.kind)
)
if not self.enabled:
from pychron.core.ui.gui import invoke_in_main_thread
invoke_in_main_thread(self.warning_dialog, self.connection_error)
else:
url = self.url
if url is not None:
self.info(
"{} connecting to database {}".format(id(self), self.public_url)
)
engine = create_engine(
url, echo=self.echo, pool_recycle=pool_recycle
)
self.session_factory = sessionmaker(
bind=engine,
autoflush=self.autoflush,
expire_on_commit=False,
autocommit=self.autocommit,
)
if test:
if not self._test_connection_enabled:
warn = False
else:
if self.test_func:
self.connected = self._test_db_connection(version_warn)
else:
self.connected = True
else:
self.connected = True
if self.connected:
self.info("connected to db {}".format(self.public_url))
# self.initialize_database()
else:
self.connection_error = 'Not Connected to Database "{}".\nAccess Denied for user= {} \
host= {}\nurl= {}'.format(
self.name, self.username, self.host, self.public_url
)
if warn:
from pychron.core.ui.gui import invoke_in_main_thread
invoke_in_main_thread(
self.warning_dialog, self.connection_error
)
self.connection_parameters_changed = False
return self.connected
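    # Connection sketch (illustrative only; the concrete adapter class and
    # connection values are hypothetical)::
    #
    #     db = IsotopeAdapter(kind='mysql', host='localhost',
    #                         username='pychron', password='secret')
    #     if db.connect(test=True):
    #         with db.session_ctx() as sess:
    #             ...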
# def initialize_database(self):
# pass
def rollback(self):
if self.session:
self.session.rollback()
def flush(self):
"""
flush the session
"""
if self.session:
try:
self.session.flush()
except:
self.session.rollback()
def expire(self, i):
if self.session:
self.session.expire(i)
def expire_all(self):
if self.session:
self.session.expire_all()
def commit(self):
"""
commit the session
"""
if self.session:
try:
self.session.commit()
except BaseException as e:
self.warning("Commit exception: {}".format(e))
self.session.rollback()
def delete(self, obj):
if self.session:
self.session.delete(obj)
def post_commit(self):
if self._trying_to_add:
self.modified = True
def add_item(self, *args, **kw):
return self._add_item(*args, **kw)
# def get_session(self):
# """
# return the current session or make a new one
#
# :return: Session
# """
# sess = self.sess
# if sess is None:
# self.debug('$$$$$$$$$$$$$$$$ session is None')
# sess = self.session_factory()
#
# return sess
def get_migrate_version(self, **kw):
"""
Query the AlembicVersionTable
"""
q = self.session.query(AlembicVersionTable)
mv = q.one()
return mv
def get_versions(self, **kw):
pass
@property
def public_datasource_url(self):
if self.kind == "sqlite":
url = "{}:{}".format(
os.path.basename(os.path.dirname(self.path)),
os.path.basename(self.path),
)
else:
url = "{}:{}".format(obscure_host(self.host), self.name)
return url
@cached_property
def _get_datasource_url(self):
if self.kind == "sqlite":
url = "{}:{}".format(
os.path.basename(os.path.dirname(self.path)),
os.path.basename(self.path),
)
else:
url = "{}:{}".format(self.host, self.name)
return url
@property
def public_url(self):
kind = self.kind
user = self.username
host = self.host
name = self.name
if kind == "sqlite":
pu = "{}:{}".format(
os.path.basename(os.path.dirname(self.path)),
os.path.basename(self.path),
)
else:
pu = "{}://{}@{}/{}".format(kind, user, host, name)
return pu
@cached_property
def _get_url(self):
kind = self.kind
password = self.password
user = self.username
host = self.host
name = self.name
timeout = self.timeout
if kind in ("mysql", "postgresql", "mssql"):
if kind == "mysql":
# add support for different mysql drivers
driver = self._import_mysql_driver()
if driver is None:
return
elif kind == "mssql":
driver = self._import_mssql_driver()
if driver is None:
return
else:
driver = "pg8000"
if password:
user = "{}:{}".format(user, password)
prefix = "{}+{}://{}@".format(kind, driver, user)
if driver == "pyodbc":
url = "{}{}".format(prefix, name)
else:
url = "{}{}/{}".format(prefix, host, name)
if kind == "mysql" and self.timeout:
url = "{}?connect_timeout={}".format(url, timeout)
else:
url = "sqlite:///{}".format(self.path)
return url
def _import_mssql_driver(self):
driver = None
try:
import pyodbc
driver = "pyodbc"
except ImportError:
try:
import pymssql
driver = "pymssql"
except ImportError:
pass
self.info('using mssql driver="{}"'.format(driver))
return driver
def _import_mysql_driver(self):
try:
"""
pymysql
https://github.com/petehunt/PyMySQL/
"""
import pymysql
driver = "pymysql"
except ImportError:
try:
import _mysql
driver = "mysqldb"
except ImportError:
self.warning_dialog(
"A mysql driver was not found. Install PyMySQL or MySQL-python"
)
return
self.info('using mysql driver="{}"'.format(driver))
return driver
def _test_db_connection(self, version_warn):
self.connected = True
self.create_session()
try:
self.info("testing database connection {}".format(self.test_func))
vers = getattr(self, self.test_func)(reraise=True)
if version_warn:
self._version_warn_hook()
connected = True
except OperationalError:
self.warning("Operational connection failed to {}".format(self.public_url))
connected = False
self._test_connection_enabled = False
except Exception as e:
self.debug_exception()
self.warning(
"connection failed to {} exception={}".format(self.public_url, e)
)
connected = False
finally:
self.info("closing test session")
self.close_session()
return connected
def _version_warn_hook(self):
pass
# def test_version(self):
# ver = getattr(self, self.version_func)()
# ver = ver.version_num
# aver = version.__alembic__
# if ver != aver:
# return 'Database is out of data. Pychron ver={}, Database ver={}'.format(aver, ver)
def _add_item(self, obj):
sess = self.session
if sess:
sess.add(obj)
try:
if self.autoflush:
sess.flush()
self.modified = True
self._trying_to_add = True
if not self.autocommit and self.commit_on_add:
sess.commit()
return obj
except SQLAlchemyError as e:
import traceback
self.debug(
"add_item exception {} {}".format(obj, traceback.format_exc())
)
sess.rollback()
if self.reraise:
raise
else:
self.critical("No session")
def _add_unique(self, item, attr, name):
nitem = getattr(self, "get_{}".format(attr))(name)
if nitem is None:
self.info("adding {}= {}".format(attr, name))
self._add_item(item)
nitem = item
return nitem
def _get_date_range(self, q, asc, desc, hours=0):
lan = q.order_by(asc).first()
han = q.order_by(desc).first()
lan = datetime.now() if not lan else lan.timestamp
han = datetime.now() if not han else han.timestamp
td = timedelta(hours=hours)
return lan - td, han + td
def _delete_item(self, value, name=None):
if name is not None:
func = getattr(self, "get_{}".format(name))
item = func(value)
else:
item = value
if item:
self.debug("deleting value={},name={},item={}".format(value, name, item))
self.session.delete(item)
def _retrieve_items(
self,
table,
joins=None,
filters=None,
limit=None,
order=None,
distinct_=False,
query_hook=None,
reraise=False,
func="all",
group_by=None,
verbose_query=False,
):
sess = self.session
if sess is None or isinstance(sess, MockSession):
self.debug("USING MOCKSESSION************** {}".format(sess))
return []
if distinct_:
if isinstance(distinct_, bool):
q = sess.query(distinct(table))
else:
q = sess.query(distinct(distinct_))
elif isinstance(table, tuple):
q = sess.query(*table)
else:
q = sess.query(table)
if joins:
try:
for ji in joins:
if ji != table:
q = q.join(ji)
except InvalidRequestError:
if reraise:
raise
if filters is not None:
for fi in filters:
q = q.filter(fi)
if order is not None:
if not isinstance(order, tuple):
order = (order,)
q = q.order_by(*order)
if group_by is not None:
            if not isinstance(group_by, tuple):
group_by = (group_by,)
q = q.group_by(*group_by)
if limit is not None:
q = q.limit(limit)
if query_hook:
q = query_hook(q)
if verbose_query or self.verbose_retrieve_query:
# print compile_query(q)
self.debug(compile_query(q))
items = self._query(q, func, reraise)
if items is None:
items = []
return items
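    # Subclasses typically wrap _retrieve_items with thin getters, e.g.
    # (hypothetical table and column names)::
    #
    #     def get_analyses(self, limit=None):
    #         return self._retrieve_items(AnalysisTbl,
    #                                     order=AnalysisTbl.timestamp.desc(),
    #                                     limit=limit)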
def _retrieve_first(self, table, value=None, key="name", order_by=None):
if value is not None:
if not isinstance(value, (str, int, six.text_type, int, float)):
return value
q = self.session.query(table)
if value is not None:
q = q.filter(getattr(table, key) == value)
try:
if order_by is not None:
q = q.order_by(order_by)
return q.first()
except SQLAlchemyError as e:
print("execption first", e)
return
def _query_all(self, q, **kw):
ret = self._query(q, "all", **kw)
return ret or []
def _query_first(self, q, **kw):
return self._query(q, "first", **kw)
def _query_one(self, q, **kw):
q = q.limit(1)
return self._query(q, "one", **kw)
def _query(self, q, func, reraise=False, verbose_query=False):
if verbose_query:
try:
cq = compile_query(q)
self.debug(cq)
except BaseException:
cq = "Query failed to compile"
self.debug_exception()
# print compile_query(q)
f = getattr(q, func)
try:
return f()
except NoResultFound:
if verbose_query:
self.info("no results found for query -- {}".format(cq))
except OperationalError as e:
self.debug("_query operation exception")
self.debug_exception()
except SQLAlchemyError as e:
if self.verbose:
self.debug("_query exception {}".format(e))
try:
self.rollback()
self.reset_connection()
self.connect()
except BaseException:
pass
if reraise:
raise e
def _append_filters(self, f, kw):
filters = kw.get("filters", [])
if isinstance(f, (tuple, list)):
filters.extend(f)
else:
filters.append(f)
kw["filters"] = filters
return kw
def _append_joins(self, f, kw):
joins = kw.get("joins", [])
if isinstance(f, (tuple, list)):
joins.extend(f)
else:
joins.append(f)
kw["joins"] = joins
return kw
def _retrieve_item(
self,
table,
value,
key="name",
last=None,
joins=None,
filters=None,
options=None,
verbose=True,
verbose_query=False,
):
if not isinstance(value, (str, int, six.text_type, int, float, list, tuple)):
return value
if not isinstance(value, (list, tuple)):
value = (value,)
if not isinstance(key, (list, tuple)):
key = (key,)
def __retrieve(s):
q = s.query(table)
if joins:
try:
for ji in joins:
if ji != table:
q = q.join(ji)
except InvalidRequestError:
pass
if filters is not None:
for fi in filters:
q = q.filter(fi)
for k, v in zip(key, value):
q = q.filter(getattr(table, k) == v)
if last:
q = q.order_by(last)
if verbose_query or self.verbose_retrieve_query:
self.debug(compile_query(q))
ntries = 3
import traceback
for i in range(ntries):
try:
return q.one()
except (DBAPIError, OperationalError, StatementError):
self.debug(traceback.format_exc())
s.rollback()
continue
except MultipleResultsFound:
if verbose:
self.debug(
"multiples row found for {} {} {}. Trying to get last row".format(
table.__tablename__, key, value
)
)
try:
if hasattr(table, "id"):
q = q.order_by(table.id.desc())
return q.limit(1).all()[-1]
except (SQLAlchemyError, IndexError, AttributeError) as e:
if verbose:
self.debug(
"no rows for {} {} {}".format(
table.__tablename__, key, value
)
)
break
except NoResultFound:
if verbose and self.verbose:
self.debug(
"no row found for {} {} {}".format(
table.__tablename__, key, value
)
)
break
close = False
if self.session is None:
self.create_session()
close = True
ret = __retrieve(self.session)
if close:
self.close_session()
return ret
def _get_items(
self,
table,
gtables,
join_table=None,
filter_str=None,
limit=None,
order=None,
key=None,
):
if isinstance(join_table, str):
join_table = gtables[join_table]
q = self._get_query(table, join_table=join_table, filter_str=filter_str)
if order:
for o in order if isinstance(order, list) else [order]:
q = q.order_by(o)
if limit:
q = q.limit(limit)
# reorder based on id
if order:
q = q.from_self()
q = q.order_by(table.id)
res = q.all()
if key:
return [getattr(ri, key) for ri in res]
return res
class PathDatabaseAdapter(DatabaseAdapter):
path_table = None
def add_path(self, rec, path, **kw):
if self.path_table is None:
raise NotImplementedError
kw = self._get_path_keywords(path, kw)
p = self.path_table(**kw)
rec.path = p
return p
def _get_path_keywords(self, path, args):
n = os.path.basename(path)
r = os.path.dirname(path)
args["root"] = r
args["filename"] = n
return args
class SQLiteDatabaseAdapter(DatabaseAdapter):
kind = "sqlite"
def build_database(self):
self.connect(test=False)
if not os.path.isfile(self.path):
meta = MetaData()
self._build_database(self.session, meta)
def _build_database(self, sess, meta):
raise NotImplementedError
# ============= EOF =============================================
| USGSDenverPychron/pychron | pychron/database/core/database_adapter.py | Python | apache-2.0 | 27,114 |
from .forms import SetupForm
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from splunkdj.decorators.render import render_to
from splunkdj.setup import create_setup_view_context
@login_required
def home(request):
# Redirect to the default view, which happens to be a non-framework view
return redirect('/en-us/app/twitter2/twitter_general')
@render_to('twitter2:setup.html')
@login_required
def setup(request):
result = create_setup_view_context(
request,
SetupForm,
reverse('twitter2:home'))
# HACK: Workaround DVPL-4647 (Splunk 6.1 and below):
# Refresh current app's state so that non-framework views
# observe when the app becomes configured.
service = request.service
app_name = service.namespace['app']
service.apps[app_name].post('_reload')
return result
| dakiri/splunk-app-twitter | twitter2/django/twitter2/views.py | Python | apache-2.0 | 944 |
#!/usr/bin/env python
# Copyright 2014 The Swarming Authors. All rights reserved.
# Use of this source code is governed by the Apache v2.0 license that can be
# found in the LICENSE file.
import datetime
import sys
import unittest
import test_env
test_env.setup_test_env()
# From components/third_party/
import webtest
import webapp2
import stats
from components import stats_framework
from support import stats_framework_mock
from support import test_case
# pylint: disable=R0201
class Store(webapp2.RequestHandler):
def get(self):
"""Generates fake stats."""
stats.add_entry(stats.STORE, 2048, 'GS; inline')
self.response.write('Yay')
class Return(webapp2.RequestHandler):
def get(self):
"""Generates fake stats."""
stats.add_entry(stats.RETURN, 4096, 'memcache')
self.response.write('Yay')
class Lookup(webapp2.RequestHandler):
def get(self):
"""Generates fake stats."""
stats.add_entry(stats.LOOKUP, 200, 103)
self.response.write('Yay')
class Dupe(webapp2.RequestHandler):
def get(self):
"""Generates fake stats."""
stats.add_entry(stats.DUPE, 1024, 'inline')
self.response.write('Yay')
def to_str(now, delta):
"""Converts a datetime to unicode."""
now = now + datetime.timedelta(seconds=delta)
return unicode(now.strftime(stats.utils.DATETIME_FORMAT))
class StatsTest(test_case.TestCase, stats_framework_mock.MockMixIn):
def setUp(self):
super(StatsTest, self).setUp()
fake_routes = [
('/store', Store),
('/return', Return),
('/lookup', Lookup),
('/dupe', Dupe),
]
self.app = webtest.TestApp(
webapp2.WSGIApplication(fake_routes, debug=True),
extra_environ={'REMOTE_ADDR': 'fake-ip'})
stats_framework_mock.configure(self)
self.now = datetime.datetime(2010, 1, 2, 3, 4, 5, 6)
self.mock_now(self.now, 0)
def _test_handler(self, url, added_data):
stats_framework_mock.reset_timestamp(stats.STATS_HANDLER, self.now)
self.assertEqual('Yay', self.app.get(url).body)
self.assertEqual(1, len(list(stats_framework.yield_entries(None, None))))
self.mock_now(self.now, 60)
self.assertEqual(10, stats.generate_stats())
actual = stats_framework.get_stats(
stats.STATS_HANDLER, 'minutes', self.now, 1, True)
expected = [
{
'contains_lookups': 0,
'contains_requests': 0,
'downloads': 0,
'downloads_bytes': 0,
'failures': 0,
'key': datetime.datetime(2010, 1, 2, 3, 4),
'other_requests': 0,
'requests': 1,
'uploads': 0,
'uploads_bytes': 0,
},
]
expected[0].update(added_data)
self.assertEqual(expected, actual)
def test_store(self):
expected = {
'uploads': 1,
'uploads_bytes': 2048,
}
self._test_handler('/store', expected)
def test_return(self):
expected = {
'downloads': 1,
'downloads_bytes': 4096,
}
self._test_handler('/return', expected)
def test_lookup(self):
expected = {
'contains_lookups': 200,
'contains_requests': 1,
}
self._test_handler('/lookup', expected)
def test_dupe(self):
expected = {
'other_requests': 1,
}
self._test_handler('/dupe', expected)
if __name__ == '__main__':
if '-v' in sys.argv:
unittest.TestCase.maxDiff = None
unittest.main()
| madecoste/swarming | appengine/isolate/tests/stats_test.py | Python | apache-2.0 | 3,366 |
# Copyright 2015 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from .. import mlog
from .. import build
from ..mesonlib import MesonException, Popen_safe
from ..dependencies import Qt4Dependency
from . import ExtensionModule
import xml.etree.ElementTree as ET
from . import ModuleReturnValue
class Qt4Module(ExtensionModule):
tools_detected = False
def _detect_tools(self, env, method):
if self.tools_detected:
return
mlog.log('Detecting Qt4 tools')
# FIXME: We currently require Qt4 to exist while importing the module.
# We should make it gracefully degrade and not create any targets if
# the import is marked as 'optional' (not implemented yet)
kwargs = {'required': 'true', 'modules': 'Core', 'silent': 'true', 'method': method}
qt4 = Qt4Dependency(env, kwargs)
# Get all tools and then make sure that they are the right version
self.moc, self.uic, self.rcc = qt4.compilers_detect()
# Moc, uic and rcc write their version strings to stderr.
# Moc and rcc return a non-zero result when doing so.
# What kind of an idiot thought that was a good idea?
if self.moc.found():
stdout, stderr = Popen_safe(self.moc.get_command() + ['-v'])[1:3]
stdout = stdout.strip()
stderr = stderr.strip()
if 'Qt Meta' in stderr:
moc_ver = stderr
else:
raise MesonException('Moc preprocessor is not for Qt 4. Output:\n%s\n%s' %
(stdout, stderr))
mlog.log(' moc:', mlog.green('YES'), '(%s, %s)' %
(self.moc.get_path(), moc_ver.split()[-1]))
else:
mlog.log(' moc:', mlog.red('NO'))
if self.uic.found():
stdout, stderr = Popen_safe(self.uic.get_command() + ['-v'])[1:3]
stdout = stdout.strip()
stderr = stderr.strip()
if 'version 4.' in stderr:
uic_ver = stderr
else:
raise MesonException('Uic compiler is not for Qt4. Output:\n%s\n%s' %
(stdout, stderr))
mlog.log(' uic:', mlog.green('YES'), '(%s, %s)' %
(self.uic.get_path(), uic_ver.split()[-1]))
else:
mlog.log(' uic:', mlog.red('NO'))
if self.rcc.found():
stdout, stderr = Popen_safe(self.rcc.get_command() + ['-v'])[1:3]
stdout = stdout.strip()
stderr = stderr.strip()
if 'version 4.' in stderr:
rcc_ver = stderr
else:
raise MesonException('Rcc compiler is not for Qt 4. Output:\n%s\n%s' %
(stdout, stderr))
mlog.log(' rcc:', mlog.green('YES'), '(%s, %s)'
% (self.rcc.get_path(), rcc_ver.split()[-1]))
else:
mlog.log(' rcc:', mlog.red('NO'))
self.tools_detected = True
def parse_qrc(self, state, fname):
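        # Parse a Qt resource (.qrc) file and return the referenced file
        # paths (relative to the source tree) so they can be passed to the
        # rcc target as depend_files.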
abspath = os.path.join(state.environment.source_dir, state.subdir, fname)
relative_part = os.path.split(fname)[0]
try:
tree = ET.parse(abspath)
root = tree.getroot()
result = []
for child in root[0]:
if child.tag != 'file':
mlog.warning("malformed rcc file: ", os.path.join(state.subdir, fname))
break
else:
result.append(os.path.join(state.subdir, relative_part, child.text))
return result
except Exception:
return []
def preprocess(self, state, args, kwargs):
rcc_files = kwargs.pop('qresources', [])
if not isinstance(rcc_files, list):
rcc_files = [rcc_files]
ui_files = kwargs.pop('ui_files', [])
if not isinstance(ui_files, list):
ui_files = [ui_files]
moc_headers = kwargs.pop('moc_headers', [])
if not isinstance(moc_headers, list):
moc_headers = [moc_headers]
moc_sources = kwargs.pop('moc_sources', [])
if not isinstance(moc_sources, list):
moc_sources = [moc_sources]
sources = kwargs.pop('sources', [])
if not isinstance(sources, list):
sources = [sources]
sources += args[1:]
method = kwargs.get('method', 'auto')
self._detect_tools(state.environment, method)
err_msg = "{0} sources specified and couldn't find {1}, " \
"please check your qt4 installation"
if len(moc_headers) + len(moc_sources) > 0 and not self.moc.found():
raise MesonException(err_msg.format('MOC', 'moc-qt4'))
if len(rcc_files) > 0:
if not self.rcc.found():
raise MesonException(err_msg.format('RCC', 'rcc-qt4'))
qrc_deps = []
for i in rcc_files:
qrc_deps += self.parse_qrc(state, i)
if len(args) > 0:
name = args[0]
else:
basename = os.path.split(rcc_files[0])[1]
name = 'qt4-' + basename.replace('.', '_')
rcc_kwargs = {'input': rcc_files,
'output': name + '.cpp',
'command': [self.rcc, '-o', '@OUTPUT@', '@INPUT@'],
'depend_files': qrc_deps}
res_target = build.CustomTarget(name, state.subdir, rcc_kwargs)
sources.append(res_target)
if len(ui_files) > 0:
if not self.uic.found():
raise MesonException(err_msg.format('UIC', 'uic-qt4'))
ui_kwargs = {'output': 'ui_@[email protected]',
'arguments': ['-o', '@OUTPUT@', '@INPUT@']}
ui_gen = build.Generator([self.uic], ui_kwargs)
ui_output = ui_gen.process_files('Qt4 ui', ui_files, state)
sources.append(ui_output)
if len(moc_headers) > 0:
moc_kwargs = {'output': 'moc_@[email protected]',
'arguments': ['@INPUT@', '-o', '@OUTPUT@']}
moc_gen = build.Generator([self.moc], moc_kwargs)
moc_output = moc_gen.process_files('Qt4 moc header', moc_headers, state)
sources.append(moc_output)
if len(moc_sources) > 0:
moc_kwargs = {'output': '@[email protected]',
'arguments': ['@INPUT@', '-o', '@OUTPUT@']}
moc_gen = build.Generator([self.moc], moc_kwargs)
moc_output = moc_gen.process_files('Qt4 moc source', moc_sources, state)
sources.append(moc_output)
return ModuleReturnValue(sources, sources)
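    # NOTE: from a meson.build file this module is imported with import('qt4')
    # and preprocess() is given ``moc_headers``, ``moc_sources``, ``ui_files``
    # and ``qresources`` keyword arguments; the returned sources are then added
    # to an executable or library target.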
def initialize():
mlog.warning('rcc dependencies will not work properly until this upstream issue is fixed:',
mlog.bold('https://bugreports.qt.io/browse/QTBUG-45460'))
return Qt4Module()
| rhd/meson | mesonbuild/modules/qt4.py | Python | apache-2.0 | 7,453 |
# Copyright (c) 2010-2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from collections import defaultdict
import optparse
import re
import socket
from swift.common import exceptions
from swift.common.utils import expand_ipv6, is_valid_ip, is_valid_ipv4, \
is_valid_ipv6
def tiers_for_dev(dev):
"""
Returns a tuple of tiers for a given device in ascending order by
length.
:returns: tuple of tiers
"""
t1 = dev['region']
t2 = dev['zone']
t3 = dev['ip']
t4 = dev['id']
return ((t1,),
(t1, t2),
(t1, t2, t3),
(t1, t2, t3, t4))
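# Example (illustrative): for dev = {'region': 1, 'zone': 1,
# 'ip': '192.168.101.1', 'id': 0}, tiers_for_dev(dev) returns
# ((1,), (1, 1), (1, 1, '192.168.101.1'), (1, 1, '192.168.101.1', 0)).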
def build_tier_tree(devices):
"""
Construct the tier tree from the zone layout.
The tier tree is a dictionary that maps tiers to their child tiers.
A synthetic root node of () is generated so that there's one tree,
not a forest.
Example:
region 1 -+---- zone 1 -+---- 192.168.101.1 -+---- device id 0
| | |
| | +---- device id 1
| | |
| | +---- device id 2
| |
| +---- 192.168.101.2 -+---- device id 3
| |
| +---- device id 4
| |
| +---- device id 5
|
+---- zone 2 -+---- 192.168.102.1 -+---- device id 6
| |
| +---- device id 7
| |
| +---- device id 8
|
+---- 192.168.102.2 -+---- device id 9
|
+---- device id 10
region 2 -+---- zone 1 -+---- 192.168.201.1 -+---- device id 12
| |
| +---- device id 13
| |
| +---- device id 14
|
+---- 192.168.201.2 -+---- device id 15
|
+---- device id 16
|
+---- device id 17
The tier tree would look like:
{
(): [(1,), (2,)],
(1,): [(1, 1), (1, 2)],
(2,): [(2, 1)],
(1, 1): [(1, 1, 192.168.101.1),
(1, 1, 192.168.101.2)],
(1, 2): [(1, 2, 192.168.102.1),
(1, 2, 192.168.102.2)],
(2, 1): [(2, 1, 192.168.201.1),
(2, 1, 192.168.201.2)],
(1, 1, 192.168.101.1): [(1, 1, 192.168.101.1, 0),
(1, 1, 192.168.101.1, 1),
(1, 1, 192.168.101.1, 2)],
(1, 1, 192.168.101.2): [(1, 1, 192.168.101.2, 3),
(1, 1, 192.168.101.2, 4),
(1, 1, 192.168.101.2, 5)],
(1, 2, 192.168.102.1): [(1, 2, 192.168.102.1, 6),
(1, 2, 192.168.102.1, 7),
(1, 2, 192.168.102.1, 8)],
(1, 2, 192.168.102.2): [(1, 2, 192.168.102.2, 9),
(1, 2, 192.168.102.2, 10)],
(2, 1, 192.168.201.1): [(2, 1, 192.168.201.1, 12),
(2, 1, 192.168.201.1, 13),
(2, 1, 192.168.201.1, 14)],
(2, 1, 192.168.201.2): [(2, 1, 192.168.201.2, 15),
(2, 1, 192.168.201.2, 16),
(2, 1, 192.168.201.2, 17)],
}
:devices: device dicts from which to generate the tree
:returns: tier tree
"""
tier2children = defaultdict(set)
for dev in devices:
for tier in tiers_for_dev(dev):
if len(tier) > 1:
tier2children[tier[0:-1]].add(tier)
else:
tier2children[()].add(tier)
return tier2children
def validate_and_normalize_ip(ip):
"""
Return normalized ip if the ip is a valid ip.
Otherwise raise ValueError Exception. The hostname is
normalized to all lower case. IPv6-addresses are converted to
lowercase and fully expanded.
"""
# first convert to lower case
new_ip = ip.lower()
if is_valid_ipv4(new_ip):
return new_ip
elif is_valid_ipv6(new_ip):
return expand_ipv6(new_ip)
else:
raise ValueError('Invalid ip %s' % ip)
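# Illustrative examples (added for clarity, not part of the original module):
#
#   >>> validate_and_normalize_ip('1.2.3.4')
#   '1.2.3.4'
#   >>> validate_and_normalize_ip('not-an-ip')
#   Traceback (most recent call last):
#       ...
#   ValueError: Invalid ip not-an-ip
# IPv6 input is lowercased and passed through expand_ipv6() before returning.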
def validate_and_normalize_address(address):
"""
    Return the normalized form of address if it is a valid IP address or
    hostname; otherwise raise ValueError. Hostnames are normalized to
    lower case; IPv6 addresses are converted to lowercase and
    normalized via expand_ipv6().
    RFC 1123 2.1 Host Names and Numbers
DISCUSSION
This last requirement is not intended to specify the complete
syntactic form for entering a dotted-decimal host number;
that is considered to be a user-interface issue. For
example, a dotted-decimal number must be enclosed within
"[ ]" brackets for SMTP mail (see Section 5.2.17). This
notation could be made universal within a host system,
simplifying the syntactic checking for a dotted-decimal
number.
If a dotted-decimal number can be entered without such
identifying delimiters, then a full syntactic check must be
made, because a segment of a host domain name is now allowed
to begin with a digit and could legally be entirely numeric
(see Section 6.1.2.4). However, a valid host name can never
have the dotted-decimal form #.#.#.#, since at least the
highest-level component label will be alphabetic.
"""
new_address = address.lstrip('[').rstrip(']')
if address.startswith('[') and address.endswith(']'):
return validate_and_normalize_ip(new_address)
new_address = new_address.lower()
if is_valid_ipv4(new_address):
return new_address
elif is_valid_ipv6(new_address):
return expand_ipv6(new_address)
elif is_valid_hostname(new_address):
return new_address
else:
raise ValueError('Invalid address %s' % address)
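# Illustrative examples (added for clarity, not part of the original module):
#
#   >>> validate_and_normalize_address('STORAGE.example.com')
#   'storage.example.com'
#   >>> validate_and_normalize_address('1.2.3.4')
#   '1.2.3.4'
# Bracketed input such as '[::1]' is treated as an IP address and normalized
# via validate_and_normalize_ip().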
def is_valid_hostname(hostname):
"""
Return True if the provided hostname is a valid hostname
"""
if len(hostname) < 1 or len(hostname) > 255:
return False
if hostname.endswith('.'):
# strip exactly one dot from the right, if present
hostname = hostname[:-1]
allowed = re.compile("(?!-)[A-Z\d-]{1,63}(?<!-)$", re.IGNORECASE)
return all(allowed.match(x) for x in hostname.split("."))
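# Illustrative examples (added for clarity, not part of the original module):
#
#   >>> is_valid_hostname('storage-1.example.com')
#   True
#   >>> is_valid_hostname('-bad.example.com')
#   False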
def is_local_device(my_ips, my_port, dev_ip, dev_port):
"""
Return True if the provided dev_ip and dev_port are among the IP
addresses specified in my_ips and my_port respectively.
To support accurate locality determination in the server-per-port
deployment, when my_port is None, only IP addresses are used for
determining locality (dev_port is ignored).
If dev_ip is a hostname then it is first translated to an IP
address before checking it against my_ips.
"""
candidate_ips = []
if not is_valid_ip(dev_ip) and is_valid_hostname(dev_ip):
try:
# get the ip for this host; use getaddrinfo so that
# it works for both ipv4 and ipv6 addresses
addrinfo = socket.getaddrinfo(dev_ip, dev_port)
for addr in addrinfo:
family = addr[0]
dev_ip = addr[4][0] # get the ip-address
if family == socket.AF_INET6:
dev_ip = expand_ipv6(dev_ip)
candidate_ips.append(dev_ip)
except socket.gaierror:
return False
else:
if is_valid_ipv6(dev_ip):
dev_ip = expand_ipv6(dev_ip)
candidate_ips = [dev_ip]
for dev_ip in candidate_ips:
if dev_ip in my_ips and (my_port is None or dev_port == my_port):
return True
return False
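# Illustrative example (added for clarity, not part of the original module),
# with hypothetical addresses and ports:
#
#   >>> is_local_device(['127.0.0.1', '10.0.0.5'], 6200, '10.0.0.5', 6200)
#   True
#   >>> is_local_device(['127.0.0.1', '10.0.0.5'], 6200, '10.0.0.5', 6201)
#   False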
def parse_search_value(search_value):
"""The <search-value> can be of the form::
d<device_id>r<region>z<zone>-<ip>:<port>R<r_ip>:<r_port>/
<device_name>_<meta>
Where <r_ip> and <r_port> are replication ip and port.
Any part is optional, but you must include at least one part.
Examples::
d74 Matches the device id 74
r4 Matches devices in region 4
z1 Matches devices in zone 1
z1-1.2.3.4 Matches devices in zone 1 with the ip 1.2.3.4
1.2.3.4 Matches devices in any zone with the ip 1.2.3.4
z1:5678 Matches devices in zone 1 using port 5678
:5678 Matches devices that use port 5678
R5.6.7.8 Matches devices that use replication ip 5.6.7.8
R:5678 Matches devices that use replication port 5678
1.2.3.4R5.6.7.8 Matches devices that use ip 1.2.3.4 and replication ip
5.6.7.8
/sdb1 Matches devices with the device name sdb1
_shiny Matches devices with shiny in the meta data
_"snet: 5.6.7.8" Matches devices with snet: 5.6.7.8 in the meta data
[::1] Matches devices in any zone with the ip ::1
z1-[::1]:5678 Matches devices in zone 1 with ip ::1 and port 5678
Most specific example::
d74r4z1-1.2.3.4:5678/sdb1_"snet: 5.6.7.8"
Nerd explanation:
All items require their single character prefix except the ip, in which
case the - is optional unless the device id or zone is also included.
"""
orig_search_value = search_value
match = {}
if search_value.startswith('d'):
i = 1
while i < len(search_value) and search_value[i].isdigit():
i += 1
match['id'] = int(search_value[1:i])
search_value = search_value[i:]
if search_value.startswith('r'):
i = 1
while i < len(search_value) and search_value[i].isdigit():
i += 1
match['region'] = int(search_value[1:i])
search_value = search_value[i:]
if search_value.startswith('z'):
i = 1
while i < len(search_value) and search_value[i].isdigit():
i += 1
match['zone'] = int(search_value[1:i])
search_value = search_value[i:]
if search_value.startswith('-'):
search_value = search_value[1:]
if search_value and search_value[0].isdigit():
i = 1
while i < len(search_value) and search_value[i] in '0123456789.':
i += 1
match['ip'] = search_value[:i]
search_value = search_value[i:]
elif search_value and search_value.startswith('['):
i = 1
while i < len(search_value) and search_value[i] != ']':
i += 1
i += 1
match['ip'] = search_value[:i].lstrip('[').rstrip(']')
search_value = search_value[i:]
if 'ip' in match:
# ipv6 addresses are converted to all lowercase
# and use the fully expanded representation
match['ip'] = validate_and_normalize_ip(match['ip'])
if search_value.startswith(':'):
i = 1
while i < len(search_value) and search_value[i].isdigit():
i += 1
match['port'] = int(search_value[1:i])
search_value = search_value[i:]
# replication parameters
if search_value.startswith('R'):
search_value = search_value[1:]
if search_value and search_value[0].isdigit():
i = 1
while (i < len(search_value) and
search_value[i] in '0123456789.'):
i += 1
match['replication_ip'] = search_value[:i]
search_value = search_value[i:]
elif search_value and search_value.startswith('['):
i = 1
while i < len(search_value) and search_value[i] != ']':
i += 1
i += 1
match['replication_ip'] = search_value[:i].lstrip('[').rstrip(']')
search_value = search_value[i:]
if 'replication_ip' in match:
# ipv6 addresses are converted to all lowercase
# and use the fully expanded representation
match['replication_ip'] = \
validate_and_normalize_ip(match['replication_ip'])
if search_value.startswith(':'):
i = 1
while i < len(search_value) and search_value[i].isdigit():
i += 1
match['replication_port'] = int(search_value[1:i])
search_value = search_value[i:]
if search_value.startswith('/'):
i = 1
while i < len(search_value) and search_value[i] != '_':
i += 1
match['device'] = search_value[1:i]
search_value = search_value[i:]
if search_value.startswith('_'):
match['meta'] = search_value[1:]
search_value = ''
if search_value:
raise ValueError('Invalid <search-value>: %s' %
repr(orig_search_value))
return match
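# Illustrative example (added for clarity, not part of the original module):
#
#   >>> parse_search_value('d74z1-1.2.3.4:5678/sdb1')
#   {'id': 74, 'zone': 1, 'ip': '1.2.3.4', 'port': 5678, 'device': 'sdb1'}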
def parse_search_values_from_opts(opts):
"""
Convert optparse style options into a dictionary for searching.
:param opts: optparse style options
:returns: a dictionary with search values to filter devices,
supported parameters are id, region, zone, ip, port,
replication_ip, replication_port, device, weight, meta
"""
search_values = {}
for key in ('id', 'region', 'zone', 'ip', 'port', 'replication_ip',
'replication_port', 'device', 'weight', 'meta'):
value = getattr(opts, key, None)
if value:
if key == 'ip' or key == 'replication_ip':
value = validate_and_normalize_address(value)
search_values[key] = value
return search_values
def parse_change_values_from_opts(opts):
"""
Convert optparse style options into a dictionary for changing.
:param opts: optparse style options
    :returns: a dictionary with change values to apply to devices,
supported parameters are ip, port, replication_ip,
replication_port
"""
change_values = {}
for key in ('change_ip', 'change_port', 'change_replication_ip',
'change_replication_port', 'change_device', 'change_meta'):
value = getattr(opts, key, None)
if value:
if key == 'change_ip' or key == 'change_replication_ip':
value = validate_and_normalize_address(value)
change_values[key.replace('change_', '')] = value
return change_values
def parse_add_value(add_value):
"""
Convert an add value, like 'r1z2-10.1.2.3:7878/sdf', to a dictionary.
If the string does not start with 'r<N>', then the value of 'region' in
the returned dictionary will be None. Callers should check for this and
set a reasonable default. This is done so callers can emit errors or
warnings if desired.
Similarly, 'replication_ip' and 'replication_port' will be None if not
specified.
:returns: dictionary with keys 'region', 'zone', 'ip', 'port', 'device',
'replication_ip', 'replication_port', 'meta'
:raises ValueError: if add_value is malformed
"""
region = None
rest = add_value
if add_value.startswith('r'):
i = 1
while i < len(add_value) and add_value[i].isdigit():
i += 1
region = int(add_value[1:i])
rest = add_value[i:]
if not rest.startswith('z'):
raise ValueError('Invalid add value: %s' % add_value)
i = 1
while i < len(rest) and rest[i].isdigit():
i += 1
zone = int(rest[1:i])
rest = rest[i:]
if not rest.startswith('-'):
raise ValueError('Invalid add value: %s' % add_value)
ip, port, rest = parse_address(rest[1:])
replication_ip = replication_port = None
if rest.startswith('R'):
replication_ip, replication_port, rest = \
parse_address(rest[1:])
if not rest.startswith('/'):
raise ValueError(
'Invalid add value: %s' % add_value)
i = 1
while i < len(rest) and rest[i] != '_':
i += 1
device_name = rest[1:i]
if not validate_device_name(device_name):
raise ValueError('Invalid device name')
rest = rest[i:]
meta = ''
if rest.startswith('_'):
meta = rest[1:]
return {'region': region, 'zone': zone, 'ip': ip, 'port': port,
'device': device_name, 'replication_ip': replication_ip,
'replication_port': replication_port, 'meta': meta}
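# Illustrative example (added for clarity, not part of the original module):
#
#   >>> parse_add_value('r1z2-10.1.2.3:7878/sdf')
#   {'region': 1, 'zone': 2, 'ip': '10.1.2.3', 'port': 7878, 'device': 'sdf',
#    'replication_ip': None, 'replication_port': None, 'meta': ''}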
def parse_address(rest):
if rest.startswith('['):
# remove first [] for ip
rest = rest.replace('[', '', 1).replace(']', '', 1)
pos = 0
while (pos < len(rest) and
not (rest[pos] == 'R' or rest[pos] == '/')):
pos += 1
address = rest[:pos]
rest = rest[pos:]
port_start = address.rfind(':')
if port_start == -1:
raise ValueError('Invalid port in add value')
ip = address[:port_start]
try:
port = int(address[(port_start + 1):])
except (TypeError, ValueError):
raise ValueError(
'Invalid port %s in add value' % address[port_start:])
# if this is an ipv6 address then we want to convert it
# to all lowercase and use its fully expanded representation
# to make searches easier
ip = validate_and_normalize_ip(ip)
return (ip, port, rest)
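# Illustrative example (added for clarity, not part of the original module):
#
#   >>> parse_address('10.1.2.3:7878/sdf')
#   ('10.1.2.3', 7878, '/sdf')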
def validate_args(argvish):
"""
    Build an OptionParser, parse the arguments and determine whether they
    use the new command-line format.
"""
opts, args = parse_args(argvish)
# id can be 0 (swift starts generating id from 0),
# also zone, region and weight can be set to zero.
new_cmd_format = opts.id is not None or opts.region is not None or \
opts.zone is not None or opts.ip or opts.port or \
opts.replication_ip or opts.replication_port or \
opts.device or opts.weight is not None or opts.meta
return (new_cmd_format, opts, args)
def parse_args(argvish):
"""
Build OptionParser and evaluate command line arguments.
"""
parser = optparse.OptionParser()
parser.add_option('-u', '--id', type="int",
help="Device ID")
parser.add_option('-r', '--region', type="int",
help="Region")
parser.add_option('-z', '--zone', type="int",
help="Zone")
parser.add_option('-i', '--ip', type="string",
help="IP address")
parser.add_option('-p', '--port', type="int",
help="Port number")
parser.add_option('-j', '--replication-ip', type="string",
help="Replication IP address")
parser.add_option('-q', '--replication-port', type="int",
help="Replication port number")
parser.add_option('-d', '--device', type="string",
help="Device name (e.g. md0, sdb1)")
parser.add_option('-w', '--weight', type="float",
help="Device weight")
parser.add_option('-m', '--meta', type="string", default="",
help="Extra device info (just a string)")
parser.add_option('-I', '--change-ip', type="string",
help="IP address for change")
parser.add_option('-P', '--change-port', type="int",
help="Port number for change")
parser.add_option('-J', '--change-replication-ip', type="string",
help="Replication IP address for change")
parser.add_option('-Q', '--change-replication-port', type="int",
help="Replication port number for change")
parser.add_option('-D', '--change-device', type="string",
help="Device name (e.g. md0, sdb1) for change")
parser.add_option('-M', '--change-meta', type="string", default="",
help="Extra device info (just a string) for change")
parser.add_option('-y', '--yes', default=False, action="store_true",
help="Assume a yes response to all questions")
return parser.parse_args(argvish)
def parse_builder_ring_filename_args(argvish):
first_arg = argvish[1]
if first_arg.endswith('.ring.gz'):
ring_file = first_arg
builder_file = first_arg[:-len('.ring.gz')] + '.builder'
else:
builder_file = first_arg
if not builder_file.endswith('.builder'):
ring_file = first_arg
else:
ring_file = builder_file[:-len('.builder')]
ring_file += '.ring.gz'
return builder_file, ring_file
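# Illustrative example (added for clarity, not part of the original module);
# argvish[1] may name either file of the builder/ring pair:
#
#   >>> parse_builder_ring_filename_args(['swift-ring-builder', 'object.builder'])
#   ('object.builder', 'object.ring.gz')
#   >>> parse_builder_ring_filename_args(['swift-ring-builder', 'object.ring.gz'])
#   ('object.builder', 'object.ring.gz')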
def build_dev_from_opts(opts):
"""
    Convert optparse style options into a device dictionary.
"""
for attribute, shortopt, longopt in (['region', '-r', '--region'],
['zone', '-z', '--zone'],
['ip', '-i', '--ip'],
['port', '-p', '--port'],
['device', '-d', '--device'],
['weight', '-w', '--weight']):
if getattr(opts, attribute, None) is None:
raise ValueError('Required argument %s/%s not specified.' %
(shortopt, longopt))
ip = validate_and_normalize_address(opts.ip)
replication_ip = validate_and_normalize_address(
(opts.replication_ip or opts.ip))
replication_port = opts.replication_port or opts.port
if not validate_device_name(opts.device):
raise ValueError('Invalid device name')
return {'region': opts.region, 'zone': opts.zone, 'ip': ip,
'port': opts.port, 'device': opts.device, 'meta': opts.meta,
'replication_ip': replication_ip,
'replication_port': replication_port, 'weight': opts.weight}
def dispersion_report(builder, search_filter=None,
verbose=False, recalculate=False):
if recalculate or not builder._dispersion_graph:
builder._build_dispersion_graph()
max_allowed_replicas = builder._build_max_replicas_by_tier()
worst_tier = None
max_dispersion = 0.0
sorted_graph = []
for tier, replica_counts in sorted(builder._dispersion_graph.items()):
tier_name = get_tier_name(tier, builder)
if search_filter and not re.match(search_filter, tier_name):
continue
max_replicas = int(max_allowed_replicas[tier])
at_risk_parts = sum(replica_counts[i] * (i - max_replicas)
for i in range(max_replicas + 1,
len(replica_counts)))
placed_parts = sum(replica_counts[i] * i for i in range(
1, len(replica_counts)))
tier_dispersion = 100.0 * at_risk_parts / placed_parts
if tier_dispersion > max_dispersion:
max_dispersion = tier_dispersion
worst_tier = tier_name
if not verbose:
continue
tier_report = {
'max_replicas': max_replicas,
'placed_parts': placed_parts,
'dispersion': tier_dispersion,
'replicas': replica_counts,
}
sorted_graph.append((tier_name, tier_report))
return {
'max_dispersion': max_dispersion,
'worst_tier': worst_tier,
'graph': sorted_graph,
}
def validate_replicas_by_tier(replicas, replicas_by_tier):
"""
Validate the sum of the replicas at each tier.
The sum of the replicas at each tier should be less than or very close to
    the upper limit indicated by replicas.
    :param replicas: float, the upper limit of replicas
    :param replicas_by_tier: defaultdict, the replicas by tier
"""
tiers = ['cluster', 'regions', 'zones', 'servers', 'devices']
for i, tier_name in enumerate(tiers):
replicas_at_tier = sum(replicas_by_tier[t] for t in
replicas_by_tier if len(t) == i)
if abs(replicas - replicas_at_tier) > 1e-10:
raise exceptions.RingValidationError(
'%s != %s at tier %s' % (
replicas_at_tier, replicas, tier_name))
def format_device(region=None, zone=None, ip=None, device=None, **kwargs):
"""
Convert device dict or tier attributes to a representative string.
:returns: a string, the normalized format of a device tier
"""
return "r%sz%s-%s/%s" % (region, zone, ip, device)
def get_tier_name(tier, builder):
if len(tier) == 1:
return "r%s" % (tier[0], )
if len(tier) == 2:
return "r%sz%s" % (tier[0], tier[1])
if len(tier) == 3:
return "r%sz%s-%s" % (tier[0], tier[1], tier[2])
if len(tier) == 4:
device = builder.devs[tier[3]] or {}
return format_device(tier[0], tier[1], tier[2], device.get(
'device', 'IDd%s' % tier[3]))
def validate_device_name(device_name):
return not (
device_name.startswith(' ') or
device_name.endswith(' ') or
len(device_name) == 0)
def pretty_dev(device):
return format_device(**device)
| smerritt/swift | swift/common/ring/utils.py | Python | apache-2.0 | 26,082 |
# Copyright 2014 Xinyu, He <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import inspect
import logging
import logging.handlers
file_name = 'log/home_debug.log'
debug_logger = logging.getLogger('DebugLog')
handler = logging.handlers.RotatingFileHandler(file_name, maxBytes=50*1024*1024)
formatter = logging.Formatter("%(asctime)s - [%(filename)s:%(lineno)s - %(funcName)20s() ] %(message)s")
handler.setFormatter(formatter)
debug_logger.setLevel(logging.DEBUG)
debug_logger.addHandler(handler)
debug_logger.propagate = False # now if you use logger it will not log to console.
comm_name = 'log/home.log'
comm_logger = logging.getLogger('CommonLog')
handler = logging.handlers.RotatingFileHandler(comm_name, maxBytes=20*1024*1024)
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s [%(filename)s - %(funcName)s] ')
handler.setFormatter(formatter)
comm_logger.setLevel(logging.INFO)
comm_logger.addHandler(handler)
# comm_logger.propagate = False # now if you use logger it will not log to console.
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s: %(message)s')
# def stack_info_debug(info):
# stack_info = inspect.currentframe().f_back.f_code.co_name
# debug_logger.debug("%s: %s" % (stack_info, info))
DEBUG = debug_logger.debug
# DEBUG = stack_info_debug # only output to file
INFO = comm_logger.info
WARN = comm_logger.warning
ERROR = comm_logger.error
CRITICAL = comm_logger.critical
FDEBUG = debug_logger.debug
FINFO = debug_logger.info
FWARN = debug_logger.warning
FERROR = debug_logger.error
FCRITICAL = debug_logger.critical
EXCEPTION = comm_logger.exception
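# Illustrative usage (added for clarity, not part of the original module;
# the import path below is an assumption based on this file's location):
#
#   from util.log import INFO, DEBUG
#   INFO("service started")      # written to log/home.log and echoed to the console
#   DEBUG("raw sensor payload")  # written to log/home_debug.log only (propagate=False)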
| fangjing828/LEHome | util/log.py | Python | apache-2.0 | 2,186 |
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Functions and classes dedicated to rate limiting requests.
This module handles rate limiting at a per-user level, so it should not be used
to prevent intentional Denial of Service attacks, as we can assume a DOS can
easily come through multiple user accounts. DOS protection should be done at a
different layer. Instead this module should be used to protect against
unintentional user actions. With that in mind the limits set here should be
high enough as to not rate-limit any intentional actions.
To find good rate-limit values, check how long requests are taking (see logs)
in your environment to assess your capabilities and multiply out to get
figures.
NOTE: As the rate-limiting here is done in memory, this only works per
process (each process will have its own rate limiting counter).
"""
import collections
import copy
import httplib
import math
import re
import time
from oslo.serialization import jsonutils
from oslo.utils import importutils
import webob.dec
import webob.exc
from nova.api.openstack.compute.views import limits as limits_views
from nova.api.openstack import wsgi
from nova.i18n import _
from nova import quota
from nova import utils
from nova import wsgi as base_wsgi
QUOTAS = quota.QUOTAS
LIMITS_PREFIX = "limits."
class LimitsController(object):
"""Controller for accessing limits in the OpenStack API."""
def index(self, req):
"""Return all global and rate limit information."""
context = req.environ['nova.context']
project_id = req.params.get('tenant_id', context.project_id)
quotas = QUOTAS.get_project_quotas(context, project_id,
usages=False)
abs_limits = dict((k, v['limit']) for k, v in quotas.items())
rate_limits = req.environ.get("nova.limits", [])
builder = self._get_view_builder(req)
return builder.build(rate_limits, abs_limits)
def create(self, req, body):
"""Create a new limit."""
raise webob.exc.HTTPNotImplemented()
def delete(self, req, id):
"""Delete the limit."""
raise webob.exc.HTTPNotImplemented()
def detail(self, req):
"""Return limit details."""
raise webob.exc.HTTPNotImplemented()
def show(self, req, id):
"""Show limit information."""
raise webob.exc.HTTPNotImplemented()
def update(self, req, id, body):
"""Update existing limit."""
raise webob.exc.HTTPNotImplemented()
def _get_view_builder(self, req):
return limits_views.ViewBuilder()
def create_resource():
return wsgi.Resource(LimitsController())
class Limit(object):
"""Stores information about a limit for HTTP requests."""
UNITS = dict([(v, k) for k, v in utils.TIME_UNITS.items()])
def __init__(self, verb, uri, regex, value, unit):
"""Initialize a new `Limit`.
@param verb: HTTP verb (POST, PUT, etc.)
@param uri: Human-readable URI
@param regex: Regular expression format for this limit
@param value: Integer number of requests which can be made
@param unit: Unit of measure for the value parameter
"""
self.verb = verb
self.uri = uri
self.regex = regex
self.value = int(value)
self.unit = unit
self.unit_string = self.display_unit().lower()
self.remaining = int(value)
if value <= 0:
raise ValueError("Limit value must be > 0")
self.last_request = None
self.next_request = None
self.water_level = 0
self.capacity = self.unit
self.request_value = float(self.capacity) / float(self.value)
msg = (_("Only %(value)s %(verb)s request(s) can be "
"made to %(uri)s every %(unit_string)s.") %
{'value': self.value, 'verb': self.verb, 'uri': self.uri,
'unit_string': self.unit_string})
self.error_message = msg
def __call__(self, verb, url):
"""Represents a call to this limit from a relevant request.
@param verb: string http verb (POST, GET, etc.)
@param url: string URL
"""
if self.verb != verb or not re.match(self.regex, url):
return
now = self._get_time()
if self.last_request is None:
self.last_request = now
leak_value = now - self.last_request
self.water_level -= leak_value
self.water_level = max(self.water_level, 0)
self.water_level += self.request_value
difference = self.water_level - self.capacity
self.last_request = now
if difference > 0:
self.water_level -= self.request_value
self.next_request = now + difference
return difference
cap = self.capacity
water = self.water_level
val = self.value
self.remaining = math.floor(((cap - water) / cap) * val)
self.next_request = now
def _get_time(self):
"""Retrieve the current time. Broken out for testability."""
return time.time()
def display_unit(self):
"""Display the string name of the unit."""
return self.UNITS.get(self.unit, "UNKNOWN")
def display(self):
"""Return a useful representation of this class."""
return {
"verb": self.verb,
"URI": self.uri,
"regex": self.regex,
"value": self.value,
"remaining": int(self.remaining),
"unit": self.display_unit(),
"resetTime": int(self.next_request or self._get_time()),
}
# "Limit" format is a dictionary with the HTTP verb, human-readable URI,
# a regular-expression to match, value and unit of measure (PER_DAY, etc.)
DEFAULT_LIMITS = [
Limit("POST", "*", ".*", 120, utils.TIME_UNITS['MINUTE']),
Limit("POST", "*/servers", "^/servers", 120, utils.TIME_UNITS['MINUTE']),
Limit("PUT", "*", ".*", 120, utils.TIME_UNITS['MINUTE']),
Limit("GET", "*changes-since*", ".*changes-since.*", 120,
utils.TIME_UNITS['MINUTE']),
Limit("DELETE", "*", ".*", 120, utils.TIME_UNITS['MINUTE']),
Limit("GET", "*/os-fping", "^/os-fping", 12, utils.TIME_UNITS['MINUTE']),
]
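# Illustrative note (added for clarity, not part of the original module):
# Limit.__call__ implements a leaky-bucket check. For
# Limit("POST", "*", ".*", 120, utils.TIME_UNITS['MINUTE']) the capacity is 60,
# each matching request adds 60 / 120 = 0.5 to the water level, and the level
# drains by 1.0 per elapsed second; when a request would push the level past
# the capacity, the overflow is returned as the delay in seconds.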
class RateLimitingMiddleware(base_wsgi.Middleware):
"""Rate-limits requests passing through this middleware. All limit
information is stored in memory for this implementation.
"""
def __init__(self, application, limits=None, limiter=None, **kwargs):
"""Initialize new `RateLimitingMiddleware`.
It wraps the given WSGI application and sets up the given limits.
@param application: WSGI application to wrap
@param limits: String describing limits
@param limiter: String identifying class for representing limits
Other parameters are passed to the constructor for the limiter.
"""
base_wsgi.Middleware.__init__(self, application)
# Select the limiter class
if limiter is None:
limiter = Limiter
else:
limiter = importutils.import_class(limiter)
# Parse the limits, if any are provided
if limits is not None:
limits = limiter.parse_limits(limits)
self._limiter = limiter(limits or DEFAULT_LIMITS, **kwargs)
@webob.dec.wsgify(RequestClass=wsgi.Request)
def __call__(self, req):
"""Represents a single call through this middleware.
We should record the request if we have a limit relevant to it.
If no limit is relevant to the request, ignore it.
If the request should be rate limited, return a fault telling the user
they are over the limit and need to retry later.
"""
verb = req.method
url = req.url
context = req.environ.get("nova.context")
if context:
username = context.user_id
else:
username = None
delay, error = self._limiter.check_for_delay(verb, url, username)
if delay:
msg = _("This request was rate-limited.")
retry = time.time() + delay
return wsgi.RateLimitFault(msg, error, retry)
req.environ["nova.limits"] = self._limiter.get_limits(username)
return self.application
class Limiter(object):
"""Rate-limit checking class which handles limits in memory."""
def __init__(self, limits, **kwargs):
"""Initialize the new `Limiter`.
@param limits: List of `Limit` objects
"""
self.limits = copy.deepcopy(limits)
self.levels = collections.defaultdict(lambda: copy.deepcopy(limits))
# Pick up any per-user limit information
for key, value in kwargs.items():
if key.startswith(LIMITS_PREFIX):
username = key[len(LIMITS_PREFIX):]
self.levels[username] = self.parse_limits(value)
def get_limits(self, username=None):
"""Return the limits for a given user."""
return [limit.display() for limit in self.levels[username]]
def check_for_delay(self, verb, url, username=None):
"""Check the given verb/user/user triplet for limit.
@return: Tuple of delay (in seconds) and error message (or None, None)
"""
delays = []
for limit in self.levels[username]:
delay = limit(verb, url)
if delay:
delays.append((delay, limit.error_message))
if delays:
delays.sort()
return delays[0]
return None, None
# Note: This method gets called before the class is instantiated,
# so this must be either a static method or a class method. It is
# used to develop a list of limits to feed to the constructor. We
# put this in the class so that subclasses can override the
# default limit parsing.
@staticmethod
def parse_limits(limits):
"""Convert a string into a list of Limit instances. This
implementation expects a semicolon-separated sequence of
parenthesized groups, where each group contains a
comma-separated sequence consisting of HTTP method,
user-readable URI, a URI reg-exp, an integer number of
requests which can be made, and a unit of measure. Valid
values for the latter are "SECOND", "MINUTE", "HOUR", and
"DAY".
@return: List of Limit instances.
"""
# Handle empty limit strings
limits = limits.strip()
if not limits:
return []
# Split up the limits by semicolon
result = []
for group in limits.split(';'):
group = group.strip()
if group[:1] != '(' or group[-1:] != ')':
raise ValueError("Limit rules must be surrounded by "
"parentheses")
group = group[1:-1]
# Extract the Limit arguments
args = [a.strip() for a in group.split(',')]
if len(args) != 5:
raise ValueError("Limit rules must contain the following "
"arguments: verb, uri, regex, value, unit")
# Pull out the arguments
verb, uri, regex, value, unit = args
# Upper-case the verb
verb = verb.upper()
# Convert value--raises ValueError if it's not integer
value = int(value)
# Convert unit
unit = unit.upper()
if unit not in utils.TIME_UNITS:
raise ValueError("Invalid units specified")
unit = utils.TIME_UNITS[unit]
# Build a limit
result.append(Limit(verb, uri, regex, value, unit))
return result
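# Illustrative example (added for clarity, not part of the original module):
#
#   >>> rules = Limiter.parse_limits("(POST, *, .*, 10, MINUTE);(GET, *, .*, 60, HOUR)")
#   >>> [(r.verb, r.value, r.display_unit()) for r in rules]
#   [('POST', 10, 'MINUTE'), ('GET', 60, 'HOUR')]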
class WsgiLimiter(object):
"""Rate-limit checking from a WSGI application. Uses an in-memory
`Limiter`.
To use, POST ``/<username>`` with JSON data such as::
{
"verb" : GET,
"path" : "/servers"
}
and receive a 204 No Content, or a 403 Forbidden with an X-Wait-Seconds
header containing the number of seconds to wait before the action would
succeed.
"""
def __init__(self, limits=None):
"""Initialize the new `WsgiLimiter`.
@param limits: List of `Limit` objects
"""
self._limiter = Limiter(limits or DEFAULT_LIMITS)
@webob.dec.wsgify(RequestClass=wsgi.Request)
def __call__(self, request):
"""Handles a call to this application.
Returns 204 if the request is acceptable to the limiter, else a 403
is returned with a relevant header indicating when the request *will*
succeed.
"""
if request.method != "POST":
raise webob.exc.HTTPMethodNotAllowed()
try:
info = dict(jsonutils.loads(request.body))
except ValueError:
raise webob.exc.HTTPBadRequest()
username = request.path_info_pop()
verb = info.get("verb")
path = info.get("path")
delay, error = self._limiter.check_for_delay(verb, path, username)
if delay:
headers = {"X-Wait-Seconds": "%.2f" % delay}
return webob.exc.HTTPForbidden(headers=headers, explanation=error)
else:
return webob.exc.HTTPNoContent()
class WsgiLimiterProxy(object):
"""Rate-limit requests based on answers from a remote source."""
def __init__(self, limiter_address):
"""Initialize the new `WsgiLimiterProxy`.
@param limiter_address: IP/port combination of where to request limit
"""
self.limiter_address = limiter_address
def check_for_delay(self, verb, path, username=None):
body = jsonutils.dumps({"verb": verb, "path": path})
headers = {"Content-Type": "application/json"}
conn = httplib.HTTPConnection(self.limiter_address)
if username:
conn.request("POST", "/%s" % (username), body, headers)
else:
conn.request("POST", "/", body, headers)
resp = conn.getresponse()
        if 200 <= resp.status < 300:
return None, None
return resp.getheader("X-Wait-Seconds"), resp.read() or None
# Note: This method gets called before the class is instantiated,
# so this must be either a static method or a class method. It is
# used to develop a list of limits to feed to the constructor.
# This implementation returns an empty list, since all limit
# decisions are made by a remote server.
@staticmethod
def parse_limits(limits):
"""Ignore a limits string--simply doesn't apply for the limit
proxy.
@return: Empty list.
"""
return []
| silenceli/nova | nova/api/openstack/compute/limits.py | Python | apache-2.0 | 15,344 |
#!/usr/bin/env python
"""HTTP API logic that ties API call renderers with HTTP routes."""
import json
from django import http
from werkzeug import exceptions as werkzeug_exceptions
from werkzeug import routing
import logging
from grr.gui import api_call_renderers
from grr.lib import access_control
from grr.lib import rdfvalue
from grr.lib import registry
def BuildToken(request, execution_time):
"""Build an ACLToken from the request."""
if request.method == "GET":
reason = request.GET.get("reason", "")
elif request.method == "POST":
reason = request.POST.get("reason", "")
token = access_control.ACLToken(
username=request.user,
reason=reason,
process="GRRAdminUI",
expiry=rdfvalue.RDFDatetime().Now() + execution_time)
for field in ["REMOTE_ADDR", "HTTP_X_FORWARDED_FOR"]:
remote_addr = request.META.get(field, "")
if remote_addr:
token.source_ips.append(remote_addr)
return token
HTTP_ROUTING_MAP = routing.Map()
def RegisterHttpRouteHandler(method, route, renderer_cls):
"""Registers given ApiCallRenderer for given method and route."""
HTTP_ROUTING_MAP.add(routing.Rule(
route, methods=[method],
endpoint=renderer_cls))
def GetRendererForHttpRequest(request):
"""Returns a renderer to handle given HTTP request."""
matcher = HTTP_ROUTING_MAP.bind("%s:%s" % (request.environ["SERVER_NAME"],
request.environ["SERVER_PORT"]))
try:
match = matcher.match(request.path, request.method)
except werkzeug_exceptions.NotFound:
raise api_call_renderers.ApiCallRendererNotFoundError(
"No API renderer was found for (%s) %s" % (request.path,
request.method))
renderer_cls, route_args = match
return (renderer_cls(), route_args)
def FillAdditionalArgsFromRequest(request, supported_types):
"""Creates arguments objects from a given request dictionary."""
results = {}
for key, value in request.items():
try:
request_arg_type, request_attr = key.split(".", 1)
except ValueError:
continue
arg_class = None
for key, supported_type in supported_types.items():
if key == request_arg_type:
arg_class = supported_type
if arg_class:
if request_arg_type not in results:
results[request_arg_type] = arg_class()
results[request_arg_type].Set(request_attr, value)
results_list = []
for name, arg_obj in results.items():
additional_args = rdfvalue.ApiCallAdditionalArgs(
name=name, type=supported_types[name].__name__)
additional_args.args = arg_obj
results_list.append(additional_args)
return results_list
class JSONEncoderWithRDFPrimitivesSupport(json.JSONEncoder):
"""Custom JSON encoder that encodes renderers output.
Custom encoder is required to facilitate usage of primitive values -
booleans, integers and strings - in renderers responses.
  If a renderer references an RDFString, RDFInteger or an RDFBool when building a
  response, JSON encoding of that response will fail,
unless this custom encoder is used. Another way to solve this issue would be
to explicitly call api_value_renderers.RenderValue on every value returned
from the renderer, but it will make the code look overly verbose and dirty.
"""
def default(self, obj):
if isinstance(obj, (rdfvalue.RDFInteger,
rdfvalue.RDFBool,
rdfvalue.RDFString)):
return obj.SerializeToDataStore()
return json.JSONEncoder.default(self, obj)
def BuildResponse(status, rendered_data):
"""Builds HTTPResponse object from rendered data and HTTP status."""
response = http.HttpResponse(status=status, content_type="application/json")
response.write(")]}'\n") # XSSI protection
response.write(json.dumps(rendered_data,
cls=JSONEncoderWithRDFPrimitivesSupport))
return response
def RenderHttpResponse(request):
"""Handles given HTTP request with one of the available API renderers."""
renderer, route_args = GetRendererForHttpRequest(request)
if request.method == "GET":
if renderer.args_type:
unprocessed_request = request.GET
if hasattr(unprocessed_request, "dict"):
unprocessed_request = unprocessed_request.dict()
args = renderer.args_type()
for type_info in args.type_infos:
if type_info.name in route_args:
args.Set(type_info.name, route_args[type_info.name])
elif type_info.name in unprocessed_request:
args.Set(type_info.name, unprocessed_request[type_info.name])
if renderer.additional_args_types:
if not hasattr(args, "additional_args"):
raise RuntimeError("Renderer %s defines additional arguments types "
"but its arguments object does not have "
"'additional_args' field." % renderer)
if hasattr(renderer.additional_args_types, "__call__"):
additional_args_types = renderer.additional_args_types()
else:
additional_args_types = renderer.additional_args_types
args.additional_args = FillAdditionalArgsFromRequest(
unprocessed_request, additional_args_types)
else:
args = None
elif request.method == "POST":
try:
payload = json.loads(request.body)
args = renderer.args_type(**payload)
except Exception as e: # pylint: disable=broad-except
response = http.HttpResponse(status=500)
response.write(")]}'\n") # XSSI protection
response.write(json.dumps(dict(message=str(e))))
logging.exception(
"Error while parsing POST request %s (%s): %s",
request.path, request.method, e)
return response
else:
raise RuntimeError("Unsupported method: %s." % request.method)
token = BuildToken(request, renderer.max_execution_time)
try:
rendered_data = api_call_renderers.HandleApiCall(renderer, args,
token=token)
return BuildResponse(200, rendered_data)
except Exception as e: # pylint: disable=broad-except
logging.exception(
"Error while processing %s (%s) with %s: %s", request.path,
request.method, renderer.__class__.__name__, e)
return BuildResponse(500, dict(message=str(e)))
class HttpApiInitHook(registry.InitHook):
"""Register HTTP API renderers."""
def RunOnce(self):
# Doing late import to avoid circular dependency (http_api.py is referenced
# by api_plugins/docs.py).
#
# pylint: disable=g-import-not-at-top
from grr.gui import api_plugins
# pylint: enable=g-import-not-at-top
# The list is alphabetized by route.
RegisterHttpRouteHandler("GET", "/api/aff4/<path:aff4_path>",
api_plugins.aff4.ApiAff4Renderer)
RegisterHttpRouteHandler("GET", "/api/aff4-index/<path:aff4_path>",
api_plugins.aff4.ApiAff4IndexRenderer)
RegisterHttpRouteHandler("GET", "/api/artifacts",
api_plugins.artifact.ApiArtifactRenderer)
RegisterHttpRouteHandler("GET", "/api/clients",
api_plugins.client.ApiClientSearchRenderer)
RegisterHttpRouteHandler("GET", "/api/clients/<client_id>",
api_plugins.client.ApiClientSummaryRenderer)
RegisterHttpRouteHandler("GET", "/api/clients/labels",
api_plugins.client.ApiClientsLabelsListRenderer)
RegisterHttpRouteHandler("POST", "/api/clients/labels/add",
api_plugins.client.ApiClientsAddLabelsRenderer)
RegisterHttpRouteHandler("POST", "/api/clients/labels/remove",
api_plugins.client.ApiClientsRemoveLabelsRenderer)
RegisterHttpRouteHandler("GET", "/api/config",
api_plugins.config.ApiConfigRenderer)
RegisterHttpRouteHandler("GET", "/api/docs",
api_plugins.docs.ApiDocsRenderer)
RegisterHttpRouteHandler("GET", "/api/flows/<client_id>/<flow_id>/status",
api_plugins.client.ApiFlowStatusRenderer)
RegisterHttpRouteHandler("GET", "/api/hunts",
api_plugins.hunt.ApiHuntsListRenderer)
RegisterHttpRouteHandler("GET", "/api/hunts/<hunt_id>",
api_plugins.hunt.ApiHuntSummaryRenderer)
RegisterHttpRouteHandler("GET", "/api/hunts/<hunt_id>/errors",
api_plugins.hunt.ApiHuntErrorsRenderer)
RegisterHttpRouteHandler("GET", "/api/hunts/<hunt_id>/log",
api_plugins.hunt.ApiHuntLogRenderer)
RegisterHttpRouteHandler(
"GET", "/api/reflection/rdfvalue/<type>",
api_plugins.reflection.ApiRDFValueReflectionRenderer)
RegisterHttpRouteHandler(
"GET", "/api/reflection/rdfvalue/all",
api_plugins.reflection.ApiAllRDFValuesReflectionRenderer)
RegisterHttpRouteHandler(
"GET", "/api/stats/store/<component>/metadata",
api_plugins.stats.ApiStatsStoreMetricsMetadataRenderer)
RegisterHttpRouteHandler(
"GET", "/api/stats/store/<component>/metrics/<metric_name>",
api_plugins.stats.ApiStatsStoreMetricRenderer)
RegisterHttpRouteHandler("GET", "/api/users/me/settings",
api_plugins.user.ApiUserSettingsRenderer)
RegisterHttpRouteHandler("POST", "/api/users/me/settings",
api_plugins.user.ApiSetUserSettingsRenderer)
| ksmaheshkumar/grr | gui/http_api.py | Python | apache-2.0 | 9,614 |
from cloudify.workflows import ctx, parameters
ctx.logger.info(parameters.node_id)
instance = [n for n in ctx.node_instances
if n.node_id == parameters.node_id][0]
for relationship in instance.relationships:
relationship.execute_source_operation('custom_lifecycle.custom_operation')
| cloudify-cosmo/cloudify-manager | tests/integration_tests/resources/dsl/deployment_update/modify_relationship_operation/modification/custom_workflow.py | Python | apache-2.0 | 304 |
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
class TestQuery(unittest2.TestCase):
def _getTargetClass(self):
from gcloud.datastore.query import Query
return Query
def _makeOne(self, *args, **kw):
return self._getTargetClass()(*args, **kw)
def test_ctor_defaults_wo_implicit_dataset_id(self):
self.assertRaises(ValueError, self._makeOne)
def test_ctor_defaults_w_implicit_dataset_id(self):
from gcloud._testing import _Monkey
from gcloud.datastore import _implicit_environ
_DATASET = 'DATASET'
with _Monkey(_implicit_environ, DATASET_ID=_DATASET):
query = self._makeOne()
self.assertEqual(query.dataset_id, _DATASET)
self.assertEqual(query.kind, None)
self.assertEqual(query.namespace, None)
self.assertEqual(query.ancestor, None)
self.assertEqual(query.filters, [])
self.assertEqual(query.projection, [])
self.assertEqual(query.order, [])
self.assertEqual(query.group_by, [])
def test_ctor_explicit(self):
from gcloud.datastore.key import Key
_DATASET = 'DATASET'
_KIND = 'KIND'
_NAMESPACE = 'NAMESPACE'
ancestor = Key('ANCESTOR', 123, dataset_id=_DATASET)
FILTERS = [('foo', '=', 'Qux'), ('bar', '<', 17)]
PROJECTION = ['foo', 'bar', 'baz']
ORDER = ['foo', 'bar']
GROUP_BY = ['foo']
query = self._makeOne(
dataset_id=_DATASET,
kind=_KIND,
namespace=_NAMESPACE,
ancestor=ancestor,
filters=FILTERS,
projection=PROJECTION,
order=ORDER,
group_by=GROUP_BY,
)
self.assertEqual(query.dataset_id, _DATASET)
self.assertEqual(query.kind, _KIND)
self.assertEqual(query.namespace, _NAMESPACE)
self.assertEqual(query.ancestor.path, ancestor.path)
self.assertEqual(query.filters, FILTERS)
self.assertEqual(query.projection, PROJECTION)
self.assertEqual(query.order, ORDER)
self.assertEqual(query.group_by, GROUP_BY)
def test_namespace_setter_w_non_string(self):
_DATASET = 'DATASET'
query = self._makeOne(_DATASET)
def _assign(val):
query.namespace = val
self.assertRaises(ValueError, _assign, object())
def test_namespace_setter(self):
_DATASET = 'DATASET'
_NAMESPACE = 'NAMESPACE'
query = self._makeOne(_DATASET)
query.namespace = _NAMESPACE
self.assertEqual(query.dataset_id, _DATASET)
self.assertEqual(query.namespace, _NAMESPACE)
def test_kind_setter_w_non_string(self):
_DATASET = 'DATASET'
query = self._makeOne(_DATASET)
def _assign(val):
query.kind = val
self.assertRaises(TypeError, _assign, object())
def test_kind_setter_wo_existing(self):
_DATASET = 'DATASET'
_KIND = 'KIND'
query = self._makeOne(_DATASET)
query.kind = _KIND
self.assertEqual(query.dataset_id, _DATASET)
self.assertEqual(query.kind, _KIND)
def test_kind_setter_w_existing(self):
_DATASET = 'DATASET'
_KIND_BEFORE = 'KIND_BEFORE'
_KIND_AFTER = 'KIND_AFTER'
query = self._makeOne(_DATASET, _KIND_BEFORE)
self.assertEqual(query.kind, _KIND_BEFORE)
query.kind = _KIND_AFTER
self.assertEqual(query.dataset_id, _DATASET)
self.assertEqual(query.kind, _KIND_AFTER)
def test_ancestor_setter_w_non_key(self):
_DATASET = 'DATASET'
query = self._makeOne(_DATASET)
def _assign(val):
query.ancestor = val
self.assertRaises(TypeError, _assign, object())
self.assertRaises(TypeError, _assign, ['KIND', 'NAME'])
def test_ancestor_setter_w_key(self):
from gcloud.datastore.key import Key
_DATASET = 'DATASET'
_NAME = u'NAME'
key = Key('KIND', 123, dataset_id='DATASET')
query = self._makeOne(_DATASET)
query.add_filter('name', '=', _NAME)
query.ancestor = key
self.assertEqual(query.ancestor.path, key.path)
def test_ancestor_deleter_w_key(self):
from gcloud.datastore.key import Key
_DATASET = 'DATASET'
key = Key('KIND', 123, dataset_id='DATASET')
query = self._makeOne(_DATASET, ancestor=key)
del query.ancestor
self.assertTrue(query.ancestor is None)
def test_add_filter_setter_w_unknown_operator(self):
_DATASET = 'DATASET'
query = self._makeOne(_DATASET)
self.assertRaises(ValueError, query.add_filter,
'firstname', '~~', 'John')
def test_add_filter_w_known_operator(self):
_DATASET = 'DATASET'
query = self._makeOne(_DATASET)
query.add_filter('firstname', '=', u'John')
self.assertEqual(query.filters, [('firstname', '=', u'John')])
def test_add_filter_w_all_operators(self):
_DATASET = 'DATASET'
query = self._makeOne(_DATASET)
query.add_filter('leq_prop', '<=', u'val1')
query.add_filter('geq_prop', '>=', u'val2')
query.add_filter('lt_prop', '<', u'val3')
query.add_filter('gt_prop', '>', u'val4')
query.add_filter('eq_prop', '=', u'val5')
self.assertEqual(len(query.filters), 5)
self.assertEqual(query.filters[0], ('leq_prop', '<=', u'val1'))
self.assertEqual(query.filters[1], ('geq_prop', '>=', u'val2'))
self.assertEqual(query.filters[2], ('lt_prop', '<', u'val3'))
self.assertEqual(query.filters[3], ('gt_prop', '>', u'val4'))
self.assertEqual(query.filters[4], ('eq_prop', '=', u'val5'))
def test_add_filter_w_known_operator_and_entity(self):
from gcloud.datastore.entity import Entity
_DATASET = 'DATASET'
query = self._makeOne(_DATASET)
other = Entity()
other['firstname'] = u'John'
other['lastname'] = u'Smith'
query.add_filter('other', '=', other)
self.assertEqual(query.filters, [('other', '=', other)])
def test_add_filter_w_whitespace_property_name(self):
_DATASET = 'DATASET'
query = self._makeOne(_DATASET)
PROPERTY_NAME = ' property with lots of space '
query.add_filter(PROPERTY_NAME, '=', u'John')
self.assertEqual(query.filters, [(PROPERTY_NAME, '=', u'John')])
def test_add_filter___key__valid_key(self):
from gcloud.datastore.key import Key
_DATASET = 'DATASET'
query = self._makeOne(_DATASET)
key = Key('Foo', dataset_id='DATASET')
query.add_filter('__key__', '=', key)
self.assertEqual(query.filters, [('__key__', '=', key)])
def test_filter___key__invalid_operator(self):
from gcloud.datastore.key import Key
_DATASET = 'DATASET'
key = Key('Foo', dataset_id='DATASET')
query = self._makeOne(_DATASET)
self.assertRaises(ValueError, query.add_filter, '__key__', '<', key)
def test_filter___key__invalid_value(self):
_DATASET = 'DATASET'
query = self._makeOne(_DATASET)
self.assertRaises(ValueError, query.add_filter, '__key__', '=', None)
def test_projection_setter_empty(self):
_DATASET = 'DATASET'
_KIND = 'KIND'
query = self._makeOne(_DATASET, _KIND)
query.projection = []
self.assertEqual(query.projection, [])
def test_projection_setter_string(self):
_DATASET = 'DATASET'
_KIND = 'KIND'
query = self._makeOne(_DATASET, _KIND)
query.projection = 'field1'
self.assertEqual(query.projection, ['field1'])
def test_projection_setter_non_empty(self):
_DATASET = 'DATASET'
_KIND = 'KIND'
query = self._makeOne(_DATASET, _KIND)
query.projection = ['field1', 'field2']
self.assertEqual(query.projection, ['field1', 'field2'])
def test_projection_setter_multiple_calls(self):
_DATASET = 'DATASET'
_KIND = 'KIND'
_PROJECTION1 = ['field1', 'field2']
_PROJECTION2 = ['field3']
query = self._makeOne(_DATASET, _KIND)
query.projection = _PROJECTION1
self.assertEqual(query.projection, _PROJECTION1)
query.projection = _PROJECTION2
self.assertEqual(query.projection, _PROJECTION2)
def test_keys_only(self):
_DATASET = 'DATASET'
_KIND = 'KIND'
query = self._makeOne(_DATASET, _KIND)
query.keys_only()
self.assertEqual(query.projection, ['__key__'])
def test_order_setter_empty(self):
_DATASET = 'DATASET'
_KIND = 'KIND'
query = self._makeOne(_DATASET, _KIND, order=['foo', '-bar'])
query.order = []
self.assertEqual(query.order, [])
def test_order_setter_string(self):
_DATASET = 'DATASET'
_KIND = 'KIND'
query = self._makeOne(_DATASET, _KIND)
query.order = 'field'
self.assertEqual(query.order, ['field'])
def test_order_setter_single_item_list_desc(self):
_DATASET = 'DATASET'
_KIND = 'KIND'
query = self._makeOne(_DATASET, _KIND)
query.order = ['-field']
self.assertEqual(query.order, ['-field'])
def test_order_setter_multiple(self):
_DATASET = 'DATASET'
_KIND = 'KIND'
query = self._makeOne(_DATASET, _KIND)
query.order = ['foo', '-bar']
self.assertEqual(query.order, ['foo', '-bar'])
def test_group_by_setter_empty(self):
_DATASET = 'DATASET'
_KIND = 'KIND'
query = self._makeOne(_DATASET, _KIND, group_by=['foo', 'bar'])
query.group_by = []
self.assertEqual(query.group_by, [])
def test_group_by_setter_string(self):
_DATASET = 'DATASET'
_KIND = 'KIND'
query = self._makeOne(_DATASET, _KIND)
query.group_by = 'field1'
self.assertEqual(query.group_by, ['field1'])
def test_group_by_setter_non_empty(self):
_DATASET = 'DATASET'
_KIND = 'KIND'
query = self._makeOne(_DATASET, _KIND)
query.group_by = ['field1', 'field2']
self.assertEqual(query.group_by, ['field1', 'field2'])
def test_group_by_multiple_calls(self):
_DATASET = 'DATASET'
_KIND = 'KIND'
_GROUP_BY1 = ['field1', 'field2']
_GROUP_BY2 = ['field3']
query = self._makeOne(_DATASET, _KIND)
query.group_by = _GROUP_BY1
self.assertEqual(query.group_by, _GROUP_BY1)
query.group_by = _GROUP_BY2
self.assertEqual(query.group_by, _GROUP_BY2)
def test_fetch_defaults_wo_implicit_connection(self):
_DATASET = 'DATASET'
_KIND = 'KIND'
query = self._makeOne(_DATASET, _KIND)
self.assertRaises(ValueError, query.fetch)
def test_fetch_defaults_w_implicit_connection(self):
from gcloud._testing import _Monkey
from gcloud.datastore import _implicit_environ
_DATASET = 'DATASET'
_KIND = 'KIND'
connection = _Connection()
query = self._makeOne(_DATASET, _KIND)
with _Monkey(_implicit_environ, CONNECTION=connection):
iterator = query.fetch()
self.assertTrue(iterator._query is query)
self.assertEqual(iterator._limit, None)
self.assertEqual(iterator._offset, 0)
def test_fetch_explicit(self):
_DATASET = 'DATASET'
_KIND = 'KIND'
connection = _Connection()
query = self._makeOne(_DATASET, _KIND)
iterator = query.fetch(limit=7, offset=8, connection=connection)
self.assertTrue(iterator._query is query)
self.assertEqual(iterator._limit, 7)
self.assertEqual(iterator._offset, 8)
class TestIterator(unittest2.TestCase):
_DATASET = 'DATASET'
_NAMESPACE = 'NAMESPACE'
_KIND = 'KIND'
_ID = 123
_START = b'\x00'
_END = b'\xFF'
def _getTargetClass(self):
from gcloud.datastore.query import Iterator
return Iterator
def _makeOne(self, *args, **kw):
return self._getTargetClass()(*args, **kw)
def _addQueryResults(self, connection, cursor=_END, more=False):
from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
MORE = datastore_pb.QueryResultBatch.NOT_FINISHED
NO_MORE = datastore_pb.QueryResultBatch.MORE_RESULTS_AFTER_LIMIT
_ID = 123
entity_pb = datastore_pb.Entity()
entity_pb.key.partition_id.dataset_id = self._DATASET
path_element = entity_pb.key.path_element.add()
path_element.kind = self._KIND
path_element.id = _ID
prop = entity_pb.property.add()
prop.name = 'foo'
prop.value.string_value = u'Foo'
connection._results.append(
([entity_pb], cursor, MORE if more else NO_MORE))
def test_ctor_defaults(self):
connection = _Connection()
query = object()
iterator = self._makeOne(query, connection)
self.assertTrue(iterator._query is query)
self.assertEqual(iterator._limit, None)
self.assertEqual(iterator._offset, 0)
def test_ctor_explicit(self):
connection = _Connection()
query = _Query()
iterator = self._makeOne(query, connection, 13, 29)
self.assertTrue(iterator._query is query)
self.assertEqual(iterator._limit, 13)
self.assertEqual(iterator._offset, 29)
def test_next_page_no_cursors_no_more(self):
from base64 import b64encode
from gcloud.datastore.query import _pb_from_query
connection = _Connection()
query = _Query(self._DATASET, self._KIND, self._NAMESPACE)
self._addQueryResults(connection)
iterator = self._makeOne(query, connection)
entities, more_results, cursor = iterator.next_page()
self.assertEqual(cursor, b64encode(self._END))
self.assertFalse(more_results)
self.assertFalse(iterator._more_results)
self.assertEqual(len(entities), 1)
self.assertEqual(entities[0].key.path,
[{'kind': self._KIND, 'id': self._ID}])
self.assertEqual(entities[0]['foo'], u'Foo')
qpb = _pb_from_query(query)
qpb.offset = 0
EXPECTED = {
'dataset_id': self._DATASET,
'query_pb': qpb,
'namespace': self._NAMESPACE,
'transaction_id': None,
}
self.assertEqual(connection._called_with, [EXPECTED])
def test_next_page_no_cursors_no_more_w_offset_and_limit(self):
from base64 import b64encode
from gcloud.datastore.query import _pb_from_query
connection = _Connection()
query = _Query(self._DATASET, self._KIND, self._NAMESPACE)
self._addQueryResults(connection)
iterator = self._makeOne(query, connection, 13, 29)
entities, more_results, cursor = iterator.next_page()
self.assertEqual(cursor, b64encode(self._END))
self.assertFalse(more_results)
self.assertFalse(iterator._more_results)
self.assertEqual(len(entities), 1)
self.assertEqual(entities[0].key.path,
[{'kind': self._KIND, 'id': self._ID}])
self.assertEqual(entities[0]['foo'], u'Foo')
qpb = _pb_from_query(query)
qpb.limit = 13
qpb.offset = 29
EXPECTED = {
'dataset_id': self._DATASET,
'query_pb': qpb,
'namespace': self._NAMESPACE,
'transaction_id': None,
}
self.assertEqual(connection._called_with, [EXPECTED])
def test_next_page_w_cursors_w_more(self):
from base64 import b64decode
from base64 import b64encode
from gcloud.datastore.query import _pb_from_query
connection = _Connection()
query = _Query(self._DATASET, self._KIND, self._NAMESPACE)
self._addQueryResults(connection, cursor=self._END, more=True)
iterator = self._makeOne(query, connection)
iterator._start_cursor = self._START
iterator._end_cursor = self._END
entities, more_results, cursor = iterator.next_page()
self.assertEqual(cursor, b64encode(self._END))
self.assertTrue(more_results)
self.assertTrue(iterator._more_results)
self.assertEqual(iterator._end_cursor, None)
self.assertEqual(b64decode(iterator._start_cursor), self._END)
self.assertEqual(len(entities), 1)
self.assertEqual(entities[0].key.path,
[{'kind': self._KIND, 'id': self._ID}])
self.assertEqual(entities[0]['foo'], u'Foo')
qpb = _pb_from_query(query)
qpb.offset = 0
qpb.start_cursor = b64decode(self._START)
qpb.end_cursor = b64decode(self._END)
EXPECTED = {
'dataset_id': self._DATASET,
'query_pb': qpb,
'namespace': self._NAMESPACE,
'transaction_id': None,
}
self.assertEqual(connection._called_with, [EXPECTED])
def test_next_page_w_cursors_w_bogus_more(self):
connection = _Connection()
query = _Query(self._DATASET, self._KIND, self._NAMESPACE)
self._addQueryResults(connection, cursor=self._END, more=True)
epb, cursor, _ = connection._results.pop()
connection._results.append((epb, cursor, 4)) # invalid enum
iterator = self._makeOne(query, connection)
self.assertRaises(ValueError, iterator.next_page)
def test___iter___no_more(self):
from gcloud.datastore.query import _pb_from_query
connection = _Connection()
query = _Query(self._DATASET, self._KIND, self._NAMESPACE)
self._addQueryResults(connection)
iterator = self._makeOne(query, connection)
entities = list(iterator)
self.assertFalse(iterator._more_results)
self.assertEqual(len(entities), 1)
self.assertEqual(entities[0].key.path,
[{'kind': self._KIND, 'id': self._ID}])
self.assertEqual(entities[0]['foo'], u'Foo')
qpb = _pb_from_query(query)
qpb.offset = 0
EXPECTED = {
'dataset_id': self._DATASET,
'query_pb': qpb,
'namespace': self._NAMESPACE,
'transaction_id': None,
}
self.assertEqual(connection._called_with, [EXPECTED])
def test___iter___w_more(self):
from gcloud.datastore.query import _pb_from_query
connection = _Connection()
query = _Query(self._DATASET, self._KIND, self._NAMESPACE)
self._addQueryResults(connection, cursor=self._END, more=True)
self._addQueryResults(connection)
iterator = self._makeOne(query, connection)
entities = list(iterator)
self.assertFalse(iterator._more_results)
self.assertEqual(len(entities), 2)
for entity in entities:
self.assertEqual(
entity.key.path,
[{'kind': self._KIND, 'id': self._ID}])
self.assertEqual(entities[1]['foo'], u'Foo')
qpb1 = _pb_from_query(query)
qpb1.offset = 0
qpb2 = _pb_from_query(query)
qpb2.offset = 0
qpb2.start_cursor = self._END
EXPECTED1 = {
'dataset_id': self._DATASET,
'query_pb': qpb1,
'namespace': self._NAMESPACE,
'transaction_id': None,
}
EXPECTED2 = {
'dataset_id': self._DATASET,
'query_pb': qpb2,
'namespace': self._NAMESPACE,
'transaction_id': None,
}
self.assertEqual(len(connection._called_with), 2)
self.assertEqual(connection._called_with[0], EXPECTED1)
self.assertEqual(connection._called_with[1], EXPECTED2)
class Test__pb_from_query(unittest2.TestCase):
def _callFUT(self, query):
from gcloud.datastore.query import _pb_from_query
return _pb_from_query(query)
def test_empty(self):
from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
pb = self._callFUT(_Query())
self.assertEqual(list(pb.projection), [])
self.assertEqual(list(pb.kind), [])
self.assertEqual(list(pb.order), [])
self.assertEqual(list(pb.group_by), [])
self.assertEqual(pb.filter.property_filter.property.name, '')
cfilter = pb.filter.composite_filter
self.assertEqual(cfilter.operator, datastore_pb.CompositeFilter.AND)
self.assertEqual(list(cfilter.filter), [])
self.assertEqual(pb.start_cursor, b'')
self.assertEqual(pb.end_cursor, b'')
self.assertEqual(pb.limit, 0)
self.assertEqual(pb.offset, 0)
def test_projection(self):
pb = self._callFUT(_Query(projection=['a', 'b', 'c']))
self.assertEqual([item.property.name for item in pb.projection],
['a', 'b', 'c'])
def test_kind(self):
pb = self._callFUT(_Query(kind='KIND'))
self.assertEqual([item.name for item in pb.kind], ['KIND'])
def test_ancestor(self):
from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
from gcloud.datastore.key import Key
from gcloud.datastore.helpers import _prepare_key_for_request
ancestor = Key('Ancestor', 123, dataset_id='DATASET')
pb = self._callFUT(_Query(ancestor=ancestor))
cfilter = pb.filter.composite_filter
self.assertEqual(cfilter.operator, datastore_pb.CompositeFilter.AND)
self.assertEqual(len(cfilter.filter), 1)
pfilter = cfilter.filter[0].property_filter
self.assertEqual(pfilter.property.name, '__key__')
ancestor_pb = _prepare_key_for_request(ancestor.to_protobuf())
self.assertEqual(pfilter.value.key_value, ancestor_pb)
def test_filter(self):
from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
query = _Query(filters=[('name', '=', u'John')])
query.OPERATORS = {
'=': datastore_pb.PropertyFilter.EQUAL,
}
pb = self._callFUT(query)
cfilter = pb.filter.composite_filter
self.assertEqual(cfilter.operator, datastore_pb.CompositeFilter.AND)
self.assertEqual(len(cfilter.filter), 1)
pfilter = cfilter.filter[0].property_filter
self.assertEqual(pfilter.property.name, 'name')
self.assertEqual(pfilter.value.string_value, u'John')
def test_filter_key(self):
from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
from gcloud.datastore.key import Key
from gcloud.datastore.helpers import _prepare_key_for_request
key = Key('Kind', 123, dataset_id='DATASET')
query = _Query(filters=[('__key__', '=', key)])
query.OPERATORS = {
'=': datastore_pb.PropertyFilter.EQUAL,
}
pb = self._callFUT(query)
cfilter = pb.filter.composite_filter
self.assertEqual(cfilter.operator, datastore_pb.CompositeFilter.AND)
self.assertEqual(len(cfilter.filter), 1)
pfilter = cfilter.filter[0].property_filter
self.assertEqual(pfilter.property.name, '__key__')
key_pb = _prepare_key_for_request(key.to_protobuf())
self.assertEqual(pfilter.value.key_value, key_pb)
def test_order(self):
from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
pb = self._callFUT(_Query(order=['a', '-b', 'c']))
self.assertEqual([item.property.name for item in pb.order],
['a', 'b', 'c'])
self.assertEqual([item.direction for item in pb.order],
[datastore_pb.PropertyOrder.ASCENDING,
datastore_pb.PropertyOrder.DESCENDING,
datastore_pb.PropertyOrder.ASCENDING])
def test_group_by(self):
pb = self._callFUT(_Query(group_by=['a', 'b', 'c']))
self.assertEqual([item.name for item in pb.group_by],
['a', 'b', 'c'])
class _Query(object):
def __init__(self,
dataset_id=None,
kind=None,
namespace=None,
ancestor=None,
filters=(),
projection=(),
order=(),
group_by=()):
self.dataset_id = dataset_id
self.kind = kind
self.namespace = namespace
self.ancestor = ancestor
self.filters = filters
self.projection = projection
self.order = order
self.group_by = group_by
class _Connection(object):
_called_with = None
_cursor = b'\x00'
_skipped = 0
def __init__(self):
self._results = []
self._called_with = []
def run_query(self, **kw):
self._called_with.append(kw)
result, self._results = self._results[0], self._results[1:]
return result
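# Example (illustrative only, not part of the original module): the fake
# connection replays seeded results in FIFO order and records the keyword
# arguments of every call, e.g.
#
#   conn = _Connection()
#   conn._results.append(([], b'\x00', 0))
#   conn.run_query(query_pb=None)        # -> ([], b'\x00', 0)
#   conn._called_with                    # -> [{'query_pb': None}]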
| lucemia/gcloud-python | gcloud/datastore/test_query.py | Python | apache-2.0 | 25,434 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.exceptions import ValidationError # noqa
from django.core.urlresolvers import reverse
from django.utils.http import urlencode
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ungettext_lazy
from openstack_auth import utils as auth_utils
from horizon import exceptions
from horizon import forms
from horizon import tables
from keystoneclient.exceptions import Conflict # noqa
from openstack_dashboard import api
from openstack_dashboard import policy
class RescopeTokenToProject(tables.LinkAction):
name = "rescope"
verbose_name = _("Set as Active Project")
url = "switch_tenants"
def allowed(self, request, project):
        # allow rescoping the token to any project the user has a role on
        # (authorized_tenants), provided they are not currently scoped to it
return next((True for proj in request.user.authorized_tenants
if proj.id == project.id and
project.id != request.user.project_id), False)
def get_link_url(self, project):
        # redirects to the switch_tenants url, which will then redirect
        # back to this page
dash_url = reverse("horizon:identity:projects:index")
base_url = reverse(self.url, args=[project.id])
param = urlencode({"next": dash_url})
return "?".join([base_url, param])
class UpdateMembersLink(tables.LinkAction):
name = "users"
verbose_name = _("Manage Members")
url = "horizon:identity:projects:update"
classes = ("ajax-modal",)
icon = "pencil"
policy_rules = (("identity", "identity:list_users"),
("identity", "identity:list_roles"))
def get_link_url(self, project):
step = 'update_members'
base_url = reverse(self.url, args=[project.id])
param = urlencode({"step": step})
return "?".join([base_url, param])
class UpdateGroupsLink(tables.LinkAction):
name = "groups"
verbose_name = _("Modify Groups")
url = "horizon:identity:projects:update"
classes = ("ajax-modal",)
icon = "pencil"
policy_rules = (("identity", "identity:list_groups"),)
def allowed(self, request, project):
return api.keystone.VERSIONS.active >= 3
def get_link_url(self, project):
step = 'update_group_members'
base_url = reverse(self.url, args=[project.id])
param = urlencode({"step": step})
return "?".join([base_url, param])
class UsageLink(tables.LinkAction):
name = "usage"
verbose_name = _("View Usage")
url = "horizon:identity:projects:usage"
icon = "stats"
policy_rules = (("compute", "compute_extension:simple_tenant_usage:show"),)
def allowed(self, request, project):
return request.user.is_superuser
class CreateProject(tables.LinkAction):
name = "create"
verbose_name = _("Create Project")
url = "horizon:identity:projects:create"
classes = ("ajax-modal",)
icon = "plus"
policy_rules = (('identity', 'identity:create_project'),)
def allowed(self, request, project):
return api.keystone.keystone_can_edit_project()
class UpdateProject(tables.LinkAction):
name = "update"
verbose_name = _("Edit Project")
url = "horizon:identity:projects:update"
classes = ("ajax-modal",)
icon = "pencil"
policy_rules = (('identity', 'identity:update_project'),)
def allowed(self, request, project):
return api.keystone.keystone_can_edit_project()
class ModifyQuotas(tables.LinkAction):
name = "quotas"
verbose_name = _("Modify Quotas")
url = "horizon:identity:projects:update"
classes = ("ajax-modal",)
icon = "pencil"
policy_rules = (('compute', "compute_extension:quotas:update"),)
def get_link_url(self, project):
step = 'update_quotas'
base_url = reverse(self.url, args=[project.id])
param = urlencode({"step": step})
return "?".join([base_url, param])
class DeleteTenantsAction(tables.DeleteAction):
@staticmethod
def action_present(count):
return ungettext_lazy(
u"Delete Project",
u"Delete Projects",
count
)
@staticmethod
def action_past(count):
return ungettext_lazy(
u"Deleted Project",
u"Deleted Projects",
count
)
policy_rules = (("identity", "identity:delete_project"),)
def allowed(self, request, project):
return api.keystone.keystone_can_edit_project()
def delete(self, request, obj_id):
api.keystone.tenant_delete(request, obj_id)
def handle(self, table, request, obj_ids):
response = \
super(DeleteTenantsAction, self).handle(table, request, obj_ids)
auth_utils.remove_project_cache(request.user.token.id)
return response
class TenantFilterAction(tables.FilterAction):
def filter(self, table, tenants, filter_string):
"""Really naive case-insensitive search."""
# FIXME(gabriel): This should be smarter. Written for demo purposes.
q = filter_string.lower()
def comp(tenant):
if q in tenant.name.lower():
return True
return False
return filter(comp, tenants)
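    # Example (illustrative only, names made up): with tenants named "Demo",
    # "demo-2" and "admin", a filter string of "dem" keeps "Demo" and
    # "demo-2", since both sides of the comparison are lower-cased.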
class UpdateRow(tables.Row):
ajax = True
def get_data(self, request, project_id):
project_info = api.keystone.tenant_get(request, project_id,
admin=True)
return project_info
class UpdateCell(tables.UpdateAction):
def allowed(self, request, project, cell):
policy_rule = (("identity", "identity:update_project"),)
return (
(cell.column.name != 'enabled' or
request.user.token.project['id'] != cell.datum.id) and
api.keystone.keystone_can_edit_project() and
policy.check(policy_rule, request))
def update_cell(self, request, datum, project_id,
cell_name, new_cell_value):
# inline update project info
try:
project_obj = datum
            # update the changed attribute with the new value
setattr(project_obj, cell_name, new_cell_value)
api.keystone.tenant_update(
request,
project_id,
name=project_obj.name,
description=project_obj.description,
enabled=project_obj.enabled)
except Conflict:
            # Return a nicer error message for name conflicts; the message
            # from the exception is not that clear to users.
message = _("This name is already taken.")
raise ValidationError(message)
except Exception:
exceptions.handle(request, ignore=True)
return False
return True
class TenantsTable(tables.DataTable):
name = tables.Column('name', verbose_name=_('Name'),
form_field=forms.CharField(max_length=64),
update_action=UpdateCell)
description = tables.Column(lambda obj: getattr(obj, 'description', None),
verbose_name=_('Description'),
form_field=forms.CharField(
widget=forms.Textarea(attrs={'rows': 4}),
required=False),
update_action=UpdateCell)
id = tables.Column('id', verbose_name=_('Project ID'))
enabled = tables.Column('enabled', verbose_name=_('Enabled'), status=True,
form_field=forms.BooleanField(
label=_('Enabled'),
required=False),
update_action=UpdateCell)
class Meta:
name = "tenants"
verbose_name = _("Projects")
row_class = UpdateRow
row_actions = (UpdateMembersLink, UpdateGroupsLink, UpdateProject,
UsageLink, ModifyQuotas, DeleteTenantsAction,
RescopeTokenToProject)
table_actions = (TenantFilterAction, CreateProject,
DeleteTenantsAction)
pagination_param = "tenant_marker"
| CiscoSystems/avos | openstack_dashboard/dashboards/identity/projects/tables.py | Python | apache-2.0 | 8,769 |
# Copyright (c) 2012-2016 Seafile Ltd.
import logging
from rest_framework.authentication import SessionAuthentication
from rest_framework.permissions import IsAdminUser
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework import status
from django.template.defaultfilters import filesizeformat
from django.utils.translation import ugettext as _
from seaserv import ccnet_api, seafile_api
from seahub.api2.authentication import TokenAuthentication
from seahub.api2.throttling import UserRateThrottle
from seahub.api2.utils import api_error
from seahub.base.accounts import User
from seahub.signals import repo_deleted
from seahub.views import get_system_default_repo_id
from seahub.admin_log.signals import admin_operation
from seahub.admin_log.models import REPO_CREATE, REPO_DELETE, REPO_TRANSFER
from seahub.share.models import FileShare, UploadLinkShare
from seahub.base.templatetags.seahub_tags import email2nickname, email2contact_email
from seahub.group.utils import is_group_member, group_id_to_name
from seahub.utils.repo import get_related_users_by_repo, normalize_repo_status_code, normalize_repo_status_str
from seahub.utils import is_valid_dirent_name, is_valid_email
from seahub.utils.timeutils import timestamp_to_isoformat_timestr
from seahub.api2.endpoints.group_owned_libraries import get_group_id_by_repo_owner
try:
from seahub.settings import MULTI_TENANCY
except ImportError:
MULTI_TENANCY = False
logger = logging.getLogger(__name__)
def get_repo_info(repo):
repo_owner = seafile_api.get_repo_owner(repo.repo_id)
if not repo_owner:
try:
org_repo_owner = seafile_api.get_org_repo_owner(repo.repo_id)
except Exception:
org_repo_owner = None
owner = repo_owner or org_repo_owner or ''
result = {}
result['id'] = repo.repo_id
result['name'] = repo.repo_name
result['owner'] = owner
result['owner_email'] = owner
result['owner_contact_email'] = email2contact_email(owner)
result['size'] = repo.size
result['size_formatted'] = filesizeformat(repo.size)
result['encrypted'] = repo.encrypted
result['file_count'] = repo.file_count
result['status'] = normalize_repo_status_code(repo.status)
result['last_modified'] = timestamp_to_isoformat_timestr(repo.last_modified)
if '@seafile_group' in owner:
group_id = get_group_id_by_repo_owner(owner)
result['group_name'] = group_id_to_name(group_id)
result['owner_name'] = group_id_to_name(group_id)
else:
result['owner_name'] = email2nickname(owner)
return result
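# Illustrative example (values made up) of the dict returned by
# get_repo_info() for a personally owned library:
#
#   {
#       'id': 'c3e9f8a0-...', 'name': 'My Library',
#       'owner': 'user@example.com', 'owner_email': 'user@example.com',
#       'owner_contact_email': 'user@example.com', 'owner_name': 'User',
#       'size': 1048576, 'size_formatted': '1.0 MB',
#       'encrypted': False, 'file_count': 12, 'status': 'normal',
#       'last_modified': '2016-01-01T00:00:00+00:00',
#   }
#
# Department (group-owned) libraries additionally carry 'group_name'.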
class AdminLibraries(APIView):
authentication_classes = (TokenAuthentication, SessionAuthentication)
throttle_classes = (UserRateThrottle,)
permission_classes = (IsAdminUser,)
def get(self, request, format=None):
""" List 'all' libraries (by name/owner/page)
Permission checking:
1. only admin can perform this action.
"""
if not request.user.admin_permissions.can_manage_library():
return api_error(status.HTTP_403_FORBIDDEN, 'Permission denied.')
order_by = request.GET.get('order_by', '').lower().strip()
if order_by and order_by not in ('size', 'file_count'):
error_msg = 'order_by invalid.'
return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
# search libraries (by name/owner)
repo_name = request.GET.get('name', '')
owner = request.GET.get('owner', '')
repos = []
if repo_name and owner:
# search by name and owner
orgs = ccnet_api.get_orgs_by_user(owner)
if orgs:
org_id = orgs[0].org_id
owned_repos = seafile_api.get_org_owned_repo_list(org_id, owner)
else:
owned_repos = seafile_api.get_owned_repo_list(owner)
for repo in owned_repos:
if not repo.name or repo.is_virtual:
continue
if repo_name in repo.name:
repo_info = get_repo_info(repo)
repos.append(repo_info)
return Response({"name": repo_name, "owner": owner, "repos": repos})
elif repo_name:
            # search by name (keyword in name)
repos_all = seafile_api.get_repo_list(-1, -1)
for repo in repos_all:
if not repo.name or repo.is_virtual:
continue
if repo_name in repo.name:
repo_info = get_repo_info(repo)
repos.append(repo_info)
return Response({"name": repo_name, "owner": '', "repos": repos})
elif owner:
# search by owner
orgs = ccnet_api.get_orgs_by_user(owner)
if orgs:
org_id = orgs[0].org_id
owned_repos = seafile_api.get_org_owned_repo_list(org_id, owner)
else:
owned_repos = seafile_api.get_owned_repo_list(owner)
for repo in owned_repos:
if repo.is_virtual:
continue
repo_info = get_repo_info(repo)
repos.append(repo_info)
return Response({"name": '', "owner": owner, "repos": repos})
# get libraries by page
try:
current_page = int(request.GET.get('page', '1'))
per_page = int(request.GET.get('per_page', '100'))
except ValueError:
current_page = 1
per_page = 100
start = (current_page - 1) * per_page
limit = per_page + 1
if order_by:
repos_all = seafile_api.get_repo_list(start, limit, order_by)
else:
repos_all = seafile_api.get_repo_list(start, limit)
if len(repos_all) > per_page:
repos_all = repos_all[:per_page]
has_next_page = True
else:
has_next_page = False
default_repo_id = get_system_default_repo_id()
repos_all = [r for r in repos_all if not r.is_virtual]
repos_all = [r for r in repos_all if r.repo_id != default_repo_id]
return_results = []
for repo in repos_all:
repo_info = get_repo_info(repo)
return_results.append(repo_info)
page_info = {
'has_next_page': has_next_page,
'current_page': current_page
}
return Response({"page_info": page_info, "repos": return_results})
def post(self, request):
""" Admin create library
Permission checking:
1. only admin can perform this action.
"""
if not request.user.admin_permissions.can_manage_library():
return api_error(status.HTTP_403_FORBIDDEN, 'Permission denied.')
repo_name = request.data.get('name', None)
if not repo_name:
error_msg = 'name invalid.'
return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
username = request.user.username
repo_owner = request.data.get('owner', None)
if repo_owner:
try:
User.objects.get(email=repo_owner)
except User.DoesNotExist:
error_msg = 'User %s not found.' % repo_owner
return api_error(status.HTTP_404_NOT_FOUND, error_msg)
else:
repo_owner = username
try:
repo_id = seafile_api.create_repo(repo_name, '', repo_owner)
except Exception as e:
logger.error(e)
error_msg = 'Internal Server Error'
return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
# send admin operation log signal
admin_op_detail = {
"id": repo_id,
"name": repo_name,
"owner": repo_owner,
}
admin_operation.send(sender=None, admin_name=request.user.username,
operation=REPO_CREATE, detail=admin_op_detail)
repo = seafile_api.get_repo(repo_id)
repo_info = get_repo_info(repo)
return Response(repo_info)
class AdminLibrary(APIView):
authentication_classes = (TokenAuthentication, SessionAuthentication)
throttle_classes = (UserRateThrottle,)
permission_classes = (IsAdminUser,)
def get(self, request, repo_id, format=None):
""" get info of a library
Permission checking:
1. only admin can perform this action.
"""
if not request.user.admin_permissions.can_manage_library():
return api_error(status.HTTP_403_FORBIDDEN, 'Permission denied.')
repo = seafile_api.get_repo(repo_id)
if not repo:
error_msg = 'Library %s not found.' % repo_id
return api_error(status.HTTP_404_NOT_FOUND, error_msg)
repo_info = get_repo_info(repo)
return Response(repo_info)
def delete(self, request, repo_id, format=None):
""" delete a library
Permission checking:
1. only admin can perform this action.
"""
if not request.user.admin_permissions.can_manage_library():
return api_error(status.HTTP_403_FORBIDDEN, 'Permission denied.')
if get_system_default_repo_id() == repo_id:
error_msg = _('System library can not be deleted.')
return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
repo = seafile_api.get_repo(repo_id)
if not repo:
            # for the case where `seafile-data` has been damaged,
            # no repo object will be returned from the seafile api;
            # delete the database record anyway
try:
seafile_api.remove_repo(repo_id)
except Exception as e:
logger.error(e)
error_msg = 'Internal Server Error'
return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
return Response({'success': True})
repo_name = repo.name
repo_owner = seafile_api.get_repo_owner(repo_id)
if not repo_owner:
repo_owner = seafile_api.get_org_repo_owner(repo_id)
try:
seafile_api.remove_repo(repo_id)
try:
org_id = seafile_api.get_org_id_by_repo_id(repo_id)
related_usernames = get_related_users_by_repo(repo_id,
org_id if org_id and org_id > 0 else None)
except Exception as e:
logger.error(e)
org_id = -1
related_usernames = []
# send signal for seafevents
repo_deleted.send(sender=None, org_id=-1, operator=request.user.username,
usernames=related_usernames, repo_owner=repo_owner,
repo_id=repo_id, repo_name=repo.name)
except Exception as e:
logger.error(e)
error_msg = 'Internal Server Error'
return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
# send admin operation log signal
admin_op_detail = {
"id": repo_id,
"name": repo_name,
"owner": repo_owner,
}
admin_operation.send(sender=None, admin_name=request.user.username,
operation=REPO_DELETE, detail=admin_op_detail)
return Response({'success': True})
def put(self, request, repo_id, format=None):
""" update a library status, transfer a library, rename a library
Permission checking:
1. only admin can perform this action.
"""
if not request.user.admin_permissions.can_manage_library():
return api_error(status.HTTP_403_FORBIDDEN, 'Permission denied.')
# argument check
new_status = request.data.get('status', None)
if new_status:
if new_status not in ('normal', 'read-only'):
error_msg = 'status invalid.'
return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
new_repo_name = request.data.get('name', None)
if new_repo_name:
if not is_valid_dirent_name(new_repo_name):
error_msg = 'name invalid.'
return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
new_owner = request.data.get('owner', None)
if new_owner:
if not is_valid_email(new_owner):
error_msg = 'owner invalid.'
return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
# resource check
repo = seafile_api.get_repo(repo_id)
if not repo:
error_msg = 'Library %s not found.' % repo_id
return api_error(status.HTTP_404_NOT_FOUND, error_msg)
if new_status:
try:
seafile_api.set_repo_status(repo_id, normalize_repo_status_str(new_status))
except Exception as e:
logger.error(e)
error_msg = 'Internal Server Error'
return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
if new_repo_name:
try:
res = seafile_api.edit_repo(repo_id, new_repo_name, '', None)
except Exception as e:
logger.error(e)
error_msg = 'Internal Server Error'
return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
if res == -1:
                e = 'Admin rename failed: ID of library is %s, edit_repo api call failed.' % \
repo_id
logger.error(e)
error_msg = 'Internal Server Error'
return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
if new_owner:
try:
new_owner_obj = User.objects.get(email=new_owner)
except User.DoesNotExist:
error_msg = 'User %s not found.' % new_owner
return api_error(status.HTTP_404_NOT_FOUND, error_msg)
if not new_owner_obj.permissions.can_add_repo():
error_msg = _('Transfer failed: role of %s is %s, can not add library.') % \
(new_owner, new_owner_obj.role)
return api_error(status.HTTP_403_FORBIDDEN, error_msg)
if MULTI_TENANCY:
try:
if seafile_api.get_org_id_by_repo_id(repo_id) > 0:
error_msg = 'Can not transfer organization library.'
return api_error(status.HTTP_403_FORBIDDEN, error_msg)
if ccnet_api.get_orgs_by_user(new_owner):
error_msg = 'Can not transfer library to organization user %s' % new_owner
return api_error(status.HTTP_403_FORBIDDEN, error_msg)
except Exception as e:
logger.error(e)
error_msg = 'Internal Server Error'
return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
repo_owner = seafile_api.get_repo_owner(repo_id)
if new_owner == repo_owner:
error_msg = _("Library can not be transferred to owner.")
return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
# get repo shared to user/group list
shared_users = seafile_api.list_repo_shared_to(
repo_owner, repo_id)
shared_groups = seafile_api.list_repo_shared_group_by_user(
repo_owner, repo_id)
# get all pub repos
pub_repos = []
if not request.cloud_mode:
pub_repos = seafile_api.list_inner_pub_repos_by_owner(repo_owner)
# transfer repo
seafile_api.set_repo_owner(repo_id, new_owner)
# reshare repo to user
for shared_user in shared_users:
shared_username = shared_user.user
if new_owner == shared_username:
continue
seafile_api.share_repo(repo_id, new_owner,
shared_username, shared_user.perm)
# reshare repo to group
for shared_group in shared_groups:
shared_group_id = shared_group.group_id
if not is_group_member(shared_group_id, new_owner):
continue
seafile_api.set_group_repo(repo_id, shared_group_id,
new_owner, shared_group.perm)
# reshare repo to links
try:
UploadLinkShare.objects.filter(username=repo_owner, repo_id=repo_id).update(username=new_owner)
FileShare.objects.filter(username=repo_owner, repo_id=repo_id).update(username=new_owner)
except Exception as e:
logger.error(e)
error_msg = 'Internal Server Error'
return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
# check if current repo is pub-repo
# if YES, reshare current repo to public
for pub_repo in pub_repos:
if repo_id != pub_repo.id:
continue
seafile_api.add_inner_pub_repo(repo_id, pub_repo.permission)
break
# send admin operation log signal
admin_op_detail = {
"id": repo_id,
"name": repo.name,
"from": repo_owner,
"to": new_owner,
}
admin_operation.send(sender=None, admin_name=request.user.username,
operation=REPO_TRANSFER, detail=admin_op_detail)
repo = seafile_api.get_repo(repo_id)
repo_info = get_repo_info(repo)
return Response(repo_info)
class AdminSearchLibrary(APIView):
authentication_classes = (TokenAuthentication, SessionAuthentication)
throttle_classes = (UserRateThrottle,)
permission_classes = (IsAdminUser,)
def get(self, request, format=None):
""" Search library by name.
Permission checking:
1. only admin can perform this action.
"""
if not request.user.admin_permissions.can_manage_library():
return api_error(status.HTTP_403_FORBIDDEN, 'Permission denied.')
query_str = request.GET.get('query', '').lower().strip()
if not query_str:
error_msg = 'query invalid.'
return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
repos = seafile_api.search_repos_by_name(query_str)
default_repo_id = get_system_default_repo_id()
repos = [r for r in repos if not r.is_virtual]
repos = [r for r in repos if r.repo_id != default_repo_id]
email_dict = {}
name_dict = {}
contact_email_dict = {}
for repo in repos:
# get owner email
repo_id = repo.repo_id
repo_owner = seafile_api.get_repo_owner(repo_id)
if not repo_owner:
try:
org_repo_owner = seafile_api.get_org_repo_owner(repo_id)
except Exception:
org_repo_owner = ''
owner_email = repo_owner or org_repo_owner or ''
if repo_id not in email_dict:
email_dict[repo_id] = owner_email
# get owner name
if repo_id not in name_dict:
# is department library
if '@seafile_group' in owner_email:
group_id = get_group_id_by_repo_owner(owner_email)
owner_name = group_id_to_name(group_id)
else:
owner_name = email2nickname(owner_email)
name_dict[repo_id] = owner_name
# get owner contact_email
if repo_id not in contact_email_dict:
if '@seafile_group' in owner_email:
owner_contact_email = ''
else:
owner_contact_email = email2contact_email(owner_email)
contact_email_dict[repo_id] = owner_contact_email
result = []
for repo in repos:
info = {}
info['id'] = repo.repo_id
info['name'] = repo.repo_name
info['owner_email'] = email_dict.get(repo.repo_id, '')
info['owner_name'] = name_dict.get(repo.repo_id, '')
info['owner_contact_email'] = contact_email_dict.get(repo.repo_id, '')
info['size'] = repo.size
info['encrypted'] = repo.encrypted
info['file_count'] = repo.file_count
info['status'] = normalize_repo_status_code(repo.status)
result.append(info)
return Response({"repo_list": result})
| miurahr/seahub | seahub/api2/endpoints/admin/libraries.py | Python | apache-2.0 | 20,715 |
from scrapy.spider import BaseSpider
from scrapy.selector import HtmlXPathSelector
from scrapy.http import Request, HtmlResponse
from scrapy.utils.response import get_base_url
from scrapy.utils.url import urljoin_rfc
from product_spiders.items import Product, ProductLoader
class TigerChefSpider(BaseSpider):
name = 'tigerchef.com'
allowed_domains = ['tigerchef.com']
start_urls = ('http://www.tigerchef.com',)
def parse(self, response):
hxs = HtmlXPathSelector(response)
#categories = hxs.select('//div[@class="sidebar_nav"]//li/a/@href').extract()
categories = hxs.select('//div[@class="navigation"]/ul/li/a/@href').extract()
categories += hxs.select('//ul[@class="cl_subs"]//a/@href').extract()
loaded = False
for category in categories:
loaded = True
yield Request(category)
next_page = hxs.select('//a[@rel="next"]/@href').extract()
if next_page:
base_url = get_base_url(response)
loaded = True
yield Request(urljoin_rfc(base_url, next_page[0]))
products = [product for product in self.parse_products(hxs)]
for product in products:
yield product
if (not products or not loaded) and response.meta.get('retries', 0) < 3:
yield Request(response.url, dont_filter=True,
meta={'retries': response.meta.get('retries', 0) + 1})
def parse_products(self, hxs):
products = hxs.select('//div[starts-with(@id, "product_")]')
for product in products:
product_loader = ProductLoader(Product(), product)
product_loader.add_xpath('url', './/span[@class="description"]/a/@href')
product_loader.add_xpath('name', './/span[@class="description"]/a/b/text()')
#product_loader.add_xpath('price', './/label/text()')
product_loader.add_xpath('price', './/div[@class="our_price"]/text()')
product_loader.add_xpath('sku', './/span[@class="description"]', re='Model #:[\s(]*([\S^)]*)')
yield product_loader.load_item()
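# Note (illustrative only): the ``sku`` regex captures the run of
# non-whitespace characters following "Model #:", so markup such as
# "Model #: TC-500" yields the sku "TC-500"; the exact page text shown here
# is an assumption about the site's markup, not taken from the source.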
| 0--key/lib | portfolio/Python/scrapy/tigerchef/tigerchefspider.py | Python | apache-2.0 | 2,126 |
import unittest
from lib.data_structures.trees.parse_tree import ParseTree
class TestParseTree(unittest.TestCase):
def evaluate(self, expression, result):
parser = ParseTree()
parse_tree = parser.build_parse_tree(expression)
self.assertEqual(parser.evaluate(parse_tree), result)
print(parse_tree)
def testParseTree(self):
self.evaluate("( ( 5 + ( 2 * ( 100 / 2 ) ) ) - 5 )", 100)
self.evaluate("( 10 + 5 )", 15)
self.evaluate("( 10 / 2 )", 5)
self.evaluate("( 5 * ( 5 * ( 5 * 5 ) ) ) )", 625)
self.evaluate("( 10 / ( 5 + ( 3 + 2 ) ) )", 1)
self.evaluate("( 1 + ( 10 - ( 5 + ( 3 + 2 ) ) ) )", 1) | anthonynsimon/python-data-structures-algorithms | tests/test_parse_tree.py | Python | apache-2.0 | 686 |
# Licensed to the .NET Foundation under one or more agreements.
# The .NET Foundation licenses this file to you under the Apache 2.0 License.
# See the LICENSE file in the project root for more information.
from iptest.assert_util import *
add_clr_assemblies("loadorder_3")
# namespace First {
# public class Generic1<K, V> {
# public static string Flag = typeof(Generic1<,>).FullName;
# }
# }
import First
AreEqual(First.Generic1[int, int].Flag, "First.Generic1`2")
add_clr_assemblies("loadorder_3g")
# namespace First {
# public class Generic1<K, V> {
# public static string Flag = typeof(Generic1<,>).FullName + "_Same";
# }
# }
AreEqual(First.Generic1[int, int].Flag, "First.Generic1`2_Same")
from First import *
AreEqual(Generic1[int, int].Flag, "First.Generic1`2_Same")
| slozier/ironpython2 | Tests/interop/net/loadorder/t3g1.py | Python | apache-2.0 | 833 |
from __future__ import absolute_import
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "api.api.settings")
import copy
import logging
import django
from api.webview.models import HarvesterResponse, Document, Version
from scrapi import events
from scrapi.util import json_without_bytes
from scrapi.linter import RawDocument, NormalizedDocument
from scrapi.processing import DocumentTuple
from scrapi.processing.base import BaseProcessor, BaseHarvesterResponse, BaseDatabaseManager
django.setup()
logger = logging.getLogger(__name__)
class DatabaseManager(BaseDatabaseManager):
'''All database management is performed by django'''
def setup(self):
return True
def tear_down(self):
pass
def clear(self, force=False):
pass
def celery_setup(self, *args, **kwargs):
pass
def paginated(query, page_size=10):
for offset in range(0, query.count(), page_size):
for doc in query[offset:offset + page_size]:
yield doc
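# Example (illustrative only): iterate a large queryset in fixed-size slices
# without loading every row at once; ``handle`` is a hypothetical callback.
#
#   for doc in paginated(Document.objects.filter(source='osf'), page_size=50):
#       handle(doc)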
class PostgresProcessor(BaseProcessor):
NAME = 'postgres'
manager = DatabaseManager()
def documents(self, *sources):
q = Document.objects.all()
querysets = (q.filter(source=source) for source in sources) if sources else [q]
for query in querysets:
for doc in paginated(query):
try:
raw = RawDocument(doc.raw, clean=False, validate=False)
except AttributeError as e:
logger.info('{} -- Malformed rawdoc in database, skipping'.format(e))
raw = None
continue
normalized = NormalizedDocument(doc.normalized, validate=False, clean=False) if doc.normalized else None
yield DocumentTuple(raw, normalized)
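    # Example (illustrative only): stream raw/normalized pairs for selected
    # sources; ``processor`` is assumed to be a PostgresProcessor instance.
    #
    #   for raw, normalized in processor.documents('crossref', 'osf'):
    #       print(raw['docID'], normalized is not None)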
def get(self, source, docID):
try:
document = Document.objects.get(source=source, docID=docID)
except Document.DoesNotExist:
return None
raw = RawDocument(document.raw, clean=False, validate=False)
normalized = NormalizedDocument(document.normalized, validate=False, clean=False) if document.normalized else None
return DocumentTuple(raw, normalized)
def delete(self, source, docID):
doc = Document.objects.get(source=source, docID=docID)
doc.delete()
def create(self, attributes):
attributes = json_without_bytes(attributes)
Document.objects.create(
source=attributes['source'],
docID=attributes['docID'],
providerUpdatedDateTime=None,
raw=attributes,
normalized=None
).save()
@property
def HarvesterResponseModel(self):
return HarvesterResponseModel
@events.logged(events.PROCESSING, 'raw.postgres')
def process_raw(self, raw_doc):
document = self.version(raw=raw_doc)
timestamps = raw_doc.get('timestamps')
modified_doc = copy.deepcopy(raw_doc.attributes)
document.raw = modified_doc
document.timestamps = timestamps
document.save()
@events.logged(events.PROCESSING, 'normalized.postgres')
def process_normalized(self, raw_doc, normalized):
document = self.version(raw=raw_doc, normalized=normalized)
timestamps = raw_doc.get('timestamps') or normalized.get('timestamps')
document.raw = raw_doc.attributes
document.timestamps = timestamps
document.normalized = normalized.attributes
document.providerUpdatedDateTime = normalized['providerUpdatedDateTime']
document.save()
def _get_by_source_id(self, source, docID):
try:
return Document.objects.get(key=Document._make_key(source, docID))
except Document.DoesNotExist:
return None
def version(self, raw=None, normalized=None):
old_doc = self._get_by_source_id(raw['source'], raw['docID'])
if old_doc:
raw_changed = raw and self.different(raw.attributes, old_doc.raw)
norm_changed = normalized and self.different(normalized.attributes, old_doc.normalized)
version = Version(
key=old_doc,
source=old_doc.source,
docID=old_doc.docID,
providerUpdatedDateTime=old_doc.providerUpdatedDateTime,
raw=old_doc.raw,
normalized=old_doc.normalized,
status=old_doc.status,
timestamps=old_doc.timestamps
)
if raw_changed or norm_changed:
version.save()
return old_doc
else:
return Document.objects.create(source=raw['source'], docID=raw['docID'])
def get_versions(self, source, docID):
doc = self._get_by_source_id(source, docID)
for version in doc.version_set.all().order_by('id'):
yield DocumentTuple(
RawDocument(version.raw, clean=False, validate=False),
NormalizedDocument(version.normalized, clean=False, validate=False)
)
yield DocumentTuple(
RawDocument(doc.raw, clean=False, validate=False),
NormalizedDocument(doc.normalized, clean=False, validate=False)
)
class HarvesterResponseModel(BaseHarvesterResponse):
response = None
def __init__(self, *args, **kwargs):
if kwargs:
key = kwargs['method'].lower() + kwargs['url'].lower()
self.response = HarvesterResponse(key=key, *args, **kwargs)
else:
self.response = args[0]
@property
def method(self):
return str(self.response.method)
@property
def url(self):
return str(self.response.url)
@property
def ok(self):
return bool(self.response.ok)
@property
def content(self):
if isinstance(self.response.content, memoryview):
return self.response.content.tobytes()
if isinstance(self.response.content, bytes):
return self.response.content
return str(self.response.content)
@property
def encoding(self):
return str(self.response.encoding)
@property
def headers_str(self):
return str(self.response.headers_str)
@property
def status_code(self):
return int(self.response.status_code)
@property
def time_made(self):
return str(self.response.time_made)
def save(self, *args, **kwargs):
self.response.save()
return self
def update(self, **kwargs):
for k, v in kwargs.items():
setattr(self.response, k, v)
return self.save()
@classmethod
def get(cls, url=None, method=None):
key = method.lower() + url.lower()
try:
return cls(HarvesterResponse.objects.get(key=key))
except HarvesterResponse.DoesNotExist:
raise cls.DoesNotExist
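# Example (illustrative only): cached responses are keyed by the lower-cased
# method concatenated with the lower-cased URL, so
#
#   HarvesterResponseModel.get(url='http://example.com/api', method='GET')
#
# looks up the row whose key is 'gethttp://example.com/api'.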
| erinspace/scrapi | scrapi/processing/postgres.py | Python | apache-2.0 | 6,903 |
from module import *
| Skaper/RMCStudio | images/emotion/eyes/type1/ico/__init__.py | Python | apache-2.0 | 21 |
"""Provide common test tools for Z-Wave JS."""
AIR_TEMPERATURE_SENSOR = "sensor.multisensor_6_air_temperature"
HUMIDITY_SENSOR = "sensor.multisensor_6_humidity"
ENERGY_SENSOR = "sensor.smart_plug_with_two_usb_ports_value_electric_consumed_2"
POWER_SENSOR = "sensor.smart_plug_with_two_usb_ports_value_electric_consumed"
SWITCH_ENTITY = "switch.smart_plug_with_two_usb_ports"
LOW_BATTERY_BINARY_SENSOR = "binary_sensor.multisensor_6_low_battery_level"
ENABLED_LEGACY_BINARY_SENSOR = "binary_sensor.z_wave_door_window_sensor_any"
DISABLED_LEGACY_BINARY_SENSOR = "binary_sensor.multisensor_6_any"
NOTIFICATION_MOTION_BINARY_SENSOR = (
"binary_sensor.multisensor_6_home_security_motion_detection"
)
NOTIFICATION_MOTION_SENSOR = "sensor.multisensor_6_home_security_motion_sensor_status"
PROPERTY_DOOR_STATUS_BINARY_SENSOR = (
"binary_sensor.august_smart_lock_pro_3rd_gen_the_current_status_of_the_door"
)
CLIMATE_RADIO_THERMOSTAT_ENTITY = "climate.z_wave_thermostat"
CLIMATE_DANFOSS_LC13_ENTITY = "climate.living_connect_z_thermostat"
CLIMATE_EUROTRONICS_SPIRIT_Z_ENTITY = "climate.thermostatic_valve"
CLIMATE_FLOOR_THERMOSTAT_ENTITY = "climate.floor_thermostat"
CLIMATE_MAIN_HEAT_ACTIONNER = "climate.main_heat_actionner"
BULB_6_MULTI_COLOR_LIGHT_ENTITY = "light.bulb_6_multi_color"
EATON_RF9640_ENTITY = "light.allloaddimmer"
AEON_SMART_SWITCH_LIGHT_ENTITY = "light.smart_switch_6"
ID_LOCK_CONFIG_PARAMETER_SENSOR = (
"sensor.z_wave_module_for_id_lock_150_and_101_config_parameter_door_lock_mode"
)
| w1ll1am23/home-assistant | tests/components/zwave_js/common.py | Python | apache-2.0 | 1,508 |
# Copyright (c) 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from sahara import conductor as c
from sahara import context
from sahara import exceptions as e
from sahara.i18n import _
from sahara.plugins.general import utils as plugin_utils
from sahara.plugins.spark import config_helper as c_helper
from sahara.service.edp import base_engine
from sahara.service.edp import job_utils
from sahara.service.validations.edp import job_execution as j
from sahara.utils import edp
from sahara.utils import files
from sahara.utils import general
from sahara.utils import remote
conductor = c.API
class SparkJobEngine(base_engine.JobEngine):
def __init__(self, cluster):
self.cluster = cluster
def _get_pid_and_inst_id(self, job_id):
try:
pid, inst_id = job_id.split("@", 1)
if pid and inst_id:
return (pid, inst_id)
except Exception:
pass
return "", ""
def _get_instance_if_running(self, job_execution):
pid, inst_id = self._get_pid_and_inst_id(job_execution.oozie_job_id)
if not pid or not inst_id or (
job_execution.info['status'] in edp.JOB_STATUSES_TERMINATED):
return None, None
# TODO(tmckay): well, if there is a list index out of range
# error here it probably means that the instance is gone. If we
# have a job execution that is not terminated, and the instance
# is gone, we should probably change the status somehow.
# For now, do nothing.
try:
instance = general.get_instances(self.cluster, [inst_id])[0]
except Exception:
instance = None
return pid, instance
def _get_result_file(self, r, job_execution):
result = os.path.join(job_execution.extra['spark-path'], "result")
return r.execute_command("cat %s" % result,
raise_when_error=False)
def _check_pid(self, r, pid):
ret, stdout = r.execute_command("ps hp %s" % pid,
raise_when_error=False)
return ret
def _get_job_status_from_remote(self, r, pid, job_execution):
# If the pid is there, it's still running
if self._check_pid(r, pid) == 0:
return {"status": edp.JOB_STATUS_RUNNING}
# The process ended. Look in the result file to get the exit status
ret, stdout = self._get_result_file(r, job_execution)
if ret == 0:
exit_status = stdout.strip()
if exit_status == "0":
return {"status": edp.JOB_STATUS_SUCCEEDED}
# SIGINT will yield either -2 or 130
elif exit_status in ["-2", "130"]:
return {"status": edp.JOB_STATUS_KILLED}
# Well, process is done and result is missing or unexpected
return {"status": edp.JOB_STATUS_DONEWITHERROR}
def cancel_job(self, job_execution):
pid, instance = self._get_instance_if_running(job_execution)
if instance is not None:
with remote.get_remote(instance) as r:
ret, stdout = r.execute_command("kill -SIGINT %s" % pid,
raise_when_error=False)
if ret == 0:
# We had some effect, check the status
return self._get_job_status_from_remote(r,
pid, job_execution)
def get_job_status(self, job_execution):
pid, instance = self._get_instance_if_running(job_execution)
if instance is not None:
with remote.get_remote(instance) as r:
return self._get_job_status_from_remote(r, pid, job_execution)
def _job_script(self):
path = "service/edp/resources/launch_command.py"
return files.get_file_text(path)
def run_job(self, job_execution):
ctx = context.ctx()
job = conductor.job_get(ctx, job_execution.job_id)
proxy_configs = job_execution.job_configs.get('proxy_configs')
# We'll always run the driver program on the master
master = plugin_utils.get_instance(self.cluster, "master")
# TODO(tmckay): wf_dir should probably be configurable.
# The only requirement is that the dir is writable by the image user
wf_dir = job_utils.create_workflow_dir(master, '/tmp/spark-edp', job,
job_execution.id)
paths = job_utils.upload_job_files(master, wf_dir, job,
libs_subdir=False,
proxy_configs=proxy_configs)
# We can shorten the paths in this case since we'll run out of wf_dir
paths = [os.path.basename(p) for p in paths]
# TODO(tmckay): for now, paths[0] is always assumed to be the app
# jar and we generate paths in order (mains, then libs).
# When we have a Spark job type, we can require a "main" and set
# the app jar explicitly to be "main"
app_jar = paths.pop(0)
# The rest of the paths will be passed with --jars
additional_jars = ",".join(paths)
if additional_jars:
additional_jars = "--jars " + additional_jars
# Launch the spark job using spark-submit and deploy_mode = client
host = master.hostname()
port = c_helper.get_config_value("Spark", "Master port", self.cluster)
spark_submit = os.path.join(
c_helper.get_config_value("Spark",
"Spark home",
self.cluster),
"bin/spark-submit")
job_class = job_execution.job_configs.configs["edp.java.main_class"]
# TODO(tmckay): we need to clean up wf_dirs on long running clusters
# TODO(tmckay): probably allow for general options to spark-submit
args = " ".join(job_execution.job_configs.get('args', []))
# The redirects of stdout and stderr will preserve output in the wf_dir
cmd = "%s %s --class %s %s --master spark://%s:%s %s" % (
spark_submit,
app_jar,
job_class,
additional_jars,
host,
port,
args)
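        # Example (illustrative only, paths and host made up): the assembled
        # command looks roughly like
        #   /opt/spark/bin/spark-submit app.jar --class org.example.Main \
        #     --jars lib-a.jar,lib-b.jar --master spark://master-host:7077 arg1 arg2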
# If an exception is raised here, the job_manager will mark
# the job failed and log the exception
with remote.get_remote(master) as r:
# Upload the command launch script
launch = os.path.join(wf_dir, "launch_command")
r.write_file_to(launch, self._job_script())
r.execute_command("chmod +x %s" % launch)
ret, stdout = r.execute_command(
"cd %s; ./launch_command %s > /dev/null 2>&1 & echo $!"
% (wf_dir, cmd))
if ret == 0:
# Success, we'll add the wf_dir in job_execution.extra and store
# pid@instance_id as the job id
# We know the job is running so return "RUNNING"
return (stdout.strip() + "@" + master.id,
edp.JOB_STATUS_RUNNING,
{'spark-path': wf_dir})
        # Hmm, no exception but something failed.
# Since we're using backgrounding with redirect, this is unlikely.
raise e.EDPError(_("Spark job execution failed. Exit status = "
"%(status)s, stdout = %(stdout)s") %
{'status': ret, 'stdout': stdout})
def validate_job_execution(self, cluster, job, data):
j.check_main_class_present(data, job)
@staticmethod
def get_possible_job_config(job_type):
return {'job_config': {'configs': [], 'args': []}}
@staticmethod
def get_supported_job_types():
return [edp.JOB_TYPE_SPARK]
| keedio/sahara | sahara/service/edp/spark/engine.py | Python | apache-2.0 | 8,343 |