| text (string, 4 to 1.02M chars) | meta (dict) |
|---|---|
def get_election_year_from_term_class(term_class):
# make sure we've got a string
term_class = str(term_class)
if term_class == '1':
return 2018
elif term_class == '2':
return 2014
elif term_class == '3':
return 2016
return None
def get_term_class_from_election_year(election_year):
# make sure we've got an int
election_year = int(election_year)
if election_year in (2018, 2012, 2006, 2000):
return '1'
elif election_year in (2014, 2008, 2002):
return '2'
elif election_year in (2016, 2010, 2004):
return '3'
return None
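# Illustrative round-trip check (not part of the original module): each Senate
# class maps to a single election year in the 2014-2018 window, so the two
# helpers should invert each other there.
if __name__ == '__main__':
    assert get_election_year_from_term_class('2') == 2014
    assert get_term_class_from_election_year(2016) == '3'
    assert get_term_class_from_election_year(get_election_year_from_term_class('1')) == '1'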
|
{
"content_hash": "00b4173a5e9f268f2317a89b3854355f",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 53,
"avg_line_length": 29.38095238095238,
"alnum_prop": 0.6094003241491086,
"repo_name": "sunlightlabs/read_FEC",
"id": "69dec00d9733932d8a5b448f661fbee437aaa4f2",
"size": "680",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fecreader/summary_data/utils/term_reference.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "27432"
},
{
"name": "HTML",
"bytes": "357960"
},
{
"name": "JavaScript",
"bytes": "129989"
},
{
"name": "Python",
"bytes": "1881514"
},
{
"name": "Shell",
"bytes": "10604"
}
],
"symlink_target": ""
}
|
import wtforms.fields.html5
from flask.ext import wtf
class SettingsForm(wtf.Form):
email = wtforms.fields.html5.EmailField(
'E-mail', [wtforms.validators.Optional(), wtforms.validators.Email()],
description='E-mail address that will be used for notifications.')
submit = wtforms.SubmitField('Save')
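# Illustrative usage sketch (assumption, not part of the original file): with
# Flask-WTF the form is bound to the current request and validated in a view
# before the address is stored, e.g.
#
#     form = SettingsForm()
#     if form.validate_on_submit():
#         save_notification_email(form.email.data)   # hypothetical helper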
|
{
"content_hash": "aef5157b796355b64012f34167191bd6",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 78,
"avg_line_length": 36.22222222222222,
"alnum_prop": 0.7208588957055214,
"repo_name": "aromanovich/kozmic-ci",
"id": "0f590baf0d61bfd35f02cdd06a7716758f4bf613",
"size": "342",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "kozmic/accounts/forms.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "8835"
},
{
"name": "HTML",
"bytes": "25900"
},
{
"name": "JavaScript",
"bytes": "2117"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "299139"
},
{
"name": "Shell",
"bytes": "4475"
}
],
"symlink_target": ""
}
|
"""
Created on Jun 19, 2012
"""
from __future__ import division
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "[email protected]"
__date__ = "Jun 19, 2012"
import unittest
import os
import warnings
from pymongo import MongoClient
from pymongo.errors import ConnectionFailure
from pymatgen.apps.borg.queen import BorgQueen
from pymatgen.entries.computed_entries import ComputedEntry
from pymatgen.electronic_structure.dos import CompleteDos
from pymatgen.core.structure import Structure
from matgendb.query_engine import QueryEngine
from matgendb.creator import VaspToDbTaskDrone
test_dir = os.path.join(os.path.dirname(__file__), "..", "..",
'test_files')
class VaspToDbTaskDroneTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
try:
cls.conn = MongoClient()
except ConnectionFailure:
cls.conn = None
def test_get_valid_paths(self):
drone = VaspToDbTaskDrone(simulate_mode=True)
all_paths = []
for path in os.walk(os.path.join(test_dir, 'db_test')):
all_paths.extend(drone.get_valid_paths(path))
self.assertEqual(len(all_paths), 6)
def test_to_from_dict(self):
drone = VaspToDbTaskDrone(database="wacky", simulate_mode=True)
d = drone.as_dict()
drone = VaspToDbTaskDrone.from_dict(d)
self.assertTrue(drone.simulate)
self.assertEqual(drone.database, "wacky")
def test_assimilate(self):
"""Borg assimilation code.
This takes too long for a unit test!
"""
simulate = VaspToDbTaskDroneTest.conn is None
drone = VaspToDbTaskDrone(database="creator_unittest",
simulate_mode=simulate,
parse_dos=True, compress_dos=1)
queen = BorgQueen(drone)
queen.serial_assimilate(os.path.join(test_dir, 'db_test'))
data = queen.get_data()
self.assertEqual(len(data), 6)
if VaspToDbTaskDroneTest.conn:
db = VaspToDbTaskDroneTest.conn["creator_unittest"]
data = db.tasks.find()
self.assertEqual(data.count(), 6)
warnings.warn("Actual db insertion mode.")
for d in data:
dir_name = d['dir_name']
if dir_name.endswith("killed_mp_aflow"):
self.assertEqual(d['state'], "killed")
self.assertFalse(d['is_hubbard'])
self.assertEqual(d['pretty_formula'], "SiO2")
elif dir_name.endswith("stopped_mp_aflow"):
self.assertEqual(d['state'], "stopped")
self.assertEqual(d['pretty_formula'], "ThFe5P3")
elif dir_name.endswith("success_mp_aflow"):
self.assertEqual(d['state'], "successful")
self.assertEqual(d['pretty_formula'], "TbZn(BO2)5")
self.assertAlmostEqual(d['output']['final_energy'],
-526.66747274, 4)
elif dir_name.endswith("Li2O_aflow"):
self.assertEqual(d['state'], "successful")
self.assertEqual(d['pretty_formula'], "Li2O")
self.assertAlmostEqual(d['output']['final_energy'],
-14.31446494, 6)
self.assertEqual(len(d["calculations"]), 2)
self.assertEqual(d['input']['is_lasph'], False)
self.assertEqual(d['input']['xc_override'], None)
elif dir_name.endswith("Li2O"):
self.assertEqual(d['state'], "successful")
self.assertEqual(d['pretty_formula'], "Li2O")
self.assertAlmostEqual(d['output']['final_energy'],
-14.31337758, 6)
self.assertEqual(len(d["calculations"]), 1)
self.assertEqual(len(d["custodian"]), 1)
self.assertEqual(len(d["custodian"][0]["corrections"]), 1)
elif dir_name.endswith("Li2O_aflow_lasph"):
self.assertEqual(d['state'], "successful")
self.assertEqual(d['pretty_formula'], "Li2O")
self.assertAlmostEqual(d['output']['final_energy'],
-13.998171, 6)
self.assertEqual(len(d["calculations"]), 2)
self.assertEqual(d['input']['is_lasph'], True)
self.assertEqual(d['input']['xc_override'], "PS")
if VaspToDbTaskDroneTest.conn:
warnings.warn("Testing query engine mode.")
qe = QueryEngine(database="creator_unittest")
self.assertEqual(qe.query().count(), 6)
#Test mappings by query engine.
for r in qe.query(criteria={"pretty_formula": "Li2O"},
properties=["dir_name", "energy",
"calculations", "input"]):
if r["dir_name"].endswith("Li2O_aflow"):
self.assertAlmostEqual(r['energy'], -14.31446494, 4)
self.assertEqual(len(r["calculations"]), 2)
self.assertEqual(r["input"]["is_lasph"], False)
self.assertEqual(r['input']['xc_override'], None)
elif r["dir_name"].endswith("Li2O"):
self.assertAlmostEqual(r['energy'],
-14.31337758, 4)
self.assertEqual(len(r["calculations"]), 1)
self.assertEqual(r["input"]["is_lasph"], False)
self.assertEqual(r['input']['xc_override'], None)
#Test lasph
e = qe.get_entries({"dir_name":{"$regex":"lasph"}})
self.assertEqual(len(e), 1)
self.assertEqual(e[0].parameters["is_lasph"], True)
self.assertEqual(e[0].parameters["xc_override"], "PS")
# Test query one.
d = qe.query_one(criteria={"pretty_formula": "TbZn(BO2)5"},
properties=["energy"])
self.assertAlmostEqual(d['energy'], -526.66747274, 4)
d = qe.get_entries_in_system(["Li", "O"])
self.assertEqual(len(d), 3)
self.assertIsInstance(d[0], ComputedEntry)
s = qe.get_structure_from_id(d[0].entry_id)
self.assertIsInstance(s, Structure)
self.assertEqual(s.formula, "Li2 O1")
self.assertIsInstance(qe.get_dos_from_id(d[0].entry_id), CompleteDos)
@classmethod
def tearDownClass(cls):
if cls.conn is not None:
cls.conn.drop_database("creator_unittest")
if __name__ == "__main__":
unittest.main()
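# Illustrative standalone usage (assumption, mirroring the assertions above):
#
#     qe = QueryEngine(database="creator_unittest")
#     entries = qe.get_entries_in_system(["Li", "O"])
#     structure = qe.get_structure_from_id(entries[0].entry_id)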
|
{
"content_hash": "a2244a588323905a4d573678680a91ab",
"timestamp": "",
"source": "github",
"line_count": 161,
"max_line_length": 81,
"avg_line_length": 42.01242236024845,
"alnum_prop": 0.5526315789473685,
"repo_name": "migueldiascosta/pymatgen-db",
"id": "c29be768bce48620a42102d86a2568128821190e",
"size": "6787",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "matgendb/tests/test_creator_and_query_engine.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ActionScript",
"bytes": "31964"
},
{
"name": "CSS",
"bytes": "212547"
},
{
"name": "HTML",
"bytes": "2280625"
},
{
"name": "JavaScript",
"bytes": "1312249"
},
{
"name": "PHP",
"bytes": "9068"
},
{
"name": "Python",
"bytes": "344019"
},
{
"name": "Shell",
"bytes": "419"
}
],
"symlink_target": ""
}
|
from asynq import AsyncTask, asynq
from asynq.generator import (
END_OF_GENERATOR,
async_generator,
list_of_generator,
take_first,
Value,
)
from qcore.asserts import assert_eq, assert_is, assert_is_instance, AssertRaises
def test_value():
val = Value("value")
assert_eq("value", val.value)
assert_eq("<Value: 'value'>", repr(val))
@asynq()
def alen(seq):
return len(seq)
@async_generator()
def generator():
for value in ([], [1], [1, 2]):
length = yield alen.asynq(value)
yield Value(length)
@async_generator()
def generator_with_more_yields():
for task in generator():
value = yield task
value = yield alen.asynq([value] * value)
yield Value(value)
yield alen.asynq([1, 2])
@async_generator()
def generator_without_yields():
for i in range(3):
yield Value(i)
def test_list_of_generator():
assert_eq([0, 1, 2], list_of_generator(generator()))
assert_eq([0, 1, 2], list_of_generator(generator_with_more_yields()))
def test_take_first():
gen = generator()
assert_eq([0], take_first(gen, 1))
assert_eq([1, 2], take_first(gen, 2))
assert_eq([], take_first(gen, 3))
gen = generator_with_more_yields()
assert_eq([0, 1, 2], take_first(gen, 4))
gen = generator_without_yields()
assert_eq([0, 1, 2], take_first(gen, 4))
def test_must_compute():
gen = generator()
next(gen)
with AssertRaises(RuntimeError):
next(gen)
def test_values():
gen = generator_with_more_yields()
for i in range(3):
task = next(gen)
assert_is_instance(task, AsyncTask)
assert_eq(i, task.value())
task = next(gen)
assert_is_instance(task, AsyncTask)
assert_is(END_OF_GENERATOR, task.value())
with AssertRaises(StopIteration):
next(gen)
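# Illustrative direct usage (not part of the original tests): list_of_generator
# drains an @async_generator() and collects the values it emits, while
# take_first pulls a bounded prefix; both checks below mirror assertions made
# in the tests above.
if __name__ == '__main__':
    assert list_of_generator(generator()) == [0, 1, 2]
    assert take_first(generator_without_yields(), 4) == [0, 1, 2]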
|
{
"content_hash": "5b82e8bb9d46682ee2d30ad8f5710508",
"timestamp": "",
"source": "github",
"line_count": 82,
"max_line_length": 80,
"avg_line_length": 22.402439024390244,
"alnum_prop": 0.617855198693522,
"repo_name": "quora/asynq",
"id": "f9c33688bf8e1f2ae58f6eb9cbac361f9da86144",
"size": "2411",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "asynq/tests/test_generator.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Cython",
"bytes": "14533"
},
{
"name": "Python",
"bytes": "215286"
}
],
"symlink_target": ""
}
|
"""
Tests for L{twisted.protocols.amp}.
"""
import datetime
import decimal
from zope.interface import implements
from zope.interface.verify import verifyClass, verifyObject
from twisted.python.util import setIDFunction
from twisted.python import filepath
from twisted.python.failure import Failure
from twisted.protocols import amp
from twisted.trial import unittest
from twisted.internet import protocol, defer, error, reactor, interfaces
from twisted.test import iosim
from twisted.test.proto_helpers import StringTransport
ssl = None
try:
from twisted.internet import ssl
except ImportError:
pass
if ssl and not ssl.supported:
ssl = None
if ssl is None:
skipSSL = "SSL not available"
else:
skipSSL = None
class TestProto(protocol.Protocol):
"""
A trivial protocol for use in testing where a L{Protocol} is expected.
@ivar instanceId: the id of this instance
@ivar onConnLost: deferred that will be fired when the connection is lost
@ivar dataToSend: data to send on the protocol
"""
instanceCount = 0
def __init__(self, onConnLost, dataToSend):
self.onConnLost = onConnLost
self.dataToSend = dataToSend
self.instanceId = TestProto.instanceCount
TestProto.instanceCount = TestProto.instanceCount + 1
def connectionMade(self):
self.data = []
self.transport.write(self.dataToSend)
def dataReceived(self, bytes):
self.data.append(bytes)
def connectionLost(self, reason):
self.onConnLost.callback(self.data)
def __repr__(self):
"""
Custom repr for testing to avoid coupling amp tests with repr from
L{Protocol}
Returns a string which contains a unique identifier that can be looked
up using the instanceId property::
<TestProto #3>
"""
return "<TestProto #%d>" % (self.instanceId,)
class SimpleSymmetricProtocol(amp.AMP):
def sendHello(self, text):
return self.callRemoteString(
"hello",
hello=text)
def amp_HELLO(self, box):
return amp.Box(hello=box['hello'])
def amp_HOWDOYOUDO(self, box):
return amp.QuitBox(howdoyoudo='world')
class UnfriendlyGreeting(Exception):
"""Greeting was insufficiently kind.
"""
class DeathThreat(Exception):
"""Greeting was insufficiently kind.
"""
class UnknownProtocol(Exception):
"""Asked to switch to the wrong protocol.
"""
class TransportPeer(amp.Argument):
# this serves as some informal documentation for how to get variables from
# the protocol or your environment and pass them to methods as arguments.
def retrieve(self, d, name, proto):
return ''
def fromStringProto(self, notAString, proto):
return proto.transport.getPeer()
def toBox(self, name, strings, objects, proto):
return
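# Illustrative sketch (assumption, not part of the original file): a custom
# Argument that is filled in from the protocol rather than from the wire
# follows the same shape as TransportPeer above -- ignore the incoming string
# in fromStringProto() and emit nothing in toBox(), e.g.
#
#     class LocalHost(amp.Argument):
#         def fromStringProto(self, notAString, proto):
#             return proto.transport.getHost()
#         def toBox(self, name, strings, objects, proto):
#             return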
class Hello(amp.Command):
commandName = 'hello'
arguments = [('hello', amp.String()),
('optional', amp.Boolean(optional=True)),
('print', amp.Unicode(optional=True)),
('from', TransportPeer(optional=True)),
('mixedCase', amp.String(optional=True)),
('dash-arg', amp.String(optional=True)),
('underscore_arg', amp.String(optional=True))]
response = [('hello', amp.String()),
('print', amp.Unicode(optional=True))]
errors = {UnfriendlyGreeting: 'UNFRIENDLY'}
fatalErrors = {DeathThreat: 'DEAD'}
class NoAnswerHello(Hello):
commandName = Hello.commandName
requiresAnswer = False
class FutureHello(amp.Command):
commandName = 'hello'
arguments = [('hello', amp.String()),
('optional', amp.Boolean(optional=True)),
('print', amp.Unicode(optional=True)),
('from', TransportPeer(optional=True)),
('bonus', amp.String(optional=True)), # addt'l arguments
# should generally be
# added at the end, and
# be optional...
]
response = [('hello', amp.String()),
('print', amp.Unicode(optional=True))]
errors = {UnfriendlyGreeting: 'UNFRIENDLY'}
class WTF(amp.Command):
"""
An example of an invalid command.
"""
class BrokenReturn(amp.Command):
""" An example of a perfectly good command, but the handler is going to return
None...
"""
commandName = 'broken_return'
class Goodbye(amp.Command):
# commandName left blank on purpose: this tests implicit command names.
response = [('goodbye', amp.String())]
responseType = amp.QuitBox
class Howdoyoudo(amp.Command):
commandName = 'howdoyoudo'
# responseType = amp.QuitBox
class WaitForever(amp.Command):
commandName = 'wait_forever'
class GetList(amp.Command):
commandName = 'getlist'
arguments = [('length', amp.Integer())]
response = [('body', amp.AmpList([('x', amp.Integer())]))]
class DontRejectMe(amp.Command):
commandName = 'dontrejectme'
arguments = [
('magicWord', amp.Unicode()),
('list', amp.AmpList([('name', amp.Unicode())], optional=True)),
]
response = [('response', amp.Unicode())]
class SecuredPing(amp.Command):
# XXX TODO: actually make this refuse to send over an insecure connection
response = [('pinged', amp.Boolean())]
class TestSwitchProto(amp.ProtocolSwitchCommand):
commandName = 'Switch-Proto'
arguments = [
('name', amp.String()),
]
errors = {UnknownProtocol: 'UNKNOWN'}
class SingleUseFactory(protocol.ClientFactory):
def __init__(self, proto):
self.proto = proto
self.proto.factory = self
def buildProtocol(self, addr):
p, self.proto = self.proto, None
return p
reasonFailed = None
def clientConnectionFailed(self, connector, reason):
self.reasonFailed = reason
return
THING_I_DONT_UNDERSTAND = 'gwebol nargo'
class ThingIDontUnderstandError(Exception):
pass
class FactoryNotifier(amp.AMP):
factory = None
def connectionMade(self):
if self.factory is not None:
self.factory.theProto = self
if hasattr(self.factory, 'onMade'):
self.factory.onMade.callback(None)
def emitpong(self):
from twisted.internet.interfaces import ISSLTransport
if not ISSLTransport.providedBy(self.transport):
raise DeathThreat("only send secure pings over secure channels")
return {'pinged': True}
SecuredPing.responder(emitpong)
class SimpleSymmetricCommandProtocol(FactoryNotifier):
maybeLater = None
def __init__(self, onConnLost=None):
amp.AMP.__init__(self)
self.onConnLost = onConnLost
def sendHello(self, text):
return self.callRemote(Hello, hello=text)
def sendUnicodeHello(self, text, translation):
return self.callRemote(Hello, hello=text, Print=translation)
greeted = False
def cmdHello(self, hello, From, optional=None, Print=None,
mixedCase=None, dash_arg=None, underscore_arg=None):
assert From == self.transport.getPeer()
if hello == THING_I_DONT_UNDERSTAND:
raise ThingIDontUnderstandError()
if hello.startswith('fuck'):
raise UnfriendlyGreeting("Don't be a dick.")
if hello == 'die':
raise DeathThreat("aieeeeeeeee")
result = dict(hello=hello)
if Print is not None:
result.update(dict(Print=Print))
self.greeted = True
return result
Hello.responder(cmdHello)
def cmdGetlist(self, length):
return {'body': [dict(x=1)] * length}
GetList.responder(cmdGetlist)
def okiwont(self, magicWord, list=None):
if list is None:
response = u'list omitted'
else:
response = u'%s accepted' % (list[0]['name'])
return dict(response=response)
DontRejectMe.responder(okiwont)
def waitforit(self):
self.waiting = defer.Deferred()
return self.waiting
WaitForever.responder(waitforit)
def howdo(self):
return dict(howdoyoudo='world')
Howdoyoudo.responder(howdo)
def saybye(self):
return dict(goodbye="everyone")
Goodbye.responder(saybye)
def switchToTestProtocol(self, fail=False):
if fail:
name = 'no-proto'
else:
name = 'test-proto'
p = TestProto(self.onConnLost, SWITCH_CLIENT_DATA)
return self.callRemote(
TestSwitchProto,
SingleUseFactory(p), name=name).addCallback(lambda ign: p)
def switchit(self, name):
if name == 'test-proto':
return TestProto(self.onConnLost, SWITCH_SERVER_DATA)
raise UnknownProtocol(name)
TestSwitchProto.responder(switchit)
def donothing(self):
return None
BrokenReturn.responder(donothing)
class DeferredSymmetricCommandProtocol(SimpleSymmetricCommandProtocol):
def switchit(self, name):
if name == 'test-proto':
self.maybeLaterProto = TestProto(self.onConnLost, SWITCH_SERVER_DATA)
self.maybeLater = defer.Deferred()
return self.maybeLater
raise UnknownProtocol(name)
TestSwitchProto.responder(switchit)
class BadNoAnswerCommandProtocol(SimpleSymmetricCommandProtocol):
def badResponder(self, hello, From, optional=None, Print=None,
mixedCase=None, dash_arg=None, underscore_arg=None):
"""
This responder does nothing and forgets to return a dictionary.
"""
NoAnswerHello.responder(badResponder)
class NoAnswerCommandProtocol(SimpleSymmetricCommandProtocol):
def goodNoAnswerResponder(self, hello, From, optional=None, Print=None,
mixedCase=None, dash_arg=None, underscore_arg=None):
return dict(hello=hello+"-noanswer")
NoAnswerHello.responder(goodNoAnswerResponder)
def connectedServerAndClient(ServerClass=SimpleSymmetricProtocol,
ClientClass=SimpleSymmetricProtocol,
*a, **kw):
"""Returns a 3-tuple: (client, server, pump)
"""
return iosim.connectedServerAndClient(
ServerClass, ClientClass,
*a, **kw)
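# Usage note (summary of how the tests below drive this helper, not part of the
# original file): the returned pump object queues bytes between the in-memory
# client and server transports, and pump.flush() delivers them in both
# directions, which is why the tests call p.flush() after each callRemote, e.g.
#
#     c, s, p = connectedServerAndClient()
#     c.sendHello('world').addCallback(results.append)
#     p.flush()   # shuttle the queued bytes so the answer arrives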
class TotallyDumbProtocol(protocol.Protocol):
buf = ''
def dataReceived(self, data):
self.buf += data
class LiteralAmp(amp.AMP):
def __init__(self):
self.boxes = []
def ampBoxReceived(self, box):
self.boxes.append(box)
return
class AmpBoxTests(unittest.TestCase):
"""
Test a few essential properties of AMP boxes, mostly with respect to
serialization correctness.
"""
def test_serializeStr(self):
"""
Make sure that strs serialize to strs.
"""
a = amp.AmpBox(key='value')
self.assertEqual(type(a.serialize()), str)
def test_serializeUnicodeKeyRaises(self):
"""
Verify that TypeError is raised when trying to serialize Unicode keys.
"""
a = amp.AmpBox(**{u'key': 'value'})
self.assertRaises(TypeError, a.serialize)
def test_serializeUnicodeValueRaises(self):
"""
Verify that TypeError is raised when trying to serialize Unicode
values.
"""
a = amp.AmpBox(key=u'value')
self.assertRaises(TypeError, a.serialize)
class ParsingTest(unittest.TestCase):
def test_booleanValues(self):
"""
Verify that the Boolean parser parses 'True' and 'False', but nothing
else.
"""
b = amp.Boolean()
self.assertEqual(b.fromString("True"), True)
self.assertEqual(b.fromString("False"), False)
self.assertRaises(TypeError, b.fromString, "ninja")
self.assertRaises(TypeError, b.fromString, "true")
self.assertRaises(TypeError, b.fromString, "TRUE")
self.assertEqual(b.toString(True), 'True')
self.assertEqual(b.toString(False), 'False')
def test_pathValueRoundTrip(self):
"""
Verify the 'Path' argument can parse and emit a file path.
"""
fp = filepath.FilePath(self.mktemp())
p = amp.Path()
s = p.toString(fp)
v = p.fromString(s)
self.assertNotIdentical(fp, v) # sanity check
self.assertEqual(fp, v)
def test_sillyEmptyThing(self):
"""
Test that empty boxes raise an error; they aren't supposed to be sent
on purpose.
"""
a = amp.AMP()
return self.assertRaises(amp.NoEmptyBoxes, a.ampBoxReceived, amp.Box())
def test_ParsingRoundTrip(self):
"""
Verify that various kinds of data make it through the encode/parse
round-trip unharmed.
"""
c, s, p = connectedServerAndClient(ClientClass=LiteralAmp,
ServerClass=LiteralAmp)
SIMPLE = ('simple', 'test')
CE = ('ceq', ': ')
CR = ('crtest', 'test\r')
LF = ('lftest', 'hello\n')
NEWLINE = ('newline', 'test\r\none\r\ntwo')
NEWLINE2 = ('newline2', 'test\r\none\r\n two')
BODYTEST = ('body', 'blah\r\n\r\ntesttest')
testData = [
[SIMPLE],
[SIMPLE, BODYTEST],
[SIMPLE, CE],
[SIMPLE, CR],
[SIMPLE, CE, CR, LF],
[CE, CR, LF],
[SIMPLE, NEWLINE, CE, NEWLINE2],
[BODYTEST, SIMPLE, NEWLINE]
]
for test in testData:
jb = amp.Box()
jb.update(dict(test))
jb._sendTo(c)
p.flush()
self.assertEqual(s.boxes[-1], jb)
class FakeLocator(object):
"""
This is a fake implementation of the interface implied by
L{CommandLocator}.
"""
def __init__(self):
"""
Remember the given keyword arguments as a set of responders.
"""
self.commands = {}
def locateResponder(self, commandName):
"""
Look up and return a function passed as a keyword argument of the given
name to the constructor.
"""
return self.commands[commandName]
class FakeSender:
"""
This is a fake implementation of the 'box sender' interface implied by
L{AMP}.
"""
def __init__(self):
"""
Create a fake sender and initialize the list of received boxes and
unhandled errors.
"""
self.sentBoxes = []
self.unhandledErrors = []
self.expectedErrors = 0
def expectError(self):
"""
Expect one error, so that the test doesn't fail.
"""
self.expectedErrors += 1
def sendBox(self, box):
"""
Accept a box, but don't do anything.
"""
self.sentBoxes.append(box)
def unhandledError(self, failure):
"""
Deal with failures by instantly re-raising them for easier debugging.
"""
self.expectedErrors -= 1
if self.expectedErrors < 0:
failure.raiseException()
else:
self.unhandledErrors.append(failure)
class CommandDispatchTests(unittest.TestCase):
"""
The AMP CommandDispatcher class converts AMP boxes into commands
and responses using the Command.responder decorator.
Note: Originally, AMP's factoring was such that many tests for this
functionality are now implemented as full round-trip tests in L{AMPTest}.
Future tests should be written at this level instead, to ensure API
compatibility and to provide more granular, readable units of test
coverage.
"""
def setUp(self):
"""
Create a dispatcher to use.
"""
self.locator = FakeLocator()
self.sender = FakeSender()
self.dispatcher = amp.BoxDispatcher(self.locator)
self.dispatcher.startReceivingBoxes(self.sender)
def test_receivedAsk(self):
"""
L{CommandDispatcher.ampBoxReceived} should locate the appropriate
command in its responder lookup, based on the '_ask' key.
"""
received = []
def thunk(box):
received.append(box)
return amp.Box({"hello": "goodbye"})
input = amp.Box(_command="hello",
_ask="test-command-id",
hello="world")
self.locator.commands['hello'] = thunk
self.dispatcher.ampBoxReceived(input)
self.assertEqual(received, [input])
def test_sendUnhandledError(self):
"""
L{CommandDispatcher} should relay its unhandled errors in responding to
boxes to its boxSender.
"""
err = RuntimeError("something went wrong, oh no")
self.sender.expectError()
self.dispatcher.unhandledError(Failure(err))
self.assertEqual(len(self.sender.unhandledErrors), 1)
self.assertEqual(self.sender.unhandledErrors[0].value, err)
def test_unhandledSerializationError(self):
"""
Errors during serialization ought to be relayed to the sender's
unhandledError method.
"""
err = RuntimeError("something undefined went wrong")
def thunk(result):
class BrokenBox(amp.Box):
def _sendTo(self, proto):
raise err
return BrokenBox()
self.locator.commands['hello'] = thunk
input = amp.Box(_command="hello",
_ask="test-command-id",
hello="world")
self.sender.expectError()
self.dispatcher.ampBoxReceived(input)
self.assertEqual(len(self.sender.unhandledErrors), 1)
self.assertEqual(self.sender.unhandledErrors[0].value, err)
def test_callRemote(self):
"""
L{CommandDispatcher.callRemote} should emit a properly formatted '_ask'
box to its boxSender and record an outstanding L{Deferred}. When a
corresponding '_answer' packet is received, the L{Deferred} should be
fired, and the results translated via the given L{Command}'s response
de-serialization.
"""
D = self.dispatcher.callRemote(Hello, hello='world')
self.assertEqual(self.sender.sentBoxes,
[amp.AmpBox(_command="hello",
_ask="1",
hello="world")])
answers = []
D.addCallback(answers.append)
self.assertEqual(answers, [])
self.dispatcher.ampBoxReceived(amp.AmpBox({'hello': "yay",
'print': "ignored",
'_answer': "1"}))
self.assertEqual(answers, [dict(hello="yay",
Print=u"ignored")])
def _localCallbackErrorLoggingTest(self, callResult):
"""
Verify that C{callResult} completes with a C{None} result and that an
unhandled error has been logged.
"""
finalResult = []
callResult.addBoth(finalResult.append)
self.assertEqual(1, len(self.sender.unhandledErrors))
self.assertIsInstance(
self.sender.unhandledErrors[0].value, ZeroDivisionError)
self.assertEqual([None], finalResult)
def test_callRemoteSuccessLocalCallbackErrorLogging(self):
"""
If the last callback on the L{Deferred} returned by C{callRemote} (added
by application code calling C{callRemote}) fails, the failure is passed
to the sender's C{unhandledError} method.
"""
self.sender.expectError()
callResult = self.dispatcher.callRemote(Hello, hello='world')
callResult.addCallback(lambda result: 1 // 0)
self.dispatcher.ampBoxReceived(amp.AmpBox({
'hello': "yay", 'print': "ignored", '_answer': "1"}))
self._localCallbackErrorLoggingTest(callResult)
def test_callRemoteErrorLocalCallbackErrorLogging(self):
"""
Like L{test_callRemoteSuccessLocalCallbackErrorLogging}, but for the
case where the L{Deferred} returned by C{callRemote} fails.
"""
self.sender.expectError()
callResult = self.dispatcher.callRemote(Hello, hello='world')
callResult.addErrback(lambda result: 1 // 0)
self.dispatcher.ampBoxReceived(amp.AmpBox({
'_error': '1', '_error_code': 'bugs',
'_error_description': 'stuff'}))
self._localCallbackErrorLoggingTest(callResult)
class SimpleGreeting(amp.Command):
"""
A very simple greeting command that uses a few basic argument types.
"""
commandName = 'simple'
arguments = [('greeting', amp.Unicode()),
('cookie', amp.Integer())]
response = [('cookieplus', amp.Integer())]
class TestLocator(amp.CommandLocator):
"""
A locator which implements a responder to the 'simple' command.
"""
def __init__(self):
self.greetings = []
def greetingResponder(self, greeting, cookie):
self.greetings.append((greeting, cookie))
return dict(cookieplus=cookie + 3)
greetingResponder = SimpleGreeting.responder(greetingResponder)
class OverridingLocator(TestLocator):
"""
A locator which overrides the responder to the 'simple' command.
"""
def greetingResponder(self, greeting, cookie):
"""
Return a different cookieplus than L{TestLocator.greetingResponder}.
"""
self.greetings.append((greeting, cookie))
return dict(cookieplus=cookie + 4)
greetingResponder = SimpleGreeting.responder(greetingResponder)
class InheritingLocator(OverridingLocator):
"""
This locator should inherit the responder from L{OverridingLocator}.
"""
class OverrideLocatorAMP(amp.AMP):
def __init__(self):
amp.AMP.__init__(self)
self.customResponder = object()
self.expectations = {"custom": self.customResponder}
self.greetings = []
def lookupFunction(self, name):
"""
Override the deprecated lookupFunction function.
"""
if name in self.expectations:
result = self.expectations[name]
return result
else:
return super(OverrideLocatorAMP, self).lookupFunction(name)
def greetingResponder(self, greeting, cookie):
self.greetings.append((greeting, cookie))
return dict(cookieplus=cookie + 3)
greetingResponder = SimpleGreeting.responder(greetingResponder)
class CommandLocatorTests(unittest.TestCase):
"""
The CommandLocator should enable users to specify responders to commands as
functions that take structured objects, annotated with metadata.
"""
def _checkSimpleGreeting(self, locatorClass, expected):
"""
Check that a locator of type C{locatorClass} finds a responder
for command named I{simple} and that the found responder answers
with the C{expected} result to a C{SimpleGreeting<"ni hao", 5>}
command.
"""
locator = locatorClass()
responderCallable = locator.locateResponder("simple")
result = responderCallable(amp.Box(greeting="ni hao", cookie="5"))
def done(values):
self.assertEqual(values, amp.AmpBox(cookieplus=str(expected)))
return result.addCallback(done)
def test_responderDecorator(self):
"""
A method on a L{CommandLocator} subclass decorated with a L{Command}
subclass's L{responder} decorator should be returned from
locateResponder, wrapped in logic to serialize and deserialize its
arguments.
"""
return self._checkSimpleGreeting(TestLocator, 8)
def test_responderOverriding(self):
"""
L{CommandLocator} subclasses can override a responder inherited from
a base class by using the L{Command.responder} decorator to register
a new responder method.
"""
return self._checkSimpleGreeting(OverridingLocator, 9)
def test_responderInheritance(self):
"""
Responder lookup follows the same rules as normal method lookup
rules, particularly with respect to inheritance.
"""
return self._checkSimpleGreeting(InheritingLocator, 9)
def test_lookupFunctionDeprecatedOverride(self):
"""
Subclasses which override locateResponder under its old name,
lookupFunction, should have the override invoked instead. (This tests
an AMP subclass, because in the version of the code that could invoke
this deprecated code path, there was no L{CommandLocator}.)
"""
locator = OverrideLocatorAMP()
customResponderObject = self.assertWarns(
PendingDeprecationWarning,
"Override locateResponder, not lookupFunction.",
__file__, lambda : locator.locateResponder("custom"))
self.assertEqual(locator.customResponder, customResponderObject)
# Make sure upcalling works too
normalResponderObject = self.assertWarns(
PendingDeprecationWarning,
"Override locateResponder, not lookupFunction.",
__file__, lambda : locator.locateResponder("simple"))
result = normalResponderObject(amp.Box(greeting="ni hao", cookie="5"))
def done(values):
self.assertEqual(values, amp.AmpBox(cookieplus='8'))
return result.addCallback(done)
def test_lookupFunctionDeprecatedInvoke(self):
"""
Invoking locateResponder under its old name, lookupFunction, should
emit a deprecation warning, but do the same thing.
"""
locator = TestLocator()
responderCallable = self.assertWarns(
PendingDeprecationWarning,
"Call locateResponder, not lookupFunction.", __file__,
lambda : locator.lookupFunction("simple"))
result = responderCallable(amp.Box(greeting="ni hao", cookie="5"))
def done(values):
self.assertEqual(values, amp.AmpBox(cookieplus='8'))
return result.addCallback(done)
SWITCH_CLIENT_DATA = 'Success!'
SWITCH_SERVER_DATA = 'No, really. Success.'
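# Wire-format note (inferred from the framing tests below, not part of the
# original file): an AMP box is a sequence of length-prefixed strings in which
# every key and value carries a 16-bit big-endian length prefix and a
# zero-length key terminates the box, e.g.
#
#     '\x00\x03foo\x00\x03bar\x00\x00'   # one box: {'foo': 'bar'}
#
# Keys may be at most 255 bytes and values at most 2 ** 16 - 1 bytes, which is
# what the "excessive length" tests below exercise.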
class BinaryProtocolTests(unittest.TestCase):
"""
Tests for L{amp.BinaryBoxProtocol}.
@ivar _boxSender: After C{startReceivingBoxes} is called, the L{IBoxSender}
which was passed to it.
"""
def setUp(self):
"""
Keep track of all boxes received by this test in its capacity as an
L{IBoxReceiver} implementor.
"""
self.boxes = []
self.data = []
def startReceivingBoxes(self, sender):
"""
Implement L{IBoxReceiver.startReceivingBoxes} to just remember the
value passed in.
"""
self._boxSender = sender
def ampBoxReceived(self, box):
"""
A box was received by the protocol.
"""
self.boxes.append(box)
stopReason = None
def stopReceivingBoxes(self, reason):
"""
Record the reason that we stopped receiving boxes.
"""
self.stopReason = reason
# fake ITransport
def getPeer(self):
return 'no peer'
def getHost(self):
return 'no host'
def write(self, data):
self.data.append(data)
def test_startReceivingBoxes(self):
"""
When L{amp.BinaryBoxProtocol} is connected to a transport, it calls
C{startReceivingBoxes} on its L{IBoxReceiver} with itself as the
L{IBoxSender} parameter.
"""
protocol = amp.BinaryBoxProtocol(self)
protocol.makeConnection(None)
self.assertIdentical(self._boxSender, protocol)
def test_sendBoxInStartReceivingBoxes(self):
"""
The L{IBoxReceiver} which is started when L{amp.BinaryBoxProtocol} is
connected to a transport can call C{sendBox} on the L{IBoxSender}
passed to it before C{startReceivingBoxes} returns and have that box
sent.
"""
class SynchronouslySendingReceiver:
def startReceivingBoxes(self, sender):
sender.sendBox(amp.Box({'foo': 'bar'}))
transport = StringTransport()
protocol = amp.BinaryBoxProtocol(SynchronouslySendingReceiver())
protocol.makeConnection(transport)
self.assertEqual(
transport.value(),
'\x00\x03foo\x00\x03bar\x00\x00')
def test_receiveBoxStateMachine(self):
"""
When a binary box protocol receives:
* a key
* a value
* an empty string
it should emit a box and send it to its boxReceiver.
"""
a = amp.BinaryBoxProtocol(self)
a.stringReceived("hello")
a.stringReceived("world")
a.stringReceived("")
self.assertEqual(self.boxes, [amp.AmpBox(hello="world")])
def test_firstBoxFirstKeyExcessiveLength(self):
"""
L{amp.BinaryBoxProtocol} drops its connection if the length prefix for
the first key it receives is larger than 255.
"""
transport = StringTransport()
protocol = amp.BinaryBoxProtocol(self)
protocol.makeConnection(transport)
protocol.dataReceived('\x01\x00')
self.assertTrue(transport.disconnecting)
def test_firstBoxSubsequentKeyExcessiveLength(self):
"""
L{amp.BinaryBoxProtocol} drops its connection if the length prefix for
a subsequent key in the first box it receives is larger than 255.
"""
transport = StringTransport()
protocol = amp.BinaryBoxProtocol(self)
protocol.makeConnection(transport)
protocol.dataReceived('\x00\x01k\x00\x01v')
self.assertFalse(transport.disconnecting)
protocol.dataReceived('\x01\x00')
self.assertTrue(transport.disconnecting)
def test_subsequentBoxFirstKeyExcessiveLength(self):
"""
L{amp.BinaryBoxProtocol} drops its connection if the length prefix for
the first key in a subsequent box it receives is larger than 255.
"""
transport = StringTransport()
protocol = amp.BinaryBoxProtocol(self)
protocol.makeConnection(transport)
protocol.dataReceived('\x00\x01k\x00\x01v\x00\x00')
self.assertFalse(transport.disconnecting)
protocol.dataReceived('\x01\x00')
self.assertTrue(transport.disconnecting)
def test_excessiveKeyFailure(self):
"""
If L{amp.BinaryBoxProtocol} disconnects because it received a key
length prefix which was too large, the L{IBoxReceiver}'s
C{stopReceivingBoxes} method is called with a L{TooLong} failure.
"""
protocol = amp.BinaryBoxProtocol(self)
protocol.makeConnection(StringTransport())
protocol.dataReceived('\x01\x00')
protocol.connectionLost(
Failure(error.ConnectionDone("simulated connection done")))
self.stopReason.trap(amp.TooLong)
self.assertTrue(self.stopReason.value.isKey)
self.assertFalse(self.stopReason.value.isLocal)
self.assertIdentical(self.stopReason.value.value, None)
self.assertIdentical(self.stopReason.value.keyName, None)
def test_unhandledErrorWithTransport(self):
"""
L{amp.BinaryBoxProtocol.unhandledError} logs the failure passed to it
and disconnects its transport.
"""
transport = StringTransport()
protocol = amp.BinaryBoxProtocol(self)
protocol.makeConnection(transport)
protocol.unhandledError(Failure(RuntimeError("Fake error")))
self.assertEqual(1, len(self.flushLoggedErrors(RuntimeError)))
self.assertTrue(transport.disconnecting)
def test_unhandledErrorWithoutTransport(self):
"""
L{amp.BinaryBoxProtocol.unhandledError} completes without error when
there is no associated transport.
"""
protocol = amp.BinaryBoxProtocol(self)
protocol.makeConnection(StringTransport())
protocol.connectionLost(Failure(Exception("Simulated")))
protocol.unhandledError(Failure(RuntimeError("Fake error")))
self.assertEqual(1, len(self.flushLoggedErrors(RuntimeError)))
def test_receiveBoxData(self):
"""
When a binary box protocol receives the serialized form of an AMP box,
it should emit a similar box to its boxReceiver.
"""
a = amp.BinaryBoxProtocol(self)
a.dataReceived(amp.Box({"testKey": "valueTest",
"anotherKey": "anotherValue"}).serialize())
self.assertEqual(self.boxes,
[amp.Box({"testKey": "valueTest",
"anotherKey": "anotherValue"})])
def test_receiveLongerBoxData(self):
"""
An L{amp.BinaryBoxProtocol} can receive serialized AMP boxes with
values of up to (2 ** 16 - 1) bytes.
"""
length = (2 ** 16 - 1)
value = 'x' * length
transport = StringTransport()
protocol = amp.BinaryBoxProtocol(self)
protocol.makeConnection(transport)
protocol.dataReceived(amp.Box({'k': value}).serialize())
self.assertEqual(self.boxes, [amp.Box({'k': value})])
self.assertFalse(transport.disconnecting)
def test_sendBox(self):
"""
When a binary box protocol sends a box, it should emit the serialized
bytes of that box to its transport.
"""
a = amp.BinaryBoxProtocol(self)
a.makeConnection(self)
aBox = amp.Box({"testKey": "valueTest",
"someData": "hello"})
a.makeConnection(self)
a.sendBox(aBox)
self.assertEqual(''.join(self.data), aBox.serialize())
def test_connectionLostStopSendingBoxes(self):
"""
When a binary box protocol loses its connection, it should notify its
box receiver that it has stopped receiving boxes.
"""
a = amp.BinaryBoxProtocol(self)
a.makeConnection(self)
connectionFailure = Failure(RuntimeError())
a.connectionLost(connectionFailure)
self.assertIdentical(self.stopReason, connectionFailure)
def test_protocolSwitch(self):
"""
L{BinaryBoxProtocol} has the capacity to switch to a different protocol
on a box boundary. When a protocol is in the process of switching, it
cannot receive traffic.
"""
otherProto = TestProto(None, "outgoing data")
test = self
class SwitchyReceiver:
switched = False
def startReceivingBoxes(self, sender):
pass
def ampBoxReceived(self, box):
test.assertFalse(self.switched,
"Should only receive one box!")
self.switched = True
a._lockForSwitch()
a._switchTo(otherProto)
a = amp.BinaryBoxProtocol(SwitchyReceiver())
anyOldBox = amp.Box({"include": "lots",
"of": "data"})
a.makeConnection(self)
# Include a 0-length box at the beginning of the next protocol's data,
# to make sure that AMP doesn't eat the data or try to deliver extra
# boxes either...
moreThanOneBox = anyOldBox.serialize() + "\x00\x00Hello, world!"
a.dataReceived(moreThanOneBox)
self.assertIdentical(otherProto.transport, self)
self.assertEqual("".join(otherProto.data), "\x00\x00Hello, world!")
self.assertEqual(self.data, ["outgoing data"])
a.dataReceived("more data")
self.assertEqual("".join(otherProto.data),
"\x00\x00Hello, world!more data")
self.assertRaises(amp.ProtocolSwitched, a.sendBox, anyOldBox)
def test_protocolSwitchEmptyBuffer(self):
"""
After switching to a different protocol, if no extra bytes beyond
the switch box were delivered, an empty string is not passed to the
switched protocol's C{dataReceived} method.
"""
a = amp.BinaryBoxProtocol(self)
a.makeConnection(self)
otherProto = TestProto(None, "")
a._switchTo(otherProto)
self.assertEqual(otherProto.data, [])
def test_protocolSwitchInvalidStates(self):
"""
In order to make sure the protocol never gets any invalid data sent
into the middle of a box, it must be locked for switching before it is
switched. It can only be unlocked if the switch failed, and attempting
to send a box while it is locked should raise an exception.
"""
a = amp.BinaryBoxProtocol(self)
a.makeConnection(self)
sampleBox = amp.Box({"some": "data"})
a._lockForSwitch()
self.assertRaises(amp.ProtocolSwitched, a.sendBox, sampleBox)
a._unlockFromSwitch()
a.sendBox(sampleBox)
self.assertEqual(''.join(self.data), sampleBox.serialize())
a._lockForSwitch()
otherProto = TestProto(None, "outgoing data")
a._switchTo(otherProto)
self.assertRaises(amp.ProtocolSwitched, a._unlockFromSwitch)
def test_protocolSwitchLoseConnection(self):
"""
When the protocol is switched, it should notify its nested protocol of
disconnection.
"""
class Loser(protocol.Protocol):
reason = None
def connectionLost(self, reason):
self.reason = reason
connectionLoser = Loser()
a = amp.BinaryBoxProtocol(self)
a.makeConnection(self)
a._lockForSwitch()
a._switchTo(connectionLoser)
connectionFailure = Failure(RuntimeError())
a.connectionLost(connectionFailure)
self.assertEqual(connectionLoser.reason, connectionFailure)
def test_protocolSwitchLoseClientConnection(self):
"""
When the protocol is switched, it should notify its nested client
protocol factory of disconnection.
"""
class ClientLoser:
reason = None
def clientConnectionLost(self, connector, reason):
self.reason = reason
a = amp.BinaryBoxProtocol(self)
connectionLoser = protocol.Protocol()
clientLoser = ClientLoser()
a.makeConnection(self)
a._lockForSwitch()
a._switchTo(connectionLoser, clientLoser)
connectionFailure = Failure(RuntimeError())
a.connectionLost(connectionFailure)
self.assertEqual(clientLoser.reason, connectionFailure)
class AMPTest(unittest.TestCase):
def test_interfaceDeclarations(self):
"""
The classes in the amp module ought to implement the interfaces that
are declared for their benefit.
"""
for interface, implementation in [(amp.IBoxSender, amp.BinaryBoxProtocol),
(amp.IBoxReceiver, amp.BoxDispatcher),
(amp.IResponderLocator, amp.CommandLocator),
(amp.IResponderLocator, amp.SimpleStringLocator),
(amp.IBoxSender, amp.AMP),
(amp.IBoxReceiver, amp.AMP),
(amp.IResponderLocator, amp.AMP)]:
self.failUnless(interface.implementedBy(implementation),
"%s does not implements(%s)" % (implementation, interface))
def test_helloWorld(self):
"""
Verify that a simple command can be sent and its response received with
the simple low-level string-based API.
"""
c, s, p = connectedServerAndClient()
L = []
HELLO = 'world'
c.sendHello(HELLO).addCallback(L.append)
p.flush()
self.assertEqual(L[0]['hello'], HELLO)
def test_wireFormatRoundTrip(self):
"""
Verify that mixed-case, underscored and dashed arguments are mapped to
their python names properly.
"""
c, s, p = connectedServerAndClient()
L = []
HELLO = 'world'
c.sendHello(HELLO).addCallback(L.append)
p.flush()
self.assertEqual(L[0]['hello'], HELLO)
def test_helloWorldUnicode(self):
"""
Verify that unicode arguments can be encoded and decoded.
"""
c, s, p = connectedServerAndClient(
ServerClass=SimpleSymmetricCommandProtocol,
ClientClass=SimpleSymmetricCommandProtocol)
L = []
HELLO = 'world'
HELLO_UNICODE = u'wor\u1234ld'
c.sendUnicodeHello(HELLO, HELLO_UNICODE).addCallback(L.append)
p.flush()
self.assertEqual(L[0]['hello'], HELLO)
self.assertEqual(L[0]['Print'], HELLO_UNICODE)
def test_callRemoteStringRequiresAnswerFalse(self):
"""
L{BoxDispatcher.callRemoteString} returns C{None} if C{requiresAnswer}
is C{False}.
"""
c, s, p = connectedServerAndClient()
ret = c.callRemoteString("WTF", requiresAnswer=False)
self.assertIdentical(ret, None)
def test_unknownCommandLow(self):
"""
Verify that unknown commands using low-level APIs will be rejected with an
error, but will NOT terminate the connection.
"""
c, s, p = connectedServerAndClient()
L = []
def clearAndAdd(e):
"""
You can't propagate the error...
"""
e.trap(amp.UnhandledCommand)
return "OK"
c.callRemoteString("WTF").addErrback(clearAndAdd).addCallback(L.append)
p.flush()
self.assertEqual(L.pop(), "OK")
HELLO = 'world'
c.sendHello(HELLO).addCallback(L.append)
p.flush()
self.assertEqual(L[0]['hello'], HELLO)
def test_unknownCommandHigh(self):
"""
Verify that unknown commands using high-level APIs will be rejected with an
error, but will NOT terminate the connection.
"""
c, s, p = connectedServerAndClient()
L = []
def clearAndAdd(e):
"""
You can't propagate the error...
"""
e.trap(amp.UnhandledCommand)
return "OK"
c.callRemote(WTF).addErrback(clearAndAdd).addCallback(L.append)
p.flush()
self.assertEqual(L.pop(), "OK")
HELLO = 'world'
c.sendHello(HELLO).addCallback(L.append)
p.flush()
self.assertEqual(L[0]['hello'], HELLO)
def test_brokenReturnValue(self):
"""
It can be very confusing if you write some code which responds to a
command, but gets the return value wrong. Most commonly you end up
returning None instead of a dictionary.
Verify that if that happens, the framework logs a useful error.
"""
L = []
SimpleSymmetricCommandProtocol().dispatchCommand(
amp.AmpBox(_command=BrokenReturn.commandName)).addErrback(L.append)
L[0].trap(amp.BadLocalReturn)
self.failUnlessIn('None', repr(L[0].value))
def test_unknownArgument(self):
"""
Verify that unknown arguments are ignored, and not passed to a Python
function which can't accept them.
"""
c, s, p = connectedServerAndClient(
ServerClass=SimpleSymmetricCommandProtocol,
ClientClass=SimpleSymmetricCommandProtocol)
L = []
HELLO = 'world'
# c.sendHello(HELLO).addCallback(L.append)
c.callRemote(FutureHello,
hello=HELLO,
bonus="I'm not in the book!").addCallback(
L.append)
p.flush()
self.assertEqual(L[0]['hello'], HELLO)
def test_simpleReprs(self):
"""
Verify that the various Box objects repr properly, for debugging.
"""
self.assertEqual(type(repr(amp._SwitchBox('a'))), str)
self.assertEqual(type(repr(amp.QuitBox())), str)
self.assertEqual(type(repr(amp.AmpBox())), str)
self.failUnless("AmpBox" in repr(amp.AmpBox()))
def test_innerProtocolInRepr(self):
"""
Verify that L{AMP} objects output their innerProtocol when set.
"""
otherProto = TestProto(None, "outgoing data")
a = amp.AMP()
a.innerProtocol = otherProto
def fakeID(obj):
return {a: 0x1234}.get(obj, id(obj))
self.addCleanup(setIDFunction, setIDFunction(fakeID))
self.assertEqual(
repr(a), "<AMP inner <TestProto #%d> at 0x1234>" % (
otherProto.instanceId,))
def test_innerProtocolNotInRepr(self):
"""
Verify that L{AMP} objects do not output 'inner' when no innerProtocol
is set.
"""
a = amp.AMP()
def fakeID(obj):
return {a: 0x4321}.get(obj, id(obj))
self.addCleanup(setIDFunction, setIDFunction(fakeID))
self.assertEqual(repr(a), "<AMP at 0x4321>")
def test_simpleSSLRepr(self):
"""
L{amp._TLSBox.__repr__} returns a string.
"""
self.assertEqual(type(repr(amp._TLSBox())), str)
test_simpleSSLRepr.skip = skipSSL
def test_keyTooLong(self):
"""
Verify that a key that is too long will immediately raise a synchronous
exception.
"""
c, s, p = connectedServerAndClient()
x = "H" * (0xff+1)
tl = self.assertRaises(amp.TooLong,
c.callRemoteString, "Hello",
**{x: "hi"})
self.assertTrue(tl.isKey)
self.assertTrue(tl.isLocal)
self.assertIdentical(tl.keyName, None)
self.assertEqual(tl.value, x)
self.assertIn(str(len(x)), repr(tl))
self.assertIn("key", repr(tl))
def test_valueTooLong(self):
"""
Verify that attempting to send value longer than 64k will immediately
raise an exception.
"""
c, s, p = connectedServerAndClient()
x = "H" * (0xffff+1)
tl = self.assertRaises(amp.TooLong, c.sendHello, x)
p.flush()
self.failIf(tl.isKey)
self.failUnless(tl.isLocal)
self.assertEqual(tl.keyName, 'hello')
self.failUnlessIdentical(tl.value, x)
self.failUnless(str(len(x)) in repr(tl))
self.failUnless("value" in repr(tl))
self.failUnless('hello' in repr(tl))
def test_helloWorldCommand(self):
"""
Verify that a simple command can be sent and its response received with
the high-level value parsing API.
"""
c, s, p = connectedServerAndClient(
ServerClass=SimpleSymmetricCommandProtocol,
ClientClass=SimpleSymmetricCommandProtocol)
L = []
HELLO = 'world'
c.sendHello(HELLO).addCallback(L.append)
p.flush()
self.assertEqual(L[0]['hello'], HELLO)
def test_helloErrorHandling(self):
"""
Verify that if a known error type is raised and handled, it will be
properly relayed to the other end of the connection and translated into
an exception, and no error will be logged.
"""
L=[]
c, s, p = connectedServerAndClient(
ServerClass=SimpleSymmetricCommandProtocol,
ClientClass=SimpleSymmetricCommandProtocol)
HELLO = 'fuck you'
c.sendHello(HELLO).addErrback(L.append)
p.flush()
L[0].trap(UnfriendlyGreeting)
self.assertEqual(str(L[0].value), "Don't be a dick.")
def test_helloFatalErrorHandling(self):
"""
Verify that if a known, fatal error type is raised and handled, it will
be properly relayed to the other end of the connection and translated
into an exception, no error will be logged, and the connection will be
terminated.
"""
L=[]
c, s, p = connectedServerAndClient(
ServerClass=SimpleSymmetricCommandProtocol,
ClientClass=SimpleSymmetricCommandProtocol)
HELLO = 'die'
c.sendHello(HELLO).addErrback(L.append)
p.flush()
L.pop().trap(DeathThreat)
c.sendHello(HELLO).addErrback(L.append)
p.flush()
L.pop().trap(error.ConnectionDone)
def test_helloNoErrorHandling(self):
"""
Verify that if an unknown error type is raised, it will be relayed to
the other end of the connection and translated into an exception, it
will be logged, and then the connection will be dropped.
"""
L=[]
c, s, p = connectedServerAndClient(
ServerClass=SimpleSymmetricCommandProtocol,
ClientClass=SimpleSymmetricCommandProtocol)
HELLO = THING_I_DONT_UNDERSTAND
c.sendHello(HELLO).addErrback(L.append)
p.flush()
ure = L.pop()
ure.trap(amp.UnknownRemoteError)
c.sendHello(HELLO).addErrback(L.append)
cl = L.pop()
cl.trap(error.ConnectionDone)
# The exception should have been logged.
self.failUnless(self.flushLoggedErrors(ThingIDontUnderstandError))
def test_lateAnswer(self):
"""
Verify that a command that does not get answered until after the
connection terminates will not cause any errors.
"""
c, s, p = connectedServerAndClient(
ServerClass=SimpleSymmetricCommandProtocol,
ClientClass=SimpleSymmetricCommandProtocol)
L = []
c.callRemote(WaitForever).addErrback(L.append)
p.flush()
self.assertEqual(L, [])
s.transport.loseConnection()
p.flush()
L.pop().trap(error.ConnectionDone)
# Just make sure that it doesn't error...
s.waiting.callback({})
return s.waiting
def test_requiresNoAnswer(self):
"""
Verify that a command that requires no answer is run.
"""
c, s, p = connectedServerAndClient(
ServerClass=SimpleSymmetricCommandProtocol,
ClientClass=SimpleSymmetricCommandProtocol)
HELLO = 'world'
c.callRemote(NoAnswerHello, hello=HELLO)
p.flush()
self.failUnless(s.greeted)
def test_requiresNoAnswerFail(self):
"""
Verify that commands sent after a failed no-answer request do not complete.
"""
L=[]
c, s, p = connectedServerAndClient(
ServerClass=SimpleSymmetricCommandProtocol,
ClientClass=SimpleSymmetricCommandProtocol)
HELLO = 'fuck you'
c.callRemote(NoAnswerHello, hello=HELLO)
p.flush()
# This should be logged locally.
self.failUnless(self.flushLoggedErrors(amp.RemoteAmpError))
HELLO = 'world'
c.callRemote(Hello, hello=HELLO).addErrback(L.append)
p.flush()
L.pop().trap(error.ConnectionDone)
self.failIf(s.greeted)
def test_noAnswerResponderBadAnswer(self):
"""
Verify that responders of requiresAnswer=False commands have to return
a dictionary anyway.
(requiresAnswer is a hint from the _client_ - the server may be called
upon to answer commands in any case, if the client wants to know when
they complete.)
"""
c, s, p = connectedServerAndClient(
ServerClass=BadNoAnswerCommandProtocol,
ClientClass=SimpleSymmetricCommandProtocol)
c.callRemote(NoAnswerHello, hello="hello")
p.flush()
le = self.flushLoggedErrors(amp.BadLocalReturn)
self.assertEqual(len(le), 1)
def test_noAnswerResponderAskedForAnswer(self):
"""
Verify that responders with requiresAnswer=False will actually respond
if the client sets requiresAnswer=True. In other words, verify that
requiresAnswer is a hint honored only by the client.
"""
c, s, p = connectedServerAndClient(
ServerClass=NoAnswerCommandProtocol,
ClientClass=SimpleSymmetricCommandProtocol)
L = []
c.callRemote(Hello, hello="Hello!").addCallback(L.append)
p.flush()
self.assertEqual(len(L), 1)
self.assertEqual(L, [dict(hello="Hello!-noanswer",
Print=None)]) # Optional response argument
def test_ampListCommand(self):
"""
Test encoding of an argument that uses the AmpList encoding.
"""
c, s, p = connectedServerAndClient(
ServerClass=SimpleSymmetricCommandProtocol,
ClientClass=SimpleSymmetricCommandProtocol)
L = []
c.callRemote(GetList, length=10).addCallback(L.append)
p.flush()
values = L.pop().get('body')
self.assertEqual(values, [{'x': 1}] * 10)
def test_optionalAmpListOmitted(self):
"""
Sending a command with an omitted AmpList argument that is
designated as optional does not raise an InvalidSignature error.
"""
c, s, p = connectedServerAndClient(
ServerClass=SimpleSymmetricCommandProtocol,
ClientClass=SimpleSymmetricCommandProtocol)
L = []
c.callRemote(DontRejectMe, magicWord=u'please').addCallback(L.append)
p.flush()
response = L.pop().get('response')
self.assertEqual(response, 'list omitted')
def test_optionalAmpListPresent(self):
"""
Sanity check that optional AmpList arguments are processed normally.
"""
c, s, p = connectedServerAndClient(
ServerClass=SimpleSymmetricCommandProtocol,
ClientClass=SimpleSymmetricCommandProtocol)
L = []
c.callRemote(DontRejectMe, magicWord=u'please',
list=[{'name': 'foo'}]).addCallback(L.append)
p.flush()
response = L.pop().get('response')
self.assertEqual(response, 'foo accepted')
def test_failEarlyOnArgSending(self):
"""
Verify that if we pass an invalid argument list (omitting an argument),
an exception will be raised.
"""
self.assertRaises(amp.InvalidSignature, Hello)
def test_doubleProtocolSwitch(self):
"""
As a debugging aid, a protocol system should raise a
L{ProtocolSwitched} exception when asked to switch a protocol that is
already switched.
"""
serverDeferred = defer.Deferred()
serverProto = SimpleSymmetricCommandProtocol(serverDeferred)
clientDeferred = defer.Deferred()
clientProto = SimpleSymmetricCommandProtocol(clientDeferred)
c, s, p = connectedServerAndClient(ServerClass=lambda: serverProto,
ClientClass=lambda: clientProto)
def switched(result):
self.assertRaises(amp.ProtocolSwitched, c.switchToTestProtocol)
self.testSucceeded = True
c.switchToTestProtocol().addCallback(switched)
p.flush()
self.failUnless(self.testSucceeded)
def test_protocolSwitch(self, switcher=SimpleSymmetricCommandProtocol,
spuriousTraffic=False,
spuriousError=False):
"""
Verify that it is possible to switch to another protocol mid-connection and
send data to it successfully.
"""
self.testSucceeded = False
serverDeferred = defer.Deferred()
serverProto = switcher(serverDeferred)
clientDeferred = defer.Deferred()
clientProto = switcher(clientDeferred)
c, s, p = connectedServerAndClient(ServerClass=lambda: serverProto,
ClientClass=lambda: clientProto)
if spuriousTraffic:
wfdr = [] # remote
c.callRemote(WaitForever).addErrback(wfdr.append)
switchDeferred = c.switchToTestProtocol()
if spuriousTraffic:
self.assertRaises(amp.ProtocolSwitched, c.sendHello, 'world')
def cbConnsLost(((serverSuccess, serverData),
(clientSuccess, clientData))):
self.failUnless(serverSuccess)
self.failUnless(clientSuccess)
self.assertEqual(''.join(serverData), SWITCH_CLIENT_DATA)
self.assertEqual(''.join(clientData), SWITCH_SERVER_DATA)
self.testSucceeded = True
def cbSwitch(proto):
return defer.DeferredList(
[serverDeferred, clientDeferred]).addCallback(cbConnsLost)
switchDeferred.addCallback(cbSwitch)
p.flush()
if serverProto.maybeLater is not None:
serverProto.maybeLater.callback(serverProto.maybeLaterProto)
p.flush()
if spuriousTraffic:
# switch is done here; do this here to make sure that if we're
# going to corrupt the connection, we do it before it's closed.
if spuriousError:
s.waiting.errback(amp.RemoteAmpError(
"SPURIOUS",
"Here's some traffic in the form of an error."))
else:
s.waiting.callback({})
p.flush()
c.transport.loseConnection() # close it
p.flush()
self.failUnless(self.testSucceeded)
def test_protocolSwitchDeferred(self):
"""
Verify that protocol-switching even works if the value returned from
the command that does the switch is deferred.
"""
return self.test_protocolSwitch(switcher=DeferredSymmetricCommandProtocol)
def test_protocolSwitchFail(self, switcher=SimpleSymmetricCommandProtocol):
"""
Verify that if we try to switch protocols and it fails, the connection
stays up and we can go back to speaking AMP.
"""
self.testSucceeded = False
serverDeferred = defer.Deferred()
serverProto = switcher(serverDeferred)
clientDeferred = defer.Deferred()
clientProto = switcher(clientDeferred)
c, s, p = connectedServerAndClient(ServerClass=lambda: serverProto,
ClientClass=lambda: clientProto)
L = []
c.switchToTestProtocol(fail=True).addErrback(L.append)
p.flush()
L.pop().trap(UnknownProtocol)
self.failIf(self.testSucceeded)
# It's a known error, so let's send a "hello" on the same connection;
# it should work.
c.sendHello('world').addCallback(L.append)
p.flush()
self.assertEqual(L.pop()['hello'], 'world')
def test_trafficAfterSwitch(self):
"""
Verify that attempts to send traffic after a switch will not corrupt
the nested protocol.
"""
return self.test_protocolSwitch(spuriousTraffic=True)
def test_errorAfterSwitch(self):
"""
Returning an error after a protocol switch should record the underlying
error.
"""
return self.test_protocolSwitch(spuriousTraffic=True,
spuriousError=True)
def test_quitBoxQuits(self):
"""
Verify that commands with a responseType of QuitBox will in fact
terminate the connection.
"""
c, s, p = connectedServerAndClient(
ServerClass=SimpleSymmetricCommandProtocol,
ClientClass=SimpleSymmetricCommandProtocol)
L = []
HELLO = 'world'
GOODBYE = 'everyone'
c.sendHello(HELLO).addCallback(L.append)
p.flush()
self.assertEqual(L.pop()['hello'], HELLO)
c.callRemote(Goodbye).addCallback(L.append)
p.flush()
self.assertEqual(L.pop()['goodbye'], GOODBYE)
c.sendHello(HELLO).addErrback(L.append)
L.pop().trap(error.ConnectionDone)
def test_basicLiteralEmit(self):
"""
Verify that the command dictionaries for a callRemoteN look correct
after being serialized and parsed.
"""
c, s, p = connectedServerAndClient()
L = []
s.ampBoxReceived = L.append
c.callRemote(Hello, hello='hello test', mixedCase='mixed case arg test',
dash_arg='x', underscore_arg='y')
p.flush()
self.assertEqual(len(L), 1)
for k, v in [('_command', Hello.commandName),
('hello', 'hello test'),
('mixedCase', 'mixed case arg test'),
('dash-arg', 'x'),
('underscore_arg', 'y')]:
self.assertEqual(L[-1].pop(k), v)
L[-1].pop('_ask')
self.assertEqual(L[-1], {})
def test_basicStructuredEmit(self):
"""
Verify that a call similar to basicLiteralEmit's is handled properly with
high-level quoting and passing to Python methods, and that argument
names are correctly handled.
"""
L = []
class StructuredHello(amp.AMP):
def h(self, *a, **k):
L.append((a, k))
return dict(hello='aaa')
Hello.responder(h)
c, s, p = connectedServerAndClient(ServerClass=StructuredHello)
c.callRemote(Hello, hello='hello test', mixedCase='mixed case arg test',
dash_arg='x', underscore_arg='y').addCallback(L.append)
p.flush()
self.assertEqual(len(L), 2)
self.assertEqual(L[0],
((), dict(
hello='hello test',
mixedCase='mixed case arg test',
dash_arg='x',
underscore_arg='y',
# XXX - should optional arguments just not be passed?
# passing None seems a little odd, looking at the way it
# turns out here... -glyph
From=('file', 'file'),
Print=None,
optional=None,
)))
self.assertEqual(L[1], dict(Print=None, hello='aaa'))
class PretendRemoteCertificateAuthority:
def checkIsPretendRemote(self):
return True
class IOSimCert:
verifyCount = 0
def options(self, *ign):
return self
def iosimVerify(self, otherCert):
"""
This isn't a real certificate, and wouldn't work on a real socket, but
iosim specifies a different API so that we don't have to do any crypto
math to demonstrate that the right functions get called in the right
places.
"""
assert otherCert is self
self.verifyCount += 1
return True
class OKCert(IOSimCert):
def options(self, x):
assert x.checkIsPretendRemote()
return self
class GrumpyCert(IOSimCert):
def iosimVerify(self, otherCert):
self.verifyCount += 1
return False
class DroppyCert(IOSimCert):
def __init__(self, toDrop):
self.toDrop = toDrop
def iosimVerify(self, otherCert):
self.verifyCount += 1
self.toDrop.loseConnection()
return True
class SecurableProto(FactoryNotifier):
factory = None
def verifyFactory(self):
return [PretendRemoteCertificateAuthority()]
def getTLSVars(self):
cert = self.certFactory()
verify = self.verifyFactory()
return dict(
tls_localCertificate=cert,
tls_verifyAuthorities=verify)
amp.StartTLS.responder(getTLSVars)
class TLSTest(unittest.TestCase):
def test_startingTLS(self):
"""
Verify that starting TLS and succeeding at handshaking sends all the
notifications to all the right places.
"""
cli, svr, p = connectedServerAndClient(
ServerClass=SecurableProto,
ClientClass=SecurableProto)
okc = OKCert()
svr.certFactory = lambda : okc
cli.callRemote(
amp.StartTLS, tls_localCertificate=okc,
tls_verifyAuthorities=[PretendRemoteCertificateAuthority()])
# let's buffer something to be delivered securely
L = []
cli.callRemote(SecuredPing).addCallback(L.append)
p.flush()
# once for client once for server
self.assertEqual(okc.verifyCount, 2)
L = []
cli.callRemote(SecuredPing).addCallback(L.append)
p.flush()
self.assertEqual(L[0], {'pinged': True})
def test_startTooManyTimes(self):
"""
Verify that the protocol will complain if we attempt to renegotiate TLS,
which we don't support.
"""
cli, svr, p = connectedServerAndClient(
ServerClass=SecurableProto,
ClientClass=SecurableProto)
okc = OKCert()
svr.certFactory = lambda : okc
cli.callRemote(amp.StartTLS,
tls_localCertificate=okc,
tls_verifyAuthorities=[PretendRemoteCertificateAuthority()])
p.flush()
cli.noPeerCertificate = True # this is totally fake
self.assertRaises(
amp.OnlyOneTLS,
cli.callRemote,
amp.StartTLS,
tls_localCertificate=okc,
tls_verifyAuthorities=[PretendRemoteCertificateAuthority()])
def test_negotiationFailed(self):
"""
Verify that starting TLS and failing on both sides at handshaking sends
notifications to all the right places and terminates the connection.
"""
badCert = GrumpyCert()
cli, svr, p = connectedServerAndClient(
ServerClass=SecurableProto,
ClientClass=SecurableProto)
svr.certFactory = lambda : badCert
cli.callRemote(amp.StartTLS,
tls_localCertificate=badCert)
p.flush()
# once for client once for server - but both fail
self.assertEqual(badCert.verifyCount, 2)
d = cli.callRemote(SecuredPing)
p.flush()
self.assertFailure(d, iosim.NativeOpenSSLError)
def test_negotiationFailedByClosing(self):
"""
Verify that starting TLS and failing by way of a lost connection
notices that it is probably an SSL problem.
"""
cli, svr, p = connectedServerAndClient(
ServerClass=SecurableProto,
ClientClass=SecurableProto)
droppyCert = DroppyCert(svr.transport)
svr.certFactory = lambda : droppyCert
cli.callRemote(amp.StartTLS, tls_localCertificate=droppyCert)
p.flush()
self.assertEqual(droppyCert.verifyCount, 2)
d = cli.callRemote(SecuredPing)
p.flush()
# it might be a good idea to move this exception somewhere more
# reasonable.
self.assertFailure(d, error.PeerVerifyError)
skip = skipSSL
class TLSNotAvailableTest(unittest.TestCase):
"""
    Tests what happens when SSL is not available in the current installation.
"""
def setUp(self):
"""
Disable ssl in amp.
"""
self.ssl = amp.ssl
amp.ssl = None
def tearDown(self):
"""
Restore ssl module.
"""
amp.ssl = self.ssl
def test_callRemoteError(self):
"""
Check that callRemote raises an exception when called with a
L{amp.StartTLS}.
"""
cli, svr, p = connectedServerAndClient(
ServerClass=SecurableProto,
ClientClass=SecurableProto)
okc = OKCert()
svr.certFactory = lambda : okc
return self.assertFailure(cli.callRemote(
amp.StartTLS, tls_localCertificate=okc,
tls_verifyAuthorities=[PretendRemoteCertificateAuthority()]),
RuntimeError)
def test_messageReceivedError(self):
"""
When a client with SSL enabled talks to a server without SSL, it
should return a meaningful error.
"""
svr = SecurableProto()
okc = OKCert()
svr.certFactory = lambda : okc
box = amp.Box()
box['_command'] = 'StartTLS'
box['_ask'] = '1'
boxes = []
svr.sendBox = boxes.append
svr.makeConnection(StringTransport())
svr.ampBoxReceived(box)
self.assertEqual(boxes,
[{'_error_code': 'TLS_ERROR',
'_error': '1',
'_error_description': 'TLS not available'}])
class InheritedError(Exception):
"""
This error is used to check inheritance.
"""
class OtherInheritedError(Exception):
"""
This is a distinct error for checking inheritance.
"""
class BaseCommand(amp.Command):
"""
This provides a command that will be subclassed.
"""
errors = {InheritedError: 'INHERITED_ERROR'}
class InheritedCommand(BaseCommand):
"""
This is a command which subclasses another command but does not override
anything.
"""
class AddErrorsCommand(BaseCommand):
"""
This is a command which subclasses another command but adds errors to the
list.
"""
arguments = [('other', amp.Boolean())]
errors = {OtherInheritedError: 'OTHER_INHERITED_ERROR'}
class NormalCommandProtocol(amp.AMP):
"""
This is a protocol which responds to L{BaseCommand}, and is used to test
that inheritance does not interfere with the normal handling of errors.
"""
def resp(self):
raise InheritedError()
BaseCommand.responder(resp)
class InheritedCommandProtocol(amp.AMP):
"""
This is a protocol which responds to L{InheritedCommand}, and is used to
test that inherited commands inherit their bases' errors if they do not
respond to any of their own.
"""
def resp(self):
raise InheritedError()
InheritedCommand.responder(resp)
class AddedCommandProtocol(amp.AMP):
"""
This is a protocol which responds to L{AddErrorsCommand}, and is used to
test that inherited commands can add their own new types of errors, but
still respond in the same way to their parents types of errors.
"""
def resp(self, other):
if other:
raise OtherInheritedError()
else:
raise InheritedError()
AddErrorsCommand.responder(resp)
class CommandInheritanceTests(unittest.TestCase):
"""
These tests verify that commands inherit error conditions properly.
"""
def errorCheck(self, err, proto, cmd, **kw):
"""
Check that the appropriate kind of error is raised when a given command
is sent to a given protocol.
"""
c, s, p = connectedServerAndClient(ServerClass=proto,
ClientClass=proto)
d = c.callRemote(cmd, **kw)
d2 = self.failUnlessFailure(d, err)
p.flush()
return d2
def test_basicErrorPropagation(self):
"""
Verify that errors specified in a superclass are respected normally
even if it has subclasses.
"""
return self.errorCheck(
InheritedError, NormalCommandProtocol, BaseCommand)
def test_inheritedErrorPropagation(self):
"""
Verify that errors specified in a superclass command are propagated to
its subclasses.
"""
return self.errorCheck(
InheritedError, InheritedCommandProtocol, InheritedCommand)
def test_inheritedErrorAddition(self):
"""
Verify that new errors specified in a subclass of an existing command
are honored even if the superclass defines some errors.
"""
return self.errorCheck(
OtherInheritedError, AddedCommandProtocol, AddErrorsCommand, other=True)
def test_additionWithOriginalError(self):
"""
Verify that errors specified in a command's superclass are respected
even if that command defines new errors itself.
"""
return self.errorCheck(
InheritedError, AddedCommandProtocol, AddErrorsCommand, other=False)
def _loseAndPass(err, proto):
# be specific, pass on the error to the client.
err.trap(error.ConnectionLost, error.ConnectionDone)
del proto.connectionLost
proto.connectionLost(err)
class LiveFireBase:
"""
Utility for connected reactor-using tests.
"""
def setUp(self):
"""
Create an amp server and connect a client to it.
"""
from twisted.internet import reactor
self.serverFactory = protocol.ServerFactory()
self.serverFactory.protocol = self.serverProto
self.clientFactory = protocol.ClientFactory()
self.clientFactory.protocol = self.clientProto
self.clientFactory.onMade = defer.Deferred()
self.serverFactory.onMade = defer.Deferred()
self.serverPort = reactor.listenTCP(0, self.serverFactory)
self.addCleanup(self.serverPort.stopListening)
self.clientConn = reactor.connectTCP(
'127.0.0.1', self.serverPort.getHost().port,
self.clientFactory)
self.addCleanup(self.clientConn.disconnect)
def getProtos(rlst):
self.cli = self.clientFactory.theProto
self.svr = self.serverFactory.theProto
dl = defer.DeferredList([self.clientFactory.onMade,
self.serverFactory.onMade])
return dl.addCallback(getProtos)
def tearDown(self):
"""
Cleanup client and server connections, and check the error got at
C{connectionLost}.
"""
L = []
for conn in self.cli, self.svr:
if conn.transport is not None:
# depend on amp's function connection-dropping behavior
d = defer.Deferred().addErrback(_loseAndPass, conn)
conn.connectionLost = d.errback
conn.transport.loseConnection()
L.append(d)
return defer.gatherResults(L
).addErrback(lambda first: first.value.subFailure)
def show(x):
import sys
sys.stdout.write(x+'\n')
sys.stdout.flush()
def tempSelfSigned():
from twisted.internet import ssl
sharedDN = ssl.DN(CN='shared')
key = ssl.KeyPair.generate()
cr = key.certificateRequest(sharedDN)
sscrd = key.signCertificateRequest(
sharedDN, cr, lambda dn: True, 1234567)
cert = key.newCertificate(sscrd)
return cert
if ssl is not None:
tempcert = tempSelfSigned()
class LiveFireTLSTestCase(LiveFireBase, unittest.TestCase):
clientProto = SecurableProto
serverProto = SecurableProto
def test_liveFireCustomTLS(self):
"""
Using real, live TLS, actually negotiate a connection.
This also looks at the 'peerCertificate' attribute's correctness, since
that's actually loaded using OpenSSL calls, but the main purpose is to
make sure that we didn't miss anything obvious in iosim about TLS
negotiations.
"""
cert = tempcert
self.svr.verifyFactory = lambda : [cert]
self.svr.certFactory = lambda : cert
# only needed on the server, we specify the client below.
def secured(rslt):
x = cert.digest()
def pinged(rslt2):
# Interesting. OpenSSL won't even _tell_ us about the peer
# cert until we negotiate. we should be able to do this in
# 'secured' instead, but it looks like we can't. I think this
# is a bug somewhere far deeper than here.
self.assertEqual(x, self.cli.hostCertificate.digest())
self.assertEqual(x, self.cli.peerCertificate.digest())
self.assertEqual(x, self.svr.hostCertificate.digest())
self.assertEqual(x, self.svr.peerCertificate.digest())
return self.cli.callRemote(SecuredPing).addCallback(pinged)
return self.cli.callRemote(amp.StartTLS,
tls_localCertificate=cert,
tls_verifyAuthorities=[cert]).addCallback(secured)
skip = skipSSL
class SlightlySmartTLS(SimpleSymmetricCommandProtocol):
"""
Specific implementation of server side protocol with different
management of TLS.
"""
def getTLSVars(self):
"""
@return: the global C{tempcert} certificate as local certificate.
"""
return dict(tls_localCertificate=tempcert)
amp.StartTLS.responder(getTLSVars)
class PlainVanillaLiveFire(LiveFireBase, unittest.TestCase):
clientProto = SimpleSymmetricCommandProtocol
serverProto = SimpleSymmetricCommandProtocol
def test_liveFireDefaultTLS(self):
"""
Verify that out of the box, we can start TLS to at least encrypt the
connection, even if we don't have any certificates to use.
"""
def secured(result):
return self.cli.callRemote(SecuredPing)
return self.cli.callRemote(amp.StartTLS).addCallback(secured)
skip = skipSSL
class WithServerTLSVerification(LiveFireBase, unittest.TestCase):
clientProto = SimpleSymmetricCommandProtocol
serverProto = SlightlySmartTLS
def test_anonymousVerifyingClient(self):
"""
Verify that anonymous clients can verify server certificates.
"""
def secured(result):
return self.cli.callRemote(SecuredPing)
return self.cli.callRemote(amp.StartTLS,
tls_verifyAuthorities=[tempcert]
).addCallback(secured)
skip = skipSSL
class ProtocolIncludingArgument(amp.Argument):
"""
An L{amp.Argument} which encodes its parser and serializer
arguments *including the protocol* into its parsed and serialized
forms.
"""
def fromStringProto(self, string, protocol):
"""
Don't decode anything; just return all possible information.
@return: A two-tuple of the input string and the protocol.
"""
return (string, protocol)
def toStringProto(self, obj, protocol):
"""
Encode identifying information about L{object} and protocol
into a string for later verification.
@type obj: L{object}
@type protocol: L{amp.AMP}
"""
return "%s:%s" % (id(obj), id(protocol))
class ProtocolIncludingCommand(amp.Command):
"""
A command that has argument and response schemas which use
L{ProtocolIncludingArgument}.
"""
arguments = [('weird', ProtocolIncludingArgument())]
response = [('weird', ProtocolIncludingArgument())]
class MagicSchemaCommand(amp.Command):
"""
A command which overrides L{parseResponse}, L{parseArguments}, and
L{makeResponse}.
"""
def parseResponse(self, strings, protocol):
"""
Don't do any parsing, just jam the input strings and protocol
onto the C{protocol.parseResponseArguments} attribute as a
two-tuple. Return the original strings.
"""
protocol.parseResponseArguments = (strings, protocol)
return strings
parseResponse = classmethod(parseResponse)
def parseArguments(cls, strings, protocol):
"""
Don't do any parsing, just jam the input strings and protocol
onto the C{protocol.parseArgumentsArguments} attribute as a
two-tuple. Return the original strings.
"""
protocol.parseArgumentsArguments = (strings, protocol)
return strings
parseArguments = classmethod(parseArguments)
def makeArguments(cls, objects, protocol):
"""
Don't do any serializing, just jam the input strings and protocol
onto the C{protocol.makeArgumentsArguments} attribute as a
two-tuple. Return the original strings.
"""
protocol.makeArgumentsArguments = (objects, protocol)
return objects
makeArguments = classmethod(makeArguments)
class NoNetworkProtocol(amp.AMP):
"""
An L{amp.AMP} subclass which overrides private methods to avoid
testing the network. It also provides a responder for
L{MagicSchemaCommand} that does nothing, so that tests can test
aspects of the interaction of L{amp.Command}s and L{amp.AMP}.
@ivar parseArgumentsArguments: Arguments that have been passed to any
L{MagicSchemaCommand}, if L{MagicSchemaCommand} has been handled by
this protocol.
@ivar parseResponseArguments: Responses that have been returned from a
L{MagicSchemaCommand}, if L{MagicSchemaCommand} has been handled by
this protocol.
@ivar makeArgumentsArguments: Arguments that have been serialized by any
L{MagicSchemaCommand}, if L{MagicSchemaCommand} has been handled by
this protocol.
"""
def _sendBoxCommand(self, commandName, strings, requiresAnswer):
"""
Return a Deferred which fires with the original strings.
"""
return defer.succeed(strings)
MagicSchemaCommand.responder(lambda s, weird: {})
class MyBox(dict):
"""
A unique dict subclass.
"""
class ProtocolIncludingCommandWithDifferentCommandType(
ProtocolIncludingCommand):
"""
A L{ProtocolIncludingCommand} subclass whose commandType is L{MyBox}
"""
commandType = MyBox
class CommandTestCase(unittest.TestCase):
"""
Tests for L{amp.Argument} and L{amp.Command}.
"""
def test_argumentInterface(self):
"""
L{Argument} instances provide L{amp.IArgumentType}.
"""
self.assertTrue(verifyObject(amp.IArgumentType, amp.Argument()))
def test_parseResponse(self):
"""
There should be a class method of Command which accepts a
mapping of argument names to serialized forms and returns a
similar mapping whose values have been parsed via the
Command's response schema.
"""
protocol = object()
result = 'whatever'
strings = {'weird': result}
self.assertEqual(
ProtocolIncludingCommand.parseResponse(strings, protocol),
{'weird': (result, protocol)})
def test_callRemoteCallsParseResponse(self):
"""
Making a remote call on a L{amp.Command} subclass which
overrides the C{parseResponse} method should call that
C{parseResponse} method to get the response.
"""
client = NoNetworkProtocol()
thingy = "weeoo"
response = client.callRemote(MagicSchemaCommand, weird=thingy)
def gotResponse(ign):
self.assertEqual(client.parseResponseArguments,
({"weird": thingy}, client))
response.addCallback(gotResponse)
return response
def test_parseArguments(self):
"""
There should be a class method of L{amp.Command} which accepts
a mapping of argument names to serialized forms and returns a
similar mapping whose values have been parsed via the
command's argument schema.
"""
protocol = object()
result = 'whatever'
strings = {'weird': result}
self.assertEqual(
ProtocolIncludingCommand.parseArguments(strings, protocol),
{'weird': (result, protocol)})
def test_responderCallsParseArguments(self):
"""
Making a remote call on a L{amp.Command} subclass which
overrides the C{parseArguments} method should call that
C{parseArguments} method to get the arguments.
"""
protocol = NoNetworkProtocol()
responder = protocol.locateResponder(MagicSchemaCommand.commandName)
argument = object()
response = responder(dict(weird=argument))
response.addCallback(
lambda ign: self.assertEqual(protocol.parseArgumentsArguments,
({"weird": argument}, protocol)))
return response
def test_makeArguments(self):
"""
There should be a class method of L{amp.Command} which accepts
a mapping of argument names to objects and returns a similar
mapping whose values have been serialized via the command's
argument schema.
"""
protocol = object()
argument = object()
objects = {'weird': argument}
self.assertEqual(
ProtocolIncludingCommand.makeArguments(objects, protocol),
{'weird': "%d:%d" % (id(argument), id(protocol))})
def test_makeArgumentsUsesCommandType(self):
"""
L{amp.Command.makeArguments}'s return type should be the type
of the result of L{amp.Command.commandType}.
"""
protocol = object()
objects = {"weird": "whatever"}
result = ProtocolIncludingCommandWithDifferentCommandType.makeArguments(
objects, protocol)
self.assertIdentical(type(result), MyBox)
def test_callRemoteCallsMakeArguments(self):
"""
Making a remote call on a L{amp.Command} subclass which
overrides the C{makeArguments} method should call that
C{makeArguments} method to get the response.
"""
client = NoNetworkProtocol()
argument = object()
response = client.callRemote(MagicSchemaCommand, weird=argument)
def gotResponse(ign):
self.assertEqual(client.makeArgumentsArguments,
({"weird": argument}, client))
response.addCallback(gotResponse)
return response
def test_extraArgumentsDisallowed(self):
"""
L{Command.makeArguments} raises L{amp.InvalidSignature} if the objects
dictionary passed to it includes a key which does not correspond to the
Python identifier for a defined argument.
"""
self.assertRaises(
amp.InvalidSignature,
Hello.makeArguments,
dict(hello="hello", bogusArgument=object()), None)
def test_wireSpellingDisallowed(self):
"""
If a command argument conflicts with a Python keyword, the
untransformed argument name is not allowed as a key in the dictionary
passed to L{Command.makeArguments}. If it is supplied,
L{amp.InvalidSignature} is raised.
This may be a pointless implementation restriction which may be lifted.
The current behavior is tested to verify that such arguments are not
silently dropped on the floor (the previous behavior).
"""
self.assertRaises(
amp.InvalidSignature,
Hello.makeArguments,
dict(hello="required", **{"print": "print value"}),
None)
class ListOfTestsMixin:
"""
Base class for testing L{ListOf}, a parameterized zero-or-more argument
type.
@ivar elementType: Subclasses should set this to an L{Argument}
instance. The tests will make a L{ListOf} using this.
@ivar strings: Subclasses should set this to a dictionary mapping some
number of keys to the correct serialized form for some example
values. These should agree with what L{elementType}
produces/accepts.
@ivar objects: Subclasses should set this to a dictionary with the same
keys as C{strings} and with values which are the lists which should
serialize to the values in the C{strings} dictionary.
"""
def test_toBox(self):
"""
L{ListOf.toBox} extracts the list of objects from the C{objects}
dictionary passed to it, using the C{name} key also passed to it,
serializes each of the elements in that list using the L{Argument}
instance previously passed to its initializer, combines the serialized
results, and inserts the result into the C{strings} dictionary using
the same C{name} key.
"""
stringList = amp.ListOf(self.elementType)
strings = amp.AmpBox()
for key in self.objects:
stringList.toBox(key, strings, self.objects.copy(), None)
self.assertEqual(strings, self.strings)
def test_fromBox(self):
"""
L{ListOf.fromBox} reverses the operation performed by L{ListOf.toBox}.
"""
stringList = amp.ListOf(self.elementType)
objects = {}
for key in self.strings:
stringList.fromBox(key, self.strings.copy(), objects, None)
self.assertEqual(objects, self.objects)
class ListOfStringsTests(unittest.TestCase, ListOfTestsMixin):
"""
Tests for L{ListOf} combined with L{amp.String}.
"""
elementType = amp.String()
strings = {
"empty": "",
"single": "\x00\x03foo",
"multiple": "\x00\x03bar\x00\x03baz\x00\x04quux"}
objects = {
"empty": [],
"single": ["foo"],
"multiple": ["bar", "baz", "quux"]}
class ListOfIntegersTests(unittest.TestCase, ListOfTestsMixin):
"""
Tests for L{ListOf} combined with L{amp.Integer}.
"""
elementType = amp.Integer()
huge = (
9999999999999999999999999999999999999999999999999999999999 *
9999999999999999999999999999999999999999999999999999999999)
strings = {
"empty": "",
"single": "\x00\x0210",
"multiple": "\x00\x011\x00\x0220\x00\x03500",
"huge": "\x00\x74%d" % (huge,),
"negative": "\x00\x02-1"}
objects = {
"empty": [],
"single": [10],
"multiple": [1, 20, 500],
"huge": [huge],
"negative": [-1]}
class ListOfUnicodeTests(unittest.TestCase, ListOfTestsMixin):
"""
Tests for L{ListOf} combined with L{amp.Unicode}.
"""
elementType = amp.Unicode()
strings = {
"empty": "",
"single": "\x00\x03foo",
"multiple": "\x00\x03\xe2\x98\x83\x00\x05Hello\x00\x05world"}
objects = {
"empty": [],
"single": [u"foo"],
"multiple": [u"\N{SNOWMAN}", u"Hello", u"world"]}
class ListOfDecimalTests(unittest.TestCase, ListOfTestsMixin):
"""
Tests for L{ListOf} combined with L{amp.Decimal}.
"""
elementType = amp.Decimal()
strings = {
"empty": "",
"single": "\x00\x031.1",
"extreme": "\x00\x08Infinity\x00\x09-Infinity",
"scientist": "\x00\x083.141E+5\x00\x0a0.00003141\x00\x083.141E-7"
"\x00\x09-3.141E+5\x00\x0b-0.00003141\x00\x09-3.141E-7",
"engineer": "\x00\x04%s\x00\x06%s" % (
decimal.Decimal("0e6").to_eng_string(),
decimal.Decimal("1.5E-9").to_eng_string()),
}
objects = {
"empty": [],
"single": [decimal.Decimal("1.1")],
"extreme": [
decimal.Decimal("Infinity"),
decimal.Decimal("-Infinity"),
],
# exarkun objected to AMP supporting engineering notation because
# it was redundant, until we realised that 1E6 has less precision
# than 1000000 and is represented differently. But they compare
# and even hash equally. There were tears.
"scientist": [
decimal.Decimal("3.141E5"),
decimal.Decimal("3.141e-5"),
decimal.Decimal("3.141E-7"),
decimal.Decimal("-3.141e5"),
decimal.Decimal("-3.141E-5"),
decimal.Decimal("-3.141e-7"),
],
"engineer": [
decimal.Decimal("0e6"),
decimal.Decimal("1.5E-9"),
],
}
class ListOfDecimalNanTests(unittest.TestCase, ListOfTestsMixin):
"""
Tests for L{ListOf} combined with L{amp.Decimal} for not-a-number values.
"""
elementType = amp.Decimal()
strings = {
"nan": "\x00\x03NaN\x00\x04-NaN\x00\x04sNaN\x00\x05-sNaN",
}
objects = {
"nan": [
decimal.Decimal("NaN"),
decimal.Decimal("-NaN"),
decimal.Decimal("sNaN"),
decimal.Decimal("-sNaN"),
]
}
def test_fromBox(self):
"""
L{ListOf.fromBox} reverses the operation performed by L{ListOf.toBox}.
"""
# Helpers. Decimal.is_{qnan,snan,signed}() are new in 2.6 (or 2.5.2,
# but who's counting).
def is_qnan(decimal):
return 'NaN' in str(decimal) and 'sNaN' not in str(decimal)
def is_snan(decimal):
return 'sNaN' in str(decimal)
def is_signed(decimal):
return '-' in str(decimal)
# NaN values have unusual equality semantics, so this method is
# overridden to compare the resulting objects in a way which works with
# NaNs.
stringList = amp.ListOf(self.elementType)
objects = {}
for key in self.strings:
stringList.fromBox(key, self.strings.copy(), objects, None)
n = objects["nan"]
self.assertTrue(is_qnan(n[0]) and not is_signed(n[0]))
self.assertTrue(is_qnan(n[1]) and is_signed(n[1]))
self.assertTrue(is_snan(n[2]) and not is_signed(n[2]))
self.assertTrue(is_snan(n[3]) and is_signed(n[3]))
class DecimalTests(unittest.TestCase):
"""
Tests for L{amp.Decimal}.
"""
def test_nonDecimal(self):
"""
L{amp.Decimal.toString} raises L{ValueError} if passed an object which
is not an instance of C{decimal.Decimal}.
"""
argument = amp.Decimal()
self.assertRaises(ValueError, argument.toString, "1.234")
self.assertRaises(ValueError, argument.toString, 1.234)
self.assertRaises(ValueError, argument.toString, 1234)
class ListOfDateTimeTests(unittest.TestCase, ListOfTestsMixin):
"""
Tests for L{ListOf} combined with L{amp.DateTime}.
"""
elementType = amp.DateTime()
strings = {
"christmas":
"\x00\x202010-12-25T00:00:00.000000-00:00"
"\x00\x202010-12-25T00:00:00.000000-00:00",
"christmas in eu": "\x00\x202010-12-25T00:00:00.000000+01:00",
"christmas in iran": "\x00\x202010-12-25T00:00:00.000000+03:30",
"christmas in nyc": "\x00\x202010-12-25T00:00:00.000000-05:00",
"previous tests": "\x00\x202010-12-25T00:00:00.000000+03:19"
"\x00\x202010-12-25T00:00:00.000000-06:59",
}
objects = {
"christmas": [
datetime.datetime(2010, 12, 25, 0, 0, 0, tzinfo=amp.utc),
datetime.datetime(2010, 12, 25, 0, 0, 0,
tzinfo=amp._FixedOffsetTZInfo('+', 0, 0)),
],
"christmas in eu": [
datetime.datetime(2010, 12, 25, 0, 0, 0,
tzinfo=amp._FixedOffsetTZInfo('+', 1, 0)),
],
"christmas in iran": [
datetime.datetime(2010, 12, 25, 0, 0, 0,
tzinfo=amp._FixedOffsetTZInfo('+', 3, 30)),
],
"christmas in nyc": [
datetime.datetime(2010, 12, 25, 0, 0, 0,
tzinfo=amp._FixedOffsetTZInfo('-', 5, 0)),
],
"previous tests": [
datetime.datetime(2010, 12, 25, 0, 0, 0,
tzinfo=amp._FixedOffsetTZInfo('+', 3, 19)),
datetime.datetime(2010, 12, 25, 0, 0, 0,
tzinfo=amp._FixedOffsetTZInfo('-', 6, 59)),
],
}
class ListOfOptionalTests(unittest.TestCase):
"""
Tests to ensure L{ListOf} AMP arguments can be omitted from AMP commands
via the 'optional' flag.
"""
def test_requiredArgumentWithNoneValueRaisesTypeError(self):
"""
L{ListOf.toBox} raises C{TypeError} when passed a value of C{None}
for the argument.
"""
stringList = amp.ListOf(amp.Integer())
self.assertRaises(
TypeError, stringList.toBox, 'omitted', amp.AmpBox(),
{'omitted': None}, None)
def test_optionalArgumentWithNoneValueOmitted(self):
"""
L{ListOf.toBox} silently omits serializing any argument with a
value of C{None} that is designated as optional for the protocol.
"""
stringList = amp.ListOf(amp.Integer(), optional=True)
strings = amp.AmpBox()
stringList.toBox('omitted', strings, {'omitted': None}, None)
self.assertEqual(strings, {})
def test_requiredArgumentWithKeyMissingRaisesKeyError(self):
"""
L{ListOf.toBox} raises C{KeyError} if the argument's key is not
present in the objects dictionary.
"""
stringList = amp.ListOf(amp.Integer())
self.assertRaises(
            KeyError, stringList.toBox, 'omitted', amp.AmpBox(),
{'someOtherKey': 0}, None)
def test_optionalArgumentWithKeyMissingOmitted(self):
"""
L{ListOf.toBox} silently omits serializing any argument designated
as optional whose key is not present in the objects dictionary.
"""
stringList = amp.ListOf(amp.Integer(), optional=True)
        stringList.toBox('omitted', amp.AmpBox(), {'someOtherKey': 0}, None)
def test_omittedOptionalArgumentDeserializesAsNone(self):
"""
L{ListOf.fromBox} correctly reverses the operation performed by
L{ListOf.toBox} for optional arguments.
"""
stringList = amp.ListOf(amp.Integer(), optional=True)
objects = {}
stringList.fromBox('omitted', {}, objects, None)
self.assertEqual(objects, {'omitted': None})
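# Illustrative sketch, not part of the original test module: a hypothetical
# Command declaring the optional ListOf argument pattern that
# ListOfOptionalTests exercises above; callers may omit 'readings' entirely.
class OptionalReadingsSketch(amp.Command):
    arguments = [('readings', amp.ListOf(amp.Integer(), optional=True))]
    response = [('accepted', amp.Boolean())]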
class UNIXStringTransport(object):
"""
An in-memory implementation of L{interfaces.IUNIXTransport} which collects
all data given to it for later inspection.
@ivar _queue: A C{list} of the data which has been given to this transport,
eg via C{write} or C{sendFileDescriptor}. Elements are two-tuples of a
string (identifying the destination of the data) and the data itself.
"""
implements(interfaces.IUNIXTransport)
def __init__(self, descriptorFuzz):
"""
@param descriptorFuzz: An offset to apply to descriptors.
@type descriptorFuzz: C{int}
"""
self._fuzz = descriptorFuzz
self._queue = []
def sendFileDescriptor(self, descriptor):
self._queue.append((
'fileDescriptorReceived', descriptor + self._fuzz))
def write(self, data):
self._queue.append(('dataReceived', data))
def writeSequence(self, seq):
for data in seq:
self.write(data)
def loseConnection(self):
self._queue.append(('connectionLost', Failure(ConnectionLost())))
def getHost(self):
return UNIXAddress('/tmp/some-path')
def getPeer(self):
return UNIXAddress('/tmp/another-path')
# Minimal evidence that we got the signatures right
verifyClass(interfaces.ITransport, UNIXStringTransport)
verifyClass(interfaces.IUNIXTransport, UNIXStringTransport)
class DescriptorTests(unittest.TestCase):
"""
Tests for L{amp.Descriptor}, an argument type for passing a file descriptor
over an AMP connection over a UNIX domain socket.
"""
def setUp(self):
self.fuzz = 3
self.transport = UNIXStringTransport(descriptorFuzz=self.fuzz)
self.protocol = amp.BinaryBoxProtocol(
amp.BoxDispatcher(amp.CommandLocator()))
self.protocol.makeConnection(self.transport)
def test_fromStringProto(self):
"""
L{Descriptor.fromStringProto} constructs a file descriptor value by
extracting a previously received file descriptor corresponding to the
wire value of the argument from the L{_DescriptorExchanger} state of the
protocol passed to it.
This is a whitebox test which involves direct L{_DescriptorExchanger}
state inspection.
"""
argument = amp.Descriptor()
self.protocol.fileDescriptorReceived(5)
self.protocol.fileDescriptorReceived(3)
self.protocol.fileDescriptorReceived(1)
self.assertEqual(
5, argument.fromStringProto("0", self.protocol))
self.assertEqual(
3, argument.fromStringProto("1", self.protocol))
self.assertEqual(
1, argument.fromStringProto("2", self.protocol))
self.assertEqual({}, self.protocol._descriptors)
def test_toStringProto(self):
"""
To send a file descriptor, L{Descriptor.toStringProto} uses the
L{IUNIXTransport.sendFileDescriptor} implementation of the transport of
the protocol passed to it to copy the file descriptor. Each subsequent
descriptor sent over a particular AMP connection is assigned the next
integer value, starting from 0. The base ten string representation of
this value is the byte encoding of the argument.
This is a whitebox test which involves direct L{_DescriptorExchanger}
state inspection and mutation.
"""
argument = amp.Descriptor()
self.assertEqual("0", argument.toStringProto(2, self.protocol))
self.assertEqual(
("fileDescriptorReceived", 2 + self.fuzz), self.transport._queue.pop(0))
self.assertEqual("1", argument.toStringProto(4, self.protocol))
self.assertEqual(
("fileDescriptorReceived", 4 + self.fuzz), self.transport._queue.pop(0))
self.assertEqual("2", argument.toStringProto(6, self.protocol))
self.assertEqual(
("fileDescriptorReceived", 6 + self.fuzz), self.transport._queue.pop(0))
self.assertEqual({}, self.protocol._descriptors)
def test_roundTrip(self):
"""
L{amp.Descriptor.fromBox} can interpret an L{amp.AmpBox} constructed by
L{amp.Descriptor.toBox} to reconstruct a file descriptor value.
"""
name = "alpha"
strings = {}
descriptor = 17
sendObjects = {name: descriptor}
argument = amp.Descriptor()
argument.toBox(name, strings, sendObjects.copy(), self.protocol)
receiver = amp.BinaryBoxProtocol(
amp.BoxDispatcher(amp.CommandLocator()))
for event in self.transport._queue:
getattr(receiver, event[0])(*event[1:])
receiveObjects = {}
argument.fromBox(name, strings.copy(), receiveObjects, receiver)
# Make sure we got the descriptor. Adjust by fuzz to be more convincing
# of having gone through L{IUNIXTransport.sendFileDescriptor}, not just
# converted to a string and then parsed back into an integer.
self.assertEqual(descriptor + self.fuzz, receiveObjects[name])
class DateTimeTests(unittest.TestCase):
"""
Tests for L{amp.DateTime}, L{amp._FixedOffsetTZInfo}, and L{amp.utc}.
"""
string = '9876-01-23T12:34:56.054321-01:23'
tzinfo = amp._FixedOffsetTZInfo('-', 1, 23)
object = datetime.datetime(9876, 1, 23, 12, 34, 56, 54321, tzinfo)
def test_invalidString(self):
"""
L{amp.DateTime.fromString} raises L{ValueError} when passed a string
which does not represent a timestamp in the proper format.
"""
d = amp.DateTime()
self.assertRaises(ValueError, d.fromString, 'abc')
def test_invalidDatetime(self):
"""
L{amp.DateTime.toString} raises L{ValueError} when passed a naive
datetime (a datetime with no timezone information).
"""
d = amp.DateTime()
self.assertRaises(ValueError, d.toString,
datetime.datetime(2010, 12, 25, 0, 0, 0))
def test_fromString(self):
"""
L{amp.DateTime.fromString} returns a C{datetime.datetime} with all of
its fields populated from the string passed to it.
"""
argument = amp.DateTime()
value = argument.fromString(self.string)
self.assertEqual(value, self.object)
def test_toString(self):
"""
L{amp.DateTime.toString} returns a C{str} in the wire format including
all of the information from the C{datetime.datetime} passed into it,
including the timezone offset.
"""
argument = amp.DateTime()
value = argument.toString(self.object)
self.assertEqual(value, self.string)
class FixedOffsetTZInfoTests(unittest.TestCase):
"""
Tests for L{amp._FixedOffsetTZInfo} and L{amp.utc}.
"""
def test_tzname(self):
"""
L{amp.utc.tzname} returns C{"+00:00"}.
"""
self.assertEqual(amp.utc.tzname(None), '+00:00')
def test_dst(self):
"""
L{amp.utc.dst} returns a zero timedelta.
"""
self.assertEqual(amp.utc.dst(None), datetime.timedelta(0))
def test_utcoffset(self):
"""
L{amp.utc.utcoffset} returns a zero timedelta.
"""
self.assertEqual(amp.utc.utcoffset(None), datetime.timedelta(0))
def test_badSign(self):
"""
L{amp._FixedOffsetTZInfo} raises L{ValueError} if passed an offset sign
other than C{'+'} or C{'-'}.
"""
self.assertRaises(ValueError, amp._FixedOffsetTZInfo, '?', 0, 0)
if not interfaces.IReactorSSL.providedBy(reactor):
skipMsg = 'This test case requires SSL support in the reactor'
TLSTest.skip = skipMsg
LiveFireTLSTestCase.skip = skipMsg
PlainVanillaLiveFire.skip = skipMsg
WithServerTLSVerification.skip = skipMsg
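# Illustrative sketch, not part of the original test module: the minimal
# Command/responder round-trip that the tests above exercise, using only the
# public twisted.protocols.amp API. The SumSketch/AdderSketch names are
# hypothetical.
from twisted.protocols import amp as _amp

class SumSketch(_amp.Command):
    arguments = [('a', _amp.Integer()), ('b', _amp.Integer())]
    response = [('total', _amp.Integer())]

class AdderSketch(_amp.AMP):
    # Responders return a dict matching the Command's response schema.
    @SumSketch.responder
    def _addTogether(self, a, b):
        return {'total': a + b}

# A connected peer would invoke it with:
#     proto.callRemote(SumSketch, a=2, b=3)  # Deferred firing {'total': 5}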
|
{
"content_hash": "fcefb3e944c927d2f7a841ad4ecc96d7",
"timestamp": "",
"source": "github",
"line_count": 3174,
"max_line_length": 91,
"avg_line_length": 33.15595463137996,
"alnum_prop": 0.6156769957334398,
"repo_name": "biddisco/VTK",
"id": "325af405af4f8e4983ed47bddbad6784760578da",
"size": "105344",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "ThirdParty/Twisted/twisted/test/test_amp.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "37444"
},
{
"name": "C",
"bytes": "45542302"
},
{
"name": "C++",
"bytes": "60467840"
},
{
"name": "CSS",
"bytes": "157961"
},
{
"name": "Cuda",
"bytes": "28721"
},
{
"name": "GAP",
"bytes": "14120"
},
{
"name": "IDL",
"bytes": "4406"
},
{
"name": "Java",
"bytes": "184678"
},
{
"name": "JavaScript",
"bytes": "978324"
},
{
"name": "Objective-C",
"bytes": "121232"
},
{
"name": "Objective-C++",
"bytes": "101052"
},
{
"name": "Pascal",
"bytes": "3255"
},
{
"name": "Perl",
"bytes": "177007"
},
{
"name": "Python",
"bytes": "13262355"
},
{
"name": "Shell",
"bytes": "41929"
},
{
"name": "Tcl",
"bytes": "1894036"
}
],
"symlink_target": ""
}
|
import os
import vtk
import vtk.test.Testing
import math
class TestParallelCoordinatesColors(vtk.test.Testing.vtkTest):
def testLinePlot(self):
"Test if colored parallel coordinates plots can be built with python"
# Set up a 2D scene, add a PC chart to it
view = vtk.vtkContextView()
view.GetRenderer().SetBackground(1.0, 1.0, 1.0)
view.GetRenderWindow().SetSize(600,300)
chart = vtk.vtkChartParallelCoordinates()
view.GetScene().AddItem(chart)
# Create a table with some points in it
arrX = vtk.vtkFloatArray()
arrX.SetName("XAxis")
arrC = vtk.vtkFloatArray()
arrC.SetName("Cosine")
arrS = vtk.vtkFloatArray()
arrS.SetName("Sine")
arrS2 = vtk.vtkFloatArray()
arrS2.SetName("Tan")
numPoints = 200
inc = 7.5 / (numPoints-1)
for i in range(numPoints):
arrX.InsertNextValue(i * inc)
arrC.InsertNextValue(math.cos(i * inc) + 0.0)
arrS.InsertNextValue(math.sin(i * inc) + 0.0)
arrS2.InsertNextValue(math.tan(i * inc) + 0.5)
table = vtk.vtkTable()
table.AddColumn(arrX)
table.AddColumn(arrC)
table.AddColumn(arrS)
table.AddColumn(arrS2)
# Create blue to gray to red lookup table
lut = vtk.vtkLookupTable()
lutNum = 256
lut.SetNumberOfTableValues(lutNum)
lut.Build()
ctf = vtk.vtkColorTransferFunction()
ctf.SetColorSpaceToDiverging()
cl = []
# Variant of Colorbrewer RdBu 5
cl.append([float(cc)/255.0 for cc in [202, 0, 32]])
cl.append([float(cc)/255.0 for cc in [244, 165, 130]])
cl.append([float(cc)/255.0 for cc in [140, 140, 140]])
cl.append([float(cc)/255.0 for cc in [146, 197, 222]])
cl.append([float(cc)/255.0 for cc in [5, 113, 176]])
vv = [float(xx)/float(len(cl)-1) for xx in range(len(cl))]
vv.reverse()
for pt,color in zip(vv,cl):
ctf.AddRGBPoint(pt, color[0], color[1], color[2])
for ii,ss in enumerate([float(xx)/float(lutNum) for xx in range(lutNum)]):
cc = ctf.GetColor(ss)
lut.SetTableValue(ii,cc[0],cc[1],cc[2],1.0)
lut.SetAlpha(0.25)
lut.SetRange(-1, 1)
chart.GetPlot(0).SetInputData(table)
chart.GetPlot(0).SetScalarVisibility(1)
chart.GetPlot(0).SetLookupTable(lut)
chart.GetPlot(0).SelectColorArray("Cosine")
view.GetRenderWindow().SetMultiSamples(0)
view.GetRenderWindow().Render()
img_file = "TestParallelCoordinatesColors.png"
vtk.test.Testing.compareImage(view.GetRenderWindow(),vtk.test.Testing.getAbsImagePath(img_file),threshold=25)
vtk.test.Testing.interact()
if __name__ == "__main__":
vtk.test.Testing.main([(TestParallelCoordinatesColors, 'test')])
|
{
"content_hash": "d4c2ba08dcbfe90a4440cd9bb7892f14",
"timestamp": "",
"source": "github",
"line_count": 87,
"max_line_length": 117,
"avg_line_length": 34.47126436781609,
"alnum_prop": 0.5875291763921308,
"repo_name": "hlzz/dotfiles",
"id": "e5e25f871669491b0389ec155256525745ea34ad",
"size": "3047",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "graphics/VTK-7.0.0/Charts/Core/Testing/Python/TestParallelCoordinatesColors.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "AppleScript",
"bytes": "1240"
},
{
"name": "Arc",
"bytes": "38"
},
{
"name": "Assembly",
"bytes": "449468"
},
{
"name": "Batchfile",
"bytes": "16152"
},
{
"name": "C",
"bytes": "102303195"
},
{
"name": "C++",
"bytes": "155056606"
},
{
"name": "CMake",
"bytes": "7200627"
},
{
"name": "CSS",
"bytes": "179330"
},
{
"name": "Cuda",
"bytes": "30026"
},
{
"name": "D",
"bytes": "2152"
},
{
"name": "Emacs Lisp",
"bytes": "14892"
},
{
"name": "FORTRAN",
"bytes": "5276"
},
{
"name": "Forth",
"bytes": "3637"
},
{
"name": "GAP",
"bytes": "14495"
},
{
"name": "GLSL",
"bytes": "438205"
},
{
"name": "Gnuplot",
"bytes": "327"
},
{
"name": "Groff",
"bytes": "518260"
},
{
"name": "HLSL",
"bytes": "965"
},
{
"name": "HTML",
"bytes": "2003175"
},
{
"name": "Haskell",
"bytes": "10370"
},
{
"name": "IDL",
"bytes": "2466"
},
{
"name": "Java",
"bytes": "219109"
},
{
"name": "JavaScript",
"bytes": "1618007"
},
{
"name": "Lex",
"bytes": "119058"
},
{
"name": "Lua",
"bytes": "23167"
},
{
"name": "M",
"bytes": "1080"
},
{
"name": "M4",
"bytes": "292475"
},
{
"name": "Makefile",
"bytes": "7112810"
},
{
"name": "Matlab",
"bytes": "1582"
},
{
"name": "NSIS",
"bytes": "34176"
},
{
"name": "Objective-C",
"bytes": "65312"
},
{
"name": "Objective-C++",
"bytes": "269995"
},
{
"name": "PAWN",
"bytes": "4107117"
},
{
"name": "PHP",
"bytes": "2690"
},
{
"name": "Pascal",
"bytes": "5054"
},
{
"name": "Perl",
"bytes": "485508"
},
{
"name": "Pike",
"bytes": "1338"
},
{
"name": "Prolog",
"bytes": "5284"
},
{
"name": "Python",
"bytes": "16799659"
},
{
"name": "QMake",
"bytes": "89858"
},
{
"name": "Rebol",
"bytes": "291"
},
{
"name": "Ruby",
"bytes": "21590"
},
{
"name": "Scilab",
"bytes": "120244"
},
{
"name": "Shell",
"bytes": "2266191"
},
{
"name": "Slash",
"bytes": "1536"
},
{
"name": "Smarty",
"bytes": "1368"
},
{
"name": "Swift",
"bytes": "331"
},
{
"name": "Tcl",
"bytes": "1911873"
},
{
"name": "TeX",
"bytes": "11981"
},
{
"name": "Verilog",
"bytes": "3893"
},
{
"name": "VimL",
"bytes": "595114"
},
{
"name": "XSLT",
"bytes": "62675"
},
{
"name": "Yacc",
"bytes": "307000"
},
{
"name": "eC",
"bytes": "366863"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import, division, print_function
from enum import Enum
from cryptography import x509
from cryptography.hazmat.backends import _get_backend
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import ec, rsa
from cryptography.utils import _check_byteslike
def load_pem_pkcs7_certificates(data):
backend = _get_backend(None)
return backend.load_pem_pkcs7_certificates(data)
def load_der_pkcs7_certificates(data):
backend = _get_backend(None)
return backend.load_der_pkcs7_certificates(data)
class PKCS7SignatureBuilder(object):
def __init__(self, data=None, signers=[], additional_certs=[]):
self._data = data
self._signers = signers
self._additional_certs = additional_certs
def set_data(self, data):
_check_byteslike("data", data)
if self._data is not None:
raise ValueError("data may only be set once")
return PKCS7SignatureBuilder(data, self._signers)
def add_signer(self, certificate, private_key, hash_algorithm):
if not isinstance(
hash_algorithm,
(
hashes.SHA1,
hashes.SHA224,
hashes.SHA256,
hashes.SHA384,
hashes.SHA512,
),
):
raise TypeError(
"hash_algorithm must be one of hashes.SHA1, SHA224, "
"SHA256, SHA384, or SHA512"
)
if not isinstance(certificate, x509.Certificate):
raise TypeError("certificate must be a x509.Certificate")
if not isinstance(
private_key, (rsa.RSAPrivateKey, ec.EllipticCurvePrivateKey)
):
raise TypeError("Only RSA & EC keys are supported at this time.")
return PKCS7SignatureBuilder(
self._data,
self._signers + [(certificate, private_key, hash_algorithm)],
)
def add_certificate(self, certificate):
if not isinstance(certificate, x509.Certificate):
raise TypeError("certificate must be a x509.Certificate")
return PKCS7SignatureBuilder(
self._data, self._signers, self._additional_certs + [certificate]
)
def sign(self, encoding, options, backend=None):
if len(self._signers) == 0:
raise ValueError("Must have at least one signer")
if self._data is None:
raise ValueError("You must add data to sign")
options = list(options)
if not all(isinstance(x, PKCS7Options) for x in options):
raise ValueError("options must be from the PKCS7Options enum")
if encoding not in (
serialization.Encoding.PEM,
serialization.Encoding.DER,
serialization.Encoding.SMIME,
):
raise ValueError(
"Must be PEM, DER, or SMIME from the Encoding enum"
)
# Text is a meaningless option unless it is accompanied by
# DetachedSignature
if (
PKCS7Options.Text in options
and PKCS7Options.DetachedSignature not in options
):
raise ValueError(
"When passing the Text option you must also pass "
"DetachedSignature"
)
if PKCS7Options.Text in options and encoding in (
serialization.Encoding.DER,
serialization.Encoding.PEM,
):
raise ValueError(
"The Text option is only available for SMIME serialization"
)
# No attributes implies no capabilities so we'll error if you try to
# pass both.
if (
PKCS7Options.NoAttributes in options
and PKCS7Options.NoCapabilities in options
):
raise ValueError(
"NoAttributes is a superset of NoCapabilities. Do not pass "
"both values."
)
backend = _get_backend(backend)
return backend.pkcs7_sign(self, encoding, options)
class PKCS7Options(Enum):
Text = "Add text/plain MIME type"
Binary = "Don't translate input data into canonical MIME format"
DetachedSignature = "Don't embed data in the PKCS7 structure"
NoCapabilities = "Don't embed SMIME capabilities"
NoAttributes = "Don't embed authenticatedAttributes"
NoCerts = "Don't embed signer certificate"
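# Usage sketch (an illustrative assumption, not part of the library): sign a
# payload with PKCS7SignatureBuilder above, using a throwaway RSA key and a
# self-signed certificate generated on the fly.
if __name__ == "__main__":
    import datetime

    from cryptography.hazmat.backends import default_backend
    from cryptography.x509.oid import NameOID

    _key = rsa.generate_private_key(
        public_exponent=65537, key_size=2048, backend=default_backend()
    )
    _name = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, u"pkcs7 sketch")])
    _now = datetime.datetime.utcnow()
    _cert = (
        x509.CertificateBuilder()
        .subject_name(_name)
        .issuer_name(_name)
        .public_key(_key.public_key())
        .serial_number(x509.random_serial_number())
        .not_valid_before(_now)
        .not_valid_after(_now + datetime.timedelta(days=1))
        .sign(_key, hashes.SHA256(), default_backend())
    )
    _signed = (
        PKCS7SignatureBuilder()
        .set_data(b"hello pkcs7")
        .add_signer(_cert, _key, hashes.SHA256())
        .sign(serialization.Encoding.PEM, [])
    )
    print(_signed.decode("ascii"))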
|
{
"content_hash": "b88815c85e6bc070d012af0649b032d3",
"timestamp": "",
"source": "github",
"line_count": 128,
"max_line_length": 77,
"avg_line_length": 34.71875,
"alnum_prop": 0.6147614761476148,
"repo_name": "kimjinyong/i2nsf-framework",
"id": "1e11e28ef5b35c9d179031bfd73aaf180e4f68ed",
"size": "4625",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "Hackathon-112/analyzer/.local/lib/python3.5/site-packages/cryptography/hazmat/primitives/serialization/pkcs7.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "4396520"
},
{
"name": "C++",
"bytes": "9389"
},
{
"name": "CSS",
"bytes": "51736"
},
{
"name": "Dockerfile",
"bytes": "3839"
},
{
"name": "Emacs Lisp",
"bytes": "24812"
},
{
"name": "Erlang",
"bytes": "1364078"
},
{
"name": "HTML",
"bytes": "42486541"
},
{
"name": "Hack",
"bytes": "6349"
},
{
"name": "Java",
"bytes": "7976"
},
{
"name": "JavaScript",
"bytes": "533000"
},
{
"name": "Makefile",
"bytes": "401170"
},
{
"name": "PHP",
"bytes": "164007"
},
{
"name": "Perl",
"bytes": "2188"
},
{
"name": "Python",
"bytes": "3004949"
},
{
"name": "QMake",
"bytes": "360"
},
{
"name": "Roff",
"bytes": "3906372"
},
{
"name": "Shell",
"bytes": "83872"
},
{
"name": "XSLT",
"bytes": "167018"
}
],
"symlink_target": ""
}
|
from scipy.fftpack import fft as FFT
from scipy.fftpack import dct as DCT
import numpy as np
def mfcc(framewiseData, order = 60, samplerate = 48000, fftLen = None, low = 0, high = None):
""" Get the mel-frequency cepstral coefficients for the give data
Calculates the MFCCs of the data, it is assumed that the data is unbiased and pre-emphasised
Parameters
----------
framewiseData: numpy ndarray
data to calculate mfccs for, each row is one frame
order: int, optional
number of MFCCs to calculate, default 60
samplerate: float, optional
sample rate of the source audio in Hz, default 48000
fftLen: int, optional
length of fft used for calculations, default size of frame
low: float, optional
lowest frequency for fft bins in Hz, default 0
high: float, optional
highest frequency for fft bins in Hz, default samplerate / 2
Returns
-------
numpy ndarray
mfccs, each row is one frame
Raises
------
    ValueError
        if the implied FFT length is not an integer
"""
samplerate = float(samplerate)
low = float(low)
if high is None:
        high = samplerate / 2  # Nyquist frequency
high = float(high)
if fftLen is None:
fftLen = framewiseData.shape[1]
if float(fftLen).is_integer():
fftLen = int(fftLen)
else:
raise ValueError('FFT Length is not an integer')
spectrum = powerSpectrum(framewiseData, fftLen)
filters = filterBank(order, low, high, fftLen, samplerate)
# TODO: apply lifter
mfccs = np.log(np.dot(spectrum, filters))
mfccs = DCT(mfccs, type=2, norm='ortho')
return mfccs
def filterBank(order, low, high, fftLen, samplerate):
""" Create a triangular window filter bank """
centrePoints = fromMel(np.linspace(toMel(low), toMel(high), order + 2))
    centrePoints = np.round(fftLen*centrePoints/samplerate).astype(int)  # integer FFT bin indices
bank = np.zeros((order, fftLen/2))
for o in range(order):
bank[o, centrePoints[o]:centrePoints[o+1]] = np.linspace(0, 1.0, centrePoints[o+1] - centrePoints[o] + 1)[1:]
bank[o, centrePoints[o+1]:centrePoints[o+2]] = np.linspace(1.0, 0.0, centrePoints[o+2] - centrePoints[o+1] + 1)[:-1]
return bank.T
def powerSpectrum(data, fftLen):
""" Calculate the framewise one tail power spectrum """
fftLen = int(fftLen)
return np.absolute(FFT(data,axis=1,n=fftLen)[:,fftLen/2:])
def toMel(x):
""" Converts x from Hz to mel-scale """
return 2595*np.log10(1+x/700)
def fromMel(x):
""" Converts x from mel-scale to Hz """
return 700*(10**(x/2595.0)-1)
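# Usage sketch (illustrative assumption, not part of the original module):
# compute MFCCs for white-noise "frames" standing in for real framed audio.
if __name__ == '__main__':
    numFrames, frameLen = 50, 1024
    frames = np.random.randn(numFrames, frameLen)  # one frame per row
    coeffs = mfcc(frames, order=20, samplerate=48000)
    print(coeffs.shape)  # expected (numFrames, order), i.e. (50, 20)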
|
{
"content_hash": "bf40128b0db00ec8d92685a5c4ec9acf",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 124,
"avg_line_length": 33.44303797468354,
"alnum_prop": 0.6328538985616957,
"repo_name": "dabraude/PYSpeechLib",
"id": "93f5148460107d9b1747834c63c1cb78e4852569",
"size": "2667",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/algorithms/mfcc.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "40417"
}
],
"symlink_target": ""
}
|
from django.http import HttpResponseRedirect, HttpResponse
from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from django.contrib import auth
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from oauth2app.models import Client, AccessRange
from django.contrib.admin.views.decorators import staff_member_required
#from datastoreUtils import *
from apps.questions.models import Script
import pymongo
import json
def getquestion(request):
question_template = request.GET.get('template')
template = {}
return render_to_response('questions/'+question_template,
template,
RequestContext(request))
def getandroidquestion(request):
question_template = request.GET.get('template')
template = {}
return render_to_response('questions/android/'+question_template,
template,
RequestContext(request))
def update(request):
s = None
try:
s = Script.objects.get(id=request.POST.get('id'))
s.code = request.POST.get('code')
except:
s = Script(name=request.POST.get('name'), code=request.POST.get('code'))
s.save()
return HttpResponse(request.body, 'application/json')
def ask(request):
question_id = request.GET.get('question_id')
scripts = Script.objects.all()
template = {"pds_location": "localhost:8003",
"scripts": scripts}
if request.method == 'POST':
print "post please implement"
return render_to_response('questions/ask/'+question_id+'.html',
template,
RequestContext(request))
|
{
"content_hash": "7d82930f796cecc61da911568377fcea",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 73,
"avg_line_length": 31.115384615384617,
"alnum_prop": 0.7206427688504327,
"repo_name": "eschloss/openPDS-RegistryServer-for-Heroku",
"id": "137ea213b76d726d4e49c8af1a277d364aec3711",
"size": "1642",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "apps/questions/views.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "47821"
},
{
"name": "HTML",
"bytes": "41652"
},
{
"name": "JavaScript",
"bytes": "200605"
},
{
"name": "Python",
"bytes": "85254"
}
],
"symlink_target": ""
}
|
from twilio.rest import TwilioRestClient
from datetime import datetime
import socket
import errno
##########################################################################
# Public Functions #
##########################################################################
def send_receipt(phone_number, quantity, beer, total):
"""
Sends a receipt SMS message to the user who just purchased beer
Args:
phone_number (string):
The phone number that the SMS message is to be sent to. Should be
of format "+1XXXXXXXXX". For example, if you want to text the
number (412) 523-6164, the arg should be "+14125236164"
quantity (int):
The number of oz of beer the user bought
beer (string):
The kind of beer the user bought (i.e. "Blue Moon", "Bud Light",
"Shock Top", etc...)
total (float):
The total amount that the user is to be charged
Example:
To send an SMS to (412) 523-6164 saying they just bought 12 oz of Blue Moon
and are going to be charged $1.25, do the following:
send_receipt("+14125236164", 12, "Blue Moon", 1.25)
Return:
        True if message was sent successfully. False otherwise
"""
curr_time = str(datetime.now())
msg_body = "Purchased {0} oz of {1} for a total of ${2} at {3}"\
.format(quantity, beer, total, curr_time)
return __send(phone_number, msg_body)
def send_pin(phone_number, name, pin):
"""
Sends an SMS message to the user with their PIN number
Args:
phone_number (string):
The phone number that the SMS message is to be sent to. Should be
of format "+1XXXXXXXXX". For example, if you want to text the
number (412) 523-6164, the arg should be "+14125236164"
name (string):
The name of the person receiving the text message
pin (int):
The PIN number for the user
Example:
The following line of code:
send_pin("+14125236164", "Nick", 1234)
will send a text message to (412) 523-6164 with the contents:
'Welcome Nick! Your PIN # is 1234'
Return:
True if message was sent succesfully. False if otherwise
"""
msg_body = "Welcome {0}! Your PIN # is {1}".format(name, pin)
return __send(phone_number, msg_body)
###########################################################################
# Private Functions #
###########################################################################
def __send(to_number, msg_body):
# Find these values at https://twilio.com/user/account
account_sid = "AC83492826650db9d730643243a37dc679"
auth_token = "bb8fd116943adb81441763adf7be5bea"
client = TwilioRestClient(account_sid, auth_token)
from_number = "+14122468519" # This is a valid Twilio number
try:
message = client.messages.create(to=to_number,
from_=from_number,
body=msg_body)
# If there was an error sending the message, the errorcode
# attribute is set
if hasattr(message, 'errorcode'):
return False
else:
return True
except socket.error, v:
errorcode = v[0]
        if errorcode == errno.ECONNREFUSED:
            print "Sorry, Twilio couldn't connect to its servers. " \
                  "Please try again"
        # the docstring promises a boolean, so report failure on any socket error
        return False
|
{
"content_hash": "ba7c32d9b8faa69954c5c9234c6a32bb",
"timestamp": "",
"source": "github",
"line_count": 105,
"max_line_length": 79,
"avg_line_length": 33.904761904761905,
"alnum_prop": 0.5370786516853933,
"repo_name": "topher23/IntelliKeg",
"id": "c3801b6aad2db65b69b0386192846a894822d26d",
"size": "3635",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sms.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "17183"
}
],
"symlink_target": ""
}
|
"""
This module contains a Google Sheets API hook
"""
from typing import Any, Dict, List, Optional
from googleapiclient.discovery import build
from airflow.exceptions import AirflowException
from airflow.gcp.hooks.base import CloudBaseHook
class GSheetsHook(CloudBaseHook):
"""
Interact with Google Sheets via GCP connection
Reading and writing cells in Google Sheet:
https://developers.google.com/sheets/api/guides/values
:param gcp_conn_id: The connection ID to use when fetching connection info.
:type gcp_conn_id: str
:param spreadsheet_id: The Google Sheet ID to interact with
:type spreadsheet_id: str
:param api_version: API Version
:type api_version: str
:param delegate_to: The account to impersonate, if any.
For this to work, the service account making the request must have
domain-wide delegation enabled.
:type delegate_to: str
"""
def __init__(
self,
spreadsheet_id: str,
gcp_conn_id: str = 'google_cloud_default',
api_version: str = 'v4',
delegate_to: Optional[str] = None
) -> None:
super().__init__(gcp_conn_id, delegate_to)
self.spreadsheet_id = spreadsheet_id
self.gcp_conn_id = gcp_conn_id
self.api_version = api_version
self.delegate_to = delegate_to
self._conn = None
def get_conn(self) -> Any:
"""
Retrieves connection to Google Sheets.
:return: Google Sheets services object.
:rtype: Any
"""
if not self._conn:
http_authorized = self._authorize()
self._conn = build('sheets', self.api_version, http=http_authorized, cache_discovery=False)
return self._conn
@CloudBaseHook.catch_http_exception
def get_values(
self,
range_: str,
major_dimension: str = 'DIMENSION_UNSPECIFIED',
value_render_option: str = 'FORMATTED_VALUE',
date_time_render_option: str = 'SERIAL_NUMBER'
) -> Dict:
"""
Gets values from Google Sheet from a single range
https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets.values/get
:param range_: The A1 notation of the values to retrieve.
:type range_: str
:param major_dimension: Indicates which dimension an operation should apply to.
DIMENSION_UNSPECIFIED, ROWS, or COLUMNS
:type major_dimension: str
:param value_render_option: Determines how values should be rendered in the output.
FORMATTED_VALUE, UNFORMATTED_VALUE, or FORMULA
:type value_render_option: str
:param date_time_render_option: Determines how dates should be rendered in the output.
SERIAL_NUMBER or FORMATTED_STRING
:type date_time_render_option: str
:return: Google Sheets API response.
:rtype: Dict
"""
service = self.get_conn()
response = service.spreadsheets().values().get( # pylint: disable=no-member
spreadsheetId=self.spreadsheet_id,
range=range_,
majorDimension=major_dimension,
valueRenderOption=value_render_option,
dateTimeRenderOption=date_time_render_option
).execute(num_retries=self.num_retries)
return response
@CloudBaseHook.catch_http_exception
def batch_get_values(
self,
ranges: List,
major_dimension: str = 'DIMENSION_UNSPECIFIED',
value_render_option: str = 'FORMATTED_VALUE',
date_time_render_option: str = 'SERIAL_NUMBER'
) -> Dict:
"""
Gets values from Google Sheet from a list of ranges
https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets.values/batchGet
:param ranges: The A1 notation of the values to retrieve.
:type ranges: List
:param major_dimension: Indicates which dimension an operation should apply to.
DIMENSION_UNSPECIFIED, ROWS, or COLUMNS
:type major_dimension: str
:param value_render_option: Determines how values should be rendered in the output.
FORMATTED_VALUE, UNFORMATTED_VALUE, or FORMULA
:type value_render_option: str
:param date_time_render_option: Determines how dates should be rendered in the output.
SERIAL_NUMBER or FORMATTED_STRING
:type date_time_render_option: str
:return: Google Sheets API response.
:rtype: Dict
"""
service = self.get_conn()
response = service.spreadsheets().values().batchGet( # pylint: disable=no-member
spreadsheetId=self.spreadsheet_id,
ranges=ranges,
majorDimension=major_dimension,
valueRenderOption=value_render_option,
dateTimeRenderOption=date_time_render_option
).execute(num_retries=self.num_retries)
return response
@CloudBaseHook.catch_http_exception
def update_values(
self,
range_: str,
values: List,
major_dimension: str = 'ROWS',
value_input_option: str = 'RAW',
include_values_in_response: bool = False,
value_render_option: str = 'FORMATTED_VALUE',
date_time_render_option: str = 'SERIAL_NUMBER'
) -> Dict:
"""
Updates values from Google Sheet from a single range
https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets.values/update
        :param range_: The A1 notation of the values to update.
:type range_: str
:param values: Data within a range of the spreadsheet.
:type values: List
:param major_dimension: Indicates which dimension an operation should apply to.
DIMENSION_UNSPECIFIED, ROWS, or COLUMNS
:type major_dimension: str
:param value_input_option: Determines how input data should be interpreted.
RAW or USER_ENTERED
:type value_input_option: str
:param include_values_in_response: Determines if the update response should
include the values of the cells that were updated.
:type include_values_in_response: bool
:param value_render_option: Determines how values should be rendered in the output.
FORMATTED_VALUE, UNFORMATTED_VALUE, or FORMULA
:type value_render_option: str
:param date_time_render_option: Determines how dates should be rendered in the output.
SERIAL_NUMBER or FORMATTED_STRING
:type date_time_render_option: str
:return: Google Sheets API response.
:rtype: Dict
"""
service = self.get_conn()
body = {
"range": range_,
"majorDimension": major_dimension,
"values": values
}
response = service.spreadsheets().values().update( # pylint: disable=no-member
spreadsheetId=self.spreadsheet_id,
range=range_,
valueInputOption=value_input_option,
includeValuesInResponse=include_values_in_response,
responseValueRenderOption=value_render_option,
responseDateTimeRenderOption=date_time_render_option,
body=body
).execute(num_retries=self.num_retries)
return response
@CloudBaseHook.catch_http_exception
def batch_update_values(
self,
ranges: List,
values: List,
major_dimension: str = 'ROWS',
value_input_option: str = 'RAW',
include_values_in_response: bool = False,
value_render_option: str = 'FORMATTED_VALUE',
date_time_render_option: str = 'SERIAL_NUMBER'
) -> Dict:
"""
Updates values from Google Sheet for multiple ranges
https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets.values/batchUpdate
        :param ranges: The A1 notation of the ranges to update.
:type ranges: List
:param values: Data within a range of the spreadsheet.
:type values: List
:param major_dimension: Indicates which dimension an operation should apply to.
DIMENSION_UNSPECIFIED, ROWS, or COLUMNS
:type major_dimension: str
:param value_input_option: Determines how input data should be interpreted.
RAW or USER_ENTERED
:type value_input_option: str
:param include_values_in_response: Determines if the update response should
include the values of the cells that were updated.
:type include_values_in_response: bool
:param value_render_option: Determines how values should be rendered in the output.
FORMATTED_VALUE, UNFORMATTED_VALUE, or FORMULA
:type value_render_option: str
:param date_time_render_option: Determines how dates should be rendered in the output.
SERIAL_NUMBER or FORMATTED_STRING
:type date_time_render_option: str
:return: Google Sheets API response.
:rtype: Dict
"""
if len(ranges) != len(values):
raise AirflowException(
"'Ranges' and and 'Lists' must be of equal length. \n \
'Ranges' is of length: {} and \n \
'Values' is of length: {}.".format(str(len(ranges)), str(len(values))))
service = self.get_conn()
data = []
for idx, range_ in enumerate(ranges):
value_range = {
"range": range_,
"majorDimension": major_dimension,
"values": values[idx]
}
data.append(value_range)
body = {
"valueInputOption": value_input_option,
"data": data,
"includeValuesInResponse": include_values_in_response,
"responseValueRenderOption": value_render_option,
"responseDateTimeRenderOption": date_time_render_option
}
response = service.spreadsheets().values().batchUpdate( # pylint: disable=no-member
spreadsheetId=self.spreadsheet_id,
body=body
).execute(num_retries=self.num_retries)
return response
@CloudBaseHook.catch_http_exception
def append_values(
self,
range_: str,
values: List,
major_dimension: str = 'ROWS',
value_input_option: str = 'RAW',
insert_data_option: str = 'OVERWRITE',
include_values_in_response: bool = False,
value_render_option: str = 'FORMATTED_VALUE',
date_time_render_option: str = 'SERIAL_NUMBER'
) -> Dict:
"""
Append values from Google Sheet from a single range
https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets.values/append
        :param range_: The A1 notation of a range to search for a logical table of data.
            Values are appended after the table.
:type range_: str
:param values: Data within a range of the spreadsheet.
:type values: List
:param major_dimension: Indicates which dimension an operation should apply to.
DIMENSION_UNSPECIFIED, ROWS, or COLUMNS
:type major_dimension: str
:param value_input_option: Determines how input data should be interpreted.
RAW or USER_ENTERED
:type value_input_option: str
:param insert_data_option: Determines how existing data is changed when new data is input.
OVERWRITE or INSERT_ROWS
:type insert_data_option: str
:param include_values_in_response: Determines if the update response should
include the values of the cells that were updated.
:type include_values_in_response: bool
:param value_render_option: Determines how values should be rendered in the output.
FORMATTED_VALUE, UNFORMATTED_VALUE, or FORMULA
:type value_render_option: str
:param date_time_render_option: Determines how dates should be rendered in the output.
SERIAL_NUMBER or FORMATTED_STRING
:type date_time_render_option: str
:return: Google Sheets API response.
:rtype: Dict
"""
service = self.get_conn()
body = {
"range": range_,
"majorDimension": major_dimension,
"values": values
}
response = service.spreadsheets().values().append( # pylint: disable=no-member
spreadsheetId=self.spreadsheet_id,
range=range_,
valueInputOption=value_input_option,
insertDataOption=insert_data_option,
includeValuesInResponse=include_values_in_response,
responseValueRenderOption=value_render_option,
responseDateTimeRenderOption=date_time_render_option,
body=body
).execute(num_retries=self.num_retries)
return response
@CloudBaseHook.catch_http_exception
def clear(self, range_: str) -> Dict:
"""
Clear values from Google Sheet from a single range
https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets.values/clear
        :param range_: The A1 notation of the values to clear.
:type range_: str
:return: Google Sheets API response.
:rtype: Dict
"""
service = self.get_conn()
response = service.spreadsheets().values().clear( # pylint: disable=no-member
spreadsheetId=self.spreadsheet_id,
range=range_
).execute(num_retries=self.num_retries)
return response
@CloudBaseHook.catch_http_exception
def batch_clear(self, ranges: List) -> Dict:
"""
Clear values from Google Sheet from a list of ranges
https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets.values/batchClear
        :param ranges: The A1 notation of the ranges to clear.
:type ranges: List
:return: Google Sheets API response.
:rtype: Dict
"""
service = self.get_conn()
body = {
"ranges": ranges
}
response = service.spreadsheets().values().batchClear( # pylint: disable=no-member
spreadsheetId=self.spreadsheet_id,
body=body
).execute(num_retries=self.num_retries)
return response
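# Hypothetical usage sketch (not part of the hook itself): the spreadsheet ID and
# ranges below are placeholders, and a working 'google_cloud_default' connection is
# assumed to be configured in Airflow.
def _example_usage():
    hook = GSheetsHook(spreadsheet_id="YOUR_SPREADSHEET_ID")
    response = hook.get_values(range_="Sheet1!A1:B2")            # read a single range
    values = response.get("values", [])
    hook.append_values(range_="Sheet1!A1", values=[["a", "b"]])  # append one row
    return values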
|
{
"content_hash": "357e947d4725b93d9027bfac91a10d06",
"timestamp": "",
"source": "github",
"line_count": 351,
"max_line_length": 103,
"avg_line_length": 40.37891737891738,
"alnum_prop": 0.629436252028505,
"repo_name": "Fokko/incubator-airflow",
"id": "d8e37e3b9bad99ce2a1befed3563f2dfac004ef9",
"size": "14986",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "airflow/gcp/hooks/gsheets.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "13715"
},
{
"name": "Dockerfile",
"bytes": "14170"
},
{
"name": "HTML",
"bytes": "145596"
},
{
"name": "JavaScript",
"bytes": "25233"
},
{
"name": "Mako",
"bytes": "1339"
},
{
"name": "Python",
"bytes": "8787104"
},
{
"name": "Shell",
"bytes": "187296"
},
{
"name": "TSQL",
"bytes": "879"
}
],
"symlink_target": ""
}
|
"""
Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
See https://llvm.org/LICENSE.txt for license information.
SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
"""
from __future__ import print_function
from __future__ import absolute_import
# System modules
import pprint
# Our modules
from .results_formatter import ResultsFormatter
class DumpFormatter(ResultsFormatter):
"""Formats events to the file as their raw python dictionary format."""
def handle_event(self, test_event):
super(DumpFormatter, self).handle_event(test_event)
self.out_file.write("\n" + pprint.pformat(test_event) + "\n")
|
{
"content_hash": "dd15da400a68ccd9349711b71ad3fbae",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 77,
"avg_line_length": 30.045454545454547,
"alnum_prop": 0.7367624810892587,
"repo_name": "apple/swift-lldb",
"id": "2a0cda14c3bf85b3a08c93d81520810c4d4ed667",
"size": "661",
"binary": false,
"copies": "4",
"ref": "refs/heads/stable",
"path": "packages/Python/lldbsuite/test_event/formatter/dump_formatter.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "130449"
},
{
"name": "C",
"bytes": "198536"
},
{
"name": "C++",
"bytes": "27687071"
},
{
"name": "CMake",
"bytes": "172176"
},
{
"name": "DTrace",
"bytes": "334"
},
{
"name": "LLVM",
"bytes": "6106"
},
{
"name": "Makefile",
"bytes": "106804"
},
{
"name": "Objective-C",
"bytes": "106821"
},
{
"name": "Objective-C++",
"bytes": "25658"
},
{
"name": "Perl",
"bytes": "72175"
},
{
"name": "Python",
"bytes": "4680483"
},
{
"name": "Shell",
"bytes": "6573"
},
{
"name": "Swift",
"bytes": "260786"
},
{
"name": "Vim script",
"bytes": "8434"
}
],
"symlink_target": ""
}
|
import uuid
from django.db import models
from django.contrib.auth.models import User
User._meta.get_field('email')._unique = True
class Song(models.Model):
track_name = models.CharField(max_length=200)
track_id = models.CharField(db_index=True, max_length=200)
track_uri = models.CharField(max_length=200)
artists = models.ForeignKey('Artist')
artist_id = models.CharField(max_length=200)
album = models.CharField(max_length=200)
album_id = models.CharField(max_length=200)
album_uri = models.CharField(max_length=200)
spotify_popularity = models.IntegerField(default=0)
preview_url = models.URLField()
image_300 = models.URLField()
image_64 = models.URLField()
users = models.ManyToManyField(User, through='UserSong')
class Genre(models.Model):
name = models.CharField(max_length=100)
users = models.ManyToManyField(User, through="UserGenre")
class UserGenre(models.Model):
user = models.ForeignKey(User)
genre = models.ForeignKey(Genre)
proportion = models.FloatField()
class Artist(models.Model):
name = models.CharField(max_length=100)
genres = models.ManyToManyField(Genre)
class UserActivationCode(models.Model):
user = models.OneToOneField(User, primary_key=True)
code = models.UUIDField(default=uuid.uuid4, editable=False)
class UserProfile(models.Model):
user = models.OneToOneField(User, primary_key=True)
updated_genres = models.DateTimeField()
verified = models.BooleanField(default=False)
neighs = models.ManyToManyField(User,through="NearestNeigh", related_name="neighs")
class NearestNeigh(models.Model):
user = models.ForeignKey(UserProfile)
neighbor = models.ForeignKey(User)
distance = models.FloatField()
class FollowList(models.Model):
user = models.OneToOneField(User, primary_key=True)
following = models.ManyToManyField(User, related_name='following')
class UserSong(models.Model):
song = models.ForeignKey(Song)
user = models.ForeignKey(User)
uploaded_at = models.DateField()
synced_at = models.DateField(auto_now_add=True)
class ArtistRating(models.Model):
user = models.ForeignKey(User)
artist = models.ForeignKey(Artist)
score = models.DecimalField(max_digits=6, decimal_places=4,default=0.5)
class Post(models.Model):
user = models.ForeignKey(User)
song = models.ForeignKey(UserSong)
content = models.TextField(max_length=180)
created_at = models.DateTimeField(auto_now_add=True)
|
{
"content_hash": "92f9af7ff51150a93b98732c090848cb",
"timestamp": "",
"source": "github",
"line_count": 80,
"max_line_length": 87,
"avg_line_length": 31.2875,
"alnum_prop": 0.7271274470635237,
"repo_name": "ecatkins/instabilly",
"id": "fae695eab1341712a2fc8f27c8cdd4016944fe01",
"size": "2503",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "project/spotify/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "8217"
},
{
"name": "HTML",
"bytes": "26023"
},
{
"name": "JavaScript",
"bytes": "32468"
},
{
"name": "Python",
"bytes": "52322"
}
],
"symlink_target": ""
}
|
import re
import rumps
from timer import Timer
from config import *
class MultiTimerApp(rumps.App):
def __init__(self):
super(MultiTimerApp, self).__init__(APP_TITLE,
icon=PATH_RES_IMG_MY_TIME_ALPHA)
self.menu = [MENU_ITEM_ADD_TIMER, MENU_ITEM_REMOVE_TIMER]
self.timers = []
@rumps.timer(UPDATE_VIEW_INTERVAL)
def _update_view(self, _):
if len(self.timers) == 0:
self.title = APP_TITLE
else:
title = ''
for timer in self.timers:
title += TIME_VIEW_FORMAT.format(
timer.title.split('[')[0],
MultiTimerApp.sec_to_hms(timer.current_sec))
self.title = title
@rumps.clicked(MENU_ITEM_ADD_TIMER)
def _add_timer(self, _):
window_title = rumps.Window(title=WINDOW_TITLE_MESSAGE,
message='',
default_text='',
cancel=True,
dimensions=WINDOW_DIM)
window_title.icon = PATH_RES_IMG_MY_TIME
res_title = window_title.run()
if not res_title.clicked:
return
sec = None
while sec is None:
window_sec = rumps.Window(title=WINDOW_TIME_MESSAGE,
message='',
default_text=TIME_EXAMPLE,
cancel=True,
dimensions=WINDOW_DIM)
window_sec.icon = PATH_RES_IMG_MY_TIME
res_sec = window_sec.run()
if not res_sec.clicked:
return
try:
sec = MultiTimerApp.hms_to_sec(res_sec.text)
except TypeError:
rumps.alert(INVALID_TIME_FORMAT_MESSAGE)
title = res_title.text
def on_finish():
self._update_view(None)
rumps.alert(TIME_FINISHED_FORMAT.format(title))
self._stop_timer(self._get_timer(title))
self.timers.append(Timer(title, sec, on_finish))
self.menu.add(rumps.MenuItem(title=title,
callback=self._switch_timer,
icon=PATH_RES_IMG_PLAY))
remove_menu = self.menu[MENU_ITEM_REMOVE_TIMER]
remove_menu.set_callback(lambda: None)
remove_menu.add(rumps.MenuItem(title, callback=self._remove_timer))
def _remove_timer(self, sender):
for timer in self.timers:
if sender.title == timer.title:
                # Stop the timer so it does not keep spawning new threads
timer.stop()
self.timers.remove(timer)
self.menu.pop(timer.title)
self.menu[MENU_ITEM_REMOVE_TIMER].pop(timer.title)
break
if len(self.timers) == 0:
self.menu[MENU_ITEM_REMOVE_TIMER].set_callback(None)
def _get_timer(self, title):
for timer in self.timers:
if title == timer.title:
return timer
def _switch_timer(self, sender):
timer = self._get_timer(sender.title)
if timer.state in [Timer.STATE_PAUSE, Timer.STATE_STOP]:
self._start_timer(timer)
else:
self._pause_timer(timer)
def _start_timer(self, timer):
timer.start()
self.menu[timer.title].icon = PATH_RES_IMG_PAUSE
def _stop_timer(self, timer):
timer.stop()
self.menu[timer.title].icon = PATH_RES_IMG_PLAY
def _pause_timer(self, timer):
timer.pause()
self.menu[timer.title].icon = PATH_RES_IMG_PLAY
@staticmethod
def hms_to_sec(hms):
m = re.match(r'\d{2}:\d{2}:\d{2}', hms)
if m is not None:
h, m, s = map(int, hms.split(':'))
return h * 3600 + m * 60 + s
else:
raise TypeError
@staticmethod
def sec_to_hms(sec):
h, mod = divmod(sec, 3600)
m, s = divmod(mod, 60)
form = "{0:02d}:{1:02d}:{2:02d}".format(h, m, s) \
if h > 0 else "{0:02d}:{1:02d}".format(m, s)
return form
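# Hypothetical usage sketch (not part of the original app): arbitrary values that
# exercise the two conversion helpers above.
if __name__ == '__main__':
    assert MultiTimerApp.hms_to_sec('01:02:03') == 3723
    assert MultiTimerApp.sec_to_hms(3723) == '01:02:03'
    assert MultiTimerApp.sec_to_hms(125) == '02:05'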
|
{
"content_hash": "2c69595261b96d6f06d7e7dc6a30bced",
"timestamp": "",
"source": "github",
"line_count": 123,
"max_line_length": 76,
"avg_line_length": 34.08130081300813,
"alnum_prop": 0.5062022900763359,
"repo_name": "kanairen/MultiTimer",
"id": "74cd5dff82f0967741f3c61bff901e0121a53d8d",
"size": "4287",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/app.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "6648"
}
],
"symlink_target": ""
}
|
u"""Test getting own and adm jobs.
:copyright: Copyright (c) 2020 RadiaSoft LLC. All Rights Reserved.
:license: http://www.apache.org/licenses/LICENSE-2.0.html
"""
from __future__ import absolute_import, division, print_function
import pytest
import os
from pykern.pkcollections import PKDict
import time
def setup_module(module):
os.environ.update(
SIREPO_JOB_DRIVER_LOCAL_SLOTS_PARALLEL='2',
)
def test_adm_jobs(auth_fc):
from pykern import pkunit
from pykern.pkdebug import pkdp
def _op(fc, sim_type):
r = fc.sr_post(
'admJobs',
PKDict(simulationType=sim_type)
)
pkunit.pkeq(len(r.rows[0]), len(r.header))
pkunit.pkeq('srw', r.rows[0][0])
_run_sim(auth_fc, _op)
def test_adm_jobs_forbidden(auth_fc):
from pykern import pkunit
from pykern.pkdebug import pkdp
from sirepo import srunit
import sirepo.auth_db
def _op(fc, sim_type):
with srunit.auth_db_session():
sirepo.auth_db.UserRole.delete_all_for_column_by_values(
'uid',
[fc.sr_auth_state().uid, ],
)
r = fc.sr_post(
'admJobs',
PKDict(simulationType=sim_type),
raw_response=True,
)
pkunit.pkeq(403, r.status_code)
_run_sim(auth_fc, _op)
def test_srw_get_own_jobs(auth_fc):
from pykern import pkunit
from pykern.pkdebug import pkdp
def _op(fc, sim_type):
r = fc.sr_post(
'admJobs',
PKDict(simulationType=sim_type)
)
pkunit.pkeq(len(r.rows[0]), len(r.header))
pkunit.pkeq('srw', r.rows[0][0])
_run_sim(auth_fc, _op)
def test_srw_user_see_only_own_jobs(auth_fc):
from pykern import pkunit
from pykern.pkdebug import pkdp
from sirepo import srunit
import sirepo.auth_db
import sirepo.auth_role
def _cancel_job(user, cancel_req):
_login_as_user(user)
fc.sr_post('runCancel', cancel_req)
def _clear_role_db():
with srunit.auth_db_session():
sirepo.auth_db.UserRole.delete_all()
def _get_jobs(adm, job_count):
r = fc.sr_post(
'admJobs' if adm else 'ownJobs',
PKDict(simulationType=t)
)
        pkunit.pkeq(job_count, len(r.rows), 'job_count={} len_r={} r={}', job_count, len(r.rows), r)
def _get_simulation_running():
d = auth_fc.sr_sim_data(sim_name=n, sim_type='srw')
r = fc.sr_post(
'runSimulation',
PKDict(
models=d.models,
report=m,
simulationId=d.models.simulation.simulationId,
simulationType=d.simulationType,
)
)
try:
for _ in range(10):
if r.state == 'running':
return r.nextRequest
r = fc.sr_post('runStatus', r.nextRequest)
time.sleep(1)
else:
pkunit.pkfail('Never entered running state')
except Exception:
fc.sr_post('runCancel', r.nextRequest)
raise
def _login_as_user(user):
fc.sr_logout()
r = fc.sr_post('authEmailLogin', {'email': user, 'simulationType': t})
fc.sr_email_confirm(fc, r)
def _make_user_adm(uid):
import sirepo.pkcli.roles
sirepo.pkcli.roles.add_roles(
uid,
sirepo.auth_role.ROLE_ADM,
)
with srunit.auth_db_session():
r = sirepo.auth_db.UserRole.search_all_for_column('uid')
pkunit.pkeq(1, len(r), 'One user with role adm r={}', r)
pkunit.pkeq(r[0], uid, 'Expected same uid as user')
def _register_both_users():
r = fc.sr_post('authEmailLogin', {'email': adm_user, 'simulationType': t})
fc.sr_email_confirm(fc, r)
fc.sr_post('authCompleteRegistration', {'displayName': 'abc', 'simulationType': t},)
fc.sr_get('authLogout', {'simulation_type': fc.sr_sim_type})
_make_user_adm(fc.sr_auth_state().uid)
r = fc.sr_post('authEmailLogin', {'email': non_adm_user, 'simulationType': t})
fc.sr_email_confirm(fc, r, 'xyz')
fc = auth_fc
t = 'srw'
n = "Young's Double Slit Experiment"
m = 'multiElectronAnimation'
adm_user = '[email protected]'
non_adm_user = '[email protected]'
non_adm_job_cancel_req = adm_job_cancel_req = None
try:
_clear_role_db()
_register_both_users()
non_adm_job_cancel_req = _get_simulation_running()
_login_as_user(adm_user)
adm_job_cancel_req = _get_simulation_running()
_get_jobs(True, 2)
_login_as_user(non_adm_user)
_get_jobs(False, 1)
finally:
if non_adm_job_cancel_req:
_cancel_job(non_adm_user, non_adm_job_cancel_req)
if adm_job_cancel_req:
_cancel_job(adm_user, adm_job_cancel_req)
def _run_sim(fc, op):
from pykern import pkunit
n = "Young's Double Slit Experiment"
m = 'multiElectronAnimation'
t = 'srw'
c = None
fc.sr_login_as_guest(sim_type=t)
d = fc.sr_sim_data(n)
try:
r = fc.sr_post(
'runSimulation',
PKDict(
models=d.models,
report=m,
simulationId=d.models.simulation.simulationId,
simulationType=d.simulationType,
)
)
c = r.nextRequest
for _ in range(10):
if r.state == 'running':
op(fc, t)
return
r = fc.sr_post('runStatus', r.nextRequest)
time.sleep(1)
else:
pkunit.pkfail('Never entered running state')
finally:
fc.sr_post('runCancel', c)
|
{
"content_hash": "fe1e1c434d6802e6755f2f702817aee9",
"timestamp": "",
"source": "github",
"line_count": 194,
"max_line_length": 100,
"avg_line_length": 29.561855670103093,
"alnum_prop": 0.5557105492589364,
"repo_name": "mkeilman/sirepo",
"id": "7c350bcdb7a0284b58c7c6d8adc894361b559c30",
"size": "5759",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/adm_and_own_jobs_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "152"
},
{
"name": "CSS",
"bytes": "261510"
},
{
"name": "HTML",
"bytes": "346870"
},
{
"name": "JavaScript",
"bytes": "2737116"
},
{
"name": "Opal",
"bytes": "38855"
},
{
"name": "Python",
"bytes": "1982222"
},
{
"name": "Shell",
"bytes": "13951"
}
],
"symlink_target": ""
}
|
"""
Test addrv2 relay
"""
import time
from test_framework.messages import (
CAddress,
msg_addrv2,
NODE_NETWORK
)
from test_framework.mininode import P2PInterface
from test_framework.test_framework import PivxTestFramework
from test_framework.util import assert_equal
ADDRS = []
for i in range(10):
addr = CAddress()
addr.time = int(time.time()) + i
addr.nServices = NODE_NETWORK
addr.ip = "123.123.123.{}".format(i % 256)
addr.port = 8333 + i
ADDRS.append(addr)
class AddrReceiver(P2PInterface):
addrv2_received_and_checked = False
def __init__(self):
super().__init__(support_addrv2 = True)
def on_addrv2(self, message):
for addr in message.addrs:
assert_equal(addr.nServices, 1) # NODE_NETWORK
assert addr.ip.startswith('123.123.123.')
assert (8333 <= addr.port < 8343)
self.addrv2_received_and_checked = True
def wait_for_addrv2(self):
self.wait_until(lambda: "addrv2" in self.last_message)
class AddrTest(PivxTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
def run_test(self):
self.log.info('Create connection that sends addrv2 messages')
addr_source = self.nodes[0].add_p2p_connection(P2PInterface())
msg = msg_addrv2()
self.log.info('Send too-large addrv2 message')
msg.addrs = ADDRS * 101
with self.nodes[0].assert_debug_log(['addrv2 message size = 1010']):
addr_source.send_and_ping(msg)
self.log.info('Check that addrv2 message content is relayed and added to addrman')
addr_receiver = self.nodes[0].add_p2p_connection(AddrReceiver())
msg.addrs = ADDRS
with self.nodes[0].assert_debug_log([
'Added 10 addresses from 127.0.0.1: 0 tried',
'received: addrv2 (131 bytes) peer=0',
'sending addrv2 (131 bytes) peer=1',
]):
addr_source.send_and_ping(msg)
self.nodes[0].setmocktime(int(time.time()) + 30 * 60)
addr_receiver.wait_for_addrv2()
assert addr_receiver.addrv2_received_and_checked
if __name__ == '__main__':
AddrTest().main()
|
{
"content_hash": "8c8441f0c649dd491e1d10e87c95411f",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 90,
"avg_line_length": 30.22972972972973,
"alnum_prop": 0.6222619579794367,
"repo_name": "PIVX-Project/PIVX",
"id": "3f6b681f232eccb40418c1c0118bab15d38337db",
"size": "2446",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/functional/p2p_addrv2_relay.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "953297"
},
{
"name": "C",
"bytes": "5168953"
},
{
"name": "C++",
"bytes": "9188709"
},
{
"name": "CMake",
"bytes": "203234"
},
{
"name": "CSS",
"bytes": "211710"
},
{
"name": "HTML",
"bytes": "21833"
},
{
"name": "Java",
"bytes": "30291"
},
{
"name": "JavaScript",
"bytes": "41357"
},
{
"name": "M4",
"bytes": "263162"
},
{
"name": "Makefile",
"bytes": "139139"
},
{
"name": "Objective-C++",
"bytes": "3642"
},
{
"name": "Python",
"bytes": "1505322"
},
{
"name": "QMake",
"bytes": "26219"
},
{
"name": "Rust",
"bytes": "139132"
},
{
"name": "Sage",
"bytes": "30188"
},
{
"name": "Shell",
"bytes": "101041"
},
{
"name": "TypeScript",
"bytes": "10706"
}
],
"symlink_target": ""
}
|
import analysis.event
import analysis.hitfinding
import analysis.pixel_detector
import analysis.sizing
import plotting.image
import plotting.line
import plotting.correlation
import plotting.histogram
from backend.record import add_record
import numpy as np
import time
import ipc
import utils.reader
import re
import os
import utils.cxiwriter
scanInjector = False
scanXmin = -250
scanXmax = 250
scanXbins = 500
scanZmin = 88
scanZmax = 100
scanZbins = 220/2
scanYmin = 94
scanYmax = 97
scanYbins = 20
outputEveryImage = True
do_sizing = False
do_write = False
do_showhybrid = False
move_half = True
#Detector params
detector_distance = 220e-03
gap_top=0.8e-03
gap_bottom=3.0e-03
gap_total=gap_top+gap_bottom
ny=1024
nx=1024
pixel_size=7.5e-05
center_shift=int((gap_top-gap_bottom)/pixel_size)
# Quick config parameters
hitScoreThreshold = 13000
aduThreshold = 200
strong_hit_threshold = 60000
#experiment_folder = "/data/beamline/current"
experiment_folder = "/asap3/flash/gpfs/bl1/2017/data/11001733"
# Specify the facility
state = {}
state['Facility'] = 'FLASH'
# Specify folders with frms6 and darkcal data
state['FLASH/DataGlob'] = os.path.join(experiment_folder, "raw/pnccd/block-02/holography_*_*_*_*.frms6")
state['FLASH/DataRe'] = os.path.join(experiment_folder, "raw/pnccd/block-02/holography_.+_.+_([0-9]{4})_.+.frms6")
#state['FLASH/DataGlob'] = os.path.join(experiment_folder, "raw/pnccd/block-02/holography_*_*_*_*.frms6")
state['FLASH/CalibGlob'] = os.path.join(experiment_folder, "processed/calib/block-02/*.darkcal.h5")
state['FLASH/DAQFolder'] = "/asap3/flash/gpfs/bl1/2017/data/11001733/processed/daq"
state['FLASH/MotorFolder'] = '/home/tekeberg/Beamtimes/Holography2017/motor_positions/motor_data.data'
state['FLASH/DAQBaseDir'] = os.path.join(experiment_folder, "raw/hdf/block-02")
state['do_offline'] = True
state['online_start_from_run'] = False
#state['FLASH/ProcessingRate'] = 1
#Mask
Mask = utils.reader.MaskReader("/asap3/flash/gpfs/bl1/2017/data/11001733/processed/mask_v1.h5", "/data")
mask = Mask.boolean_mask
#Mask out center
mask_center=np.ones((ny, nx), dtype=np.bool)
radius=30
#radius=70
cx=0
cy=0
xx,yy=np.meshgrid(np.arange(nx), np.arange(ny))
rr=(xx-nx/2)**2+(yy-ny/2)**2 >= (radius**2)
mask_center &= rr
mask_center &= mask
# Sizing parameters
# ------
binning = 4
centerParams = {'x0' : (512 - (nx-1)/2.)/binning,
'y0' : (512 + center_shift -(ny-1)/2.)/binning,
'maxshift' : int(np.ceil(10./binning)),
'threshold': 1,
'blur' : 4}
modelParams = {'wavelength': 5.3, #in nm
'pixelsize': 75*binning, #um
'distance': 220., #mm
'material': 'sucrose'}
sizingParams = {'d0':20., # in nm
'i0':1., # in mJ/um2
'brute_evals':10}
# Physical constants
h = 6.62606957e-34 #[Js]
c = 299792458 #[m/s]
hc = h*c #[Jm]
eV_to_J = 1.602e-19 #[J/eV]
#res = modelParams["distance"] * 1E-3* modelParams["wavelength"] * 1E-9 / ( pixelsize_native * nx_front )
#expected_diameter = 150
# Thresholds for good sizing fits
fit_error_threshold = 2.6E-3#4.0e-3
photon_error_threshold = 3000
diameter_min = 40 #[nm]
diameter_max = 90 #[nm]
def calculate_epoch_times(evt, time_sec, time_usec):
add_record(evt['ID'], 'ID', 'time', time_sec.data + 1.e-6*time_usec.data)
#add_record(evt['ID'], 'ID', 'timeAgo', time.time() - (time_sec.data + 1.e-6*time_usec.data))
# Calculating timeAgo with 606 second offset due to miscalibration of pnCCD server clock
#add_record(evt['ID'], 'ID', 'timeAgo', -606. + time.time() - (time_sec.data + 1.e-6*time_usec.data))
add_record(evt['ID'], 'ID', 'timeAgo', 0. + time.time() - (time_sec.data + 1.e-6*time_usec.data))
def beginning_of_run():
if do_write:
global W
W = utils.cxiwriter.CXIWriter("/asap3/flash/gpfs/bl1/2017/data/11001733/processed/tof_88_91.h5", chunksize=10)
# This function is called for every single event
# following the given recipe of analysis
def onEvent(evt):
# Processing rate [Hz]
analysis.event.printProcessingRate()
# try:
# has_tof = True
# evt["DAQ"]["TOF"]
# print "We have TOF data!"
# except RuntimeError:
# has_tof = False
# #print "No TOF"
has_tof = False
detector_type = "photonPixelDetectors"
detector_key = "pnCCD"
if move_half:
detector = evt[detector_type][detector_key]
detector = analysis.pixel_detector.moveHalf(evt, detector, horizontal=int(gap_total/pixel_size), outkey='data_half-moved')
mask_center_s = analysis.pixel_detector.moveHalf(evt, add_record(evt["analysis"], "analysis", "mask", mask_center), horizontal=int(gap_total/pixel_size), outkey='mask_half-moved').data
detector_type = "analysis"
detector_key = "data_half-moved"
# Do basic hitfinding using lit pixels
analysis.hitfinding.countLitPixels(evt, evt[detector_type][detector_key],
aduThreshold=aduThreshold,
hitscoreThreshold=hitScoreThreshold, mask=mask_center_s)
hit = bool(evt["analysis"]["litpixel: isHit"].data)
strong_hit=evt["analysis"]["litpixel: hitscore"].data>strong_hit_threshold
plotting.line.plotHistory(add_record(evt["analysis"],"analysis","total ADUs", evt[detector_type][detector_key].data.sum()),
label='Total ADU', hline=hitScoreThreshold, group='Metric')
plotting.line.plotHistory(evt["analysis"]["litpixel: hitscore"],
label='Nr. of lit pixels', hline=hitScoreThreshold, group='Metric')
analysis.hitfinding.hitrate(evt, hit, history=50)
if hit and has_tof:
print evt["DAQ"]["TOF"].data
print evt["motorPositions"]["InjectorZ"].data
plotting.line.plotTrace(evt["DAQ"]["TOF"], label='TOF', history=100, tracelen=20000, name="TOF", group="TOF")
plotting.line.plotHistory(evt["motorPositions"]["InjectorZ"], label="InjectorZ (with TOF)", group="TOF")
plotting.image.plotImage(evt[detector_type][detector_key], name="pnCCD (Hits with TOF)", group='TOF', mask=mask_center_s)
D = {}
D['TOF'] = evt['DAQ']['TOF'].data
D['pnCCD'] = evt[detector_type][detector_key].data
D['InjectorZ'] = evt["motorPositions"]["InjectorZ"].data
if do_write:
W.write_slice(D)
def end_of_run():
if do_write:
W.close()
|
{
"content_hash": "4e7e48f8c132a20bd19c027dd515af23",
"timestamp": "",
"source": "github",
"line_count": 190,
"max_line_length": 192,
"avg_line_length": 34.2,
"alnum_prop": 0.6518928901200369,
"repo_name": "SPIhub/hummingbird",
"id": "205eaad26b6871eeb735554ee5d0f16ccf2c9139",
"size": "6533",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "examples/flash/holo-2017/conf_tof.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "624263"
}
],
"symlink_target": ""
}
|
from collections import OrderedDict
from itertools import chain
from ..utils.orderedtype import OrderedType
from .structures import NonNull
from .dynamic import Dynamic
class Argument(OrderedType):
def __init__(self, type, default_value=None, description=None, name=None, required=False, _creation_counter=None):
super(Argument, self).__init__(_creation_counter=_creation_counter)
if required:
type = NonNull(type)
self.name = name
self.type = type
self.default_value = default_value
self.description = description
def __eq__(self, other):
        return isinstance(other, Argument) and (
            self.name == other.name and
            self.type == other.type and
            self.default_value == other.default_value and
            self.description == other.description
        )
def to_arguments(args, extra_args=None):
from .unmountedtype import UnmountedType
from .field import Field
from .inputfield import InputField
if extra_args:
extra_args = sorted(extra_args.items(), key=lambda f: f[1])
else:
extra_args = []
iter_arguments = chain(args.items(), extra_args)
arguments = OrderedDict()
for default_name, arg in iter_arguments:
if isinstance(arg, Dynamic):
arg = arg.get_type()
if arg is None:
# If the Dynamic type returned None
# then we skip the Argument
continue
if isinstance(arg, UnmountedType):
arg = arg.Argument()
if isinstance(arg, (InputField, Field)):
raise ValueError('Expected {} to be Argument, but received {}. Try using Argument({}).'.format(
default_name,
type(arg).__name__,
arg.type
))
if not isinstance(arg, Argument):
raise ValueError('Unknown argument "{}".'.format(default_name))
arg_name = default_name or arg.name
assert arg_name not in arguments, 'More than one Argument have same name "{}".'.format(arg_name)
arguments[arg_name] = arg
return arguments
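# Hypothetical usage sketch (not part of the original module): String is assumed to
# be importable from the sibling scalars module; the call shows to_arguments()
# normalizing a mix of unmounted types and explicit Arguments into an OrderedDict.
def _example_to_arguments():
    from .scalars import String
    args = to_arguments({'name': String(required=True)},
                        extra_args={'limit': Argument(String, default_value='10')})
    return list(args)  # ['name', 'limit'], insertion order preserved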
|
{
"content_hash": "95a05f743878584298c5f4c684d0a2f8",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 118,
"avg_line_length": 32.46969696969697,
"alnum_prop": 0.6061595893607092,
"repo_name": "Globegitter/graphene",
"id": "8a621b4267edbddb368a07894c2ce235b84ea295",
"size": "2143",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "graphene/types/argument.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "182262"
},
{
"name": "Shell",
"bytes": "393"
}
],
"symlink_target": ""
}
|
""" Sahana Eden Disaster Victim Registration Model
@copyright: 2012-15 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
__all__ = ("S3DVRModel",)
from gluon import *
from gluon.storage import Storage
from gluon.tools import callback
from ..s3 import *
from s3layouts import S3PopupLink
# =============================================================================
class S3DVRModel(S3Model):
"""
Allow an individual or household to register to receive
compensation and/or distributions of relief items
"""
names = ("dvr_need",
"dvr_case",
"dvr_case_need",
)
def model(self):
T = current.T
db = current.db
UNKNOWN_OPT = current.messages.UNKNOWN_OPT
crud_strings = current.response.s3.crud_strings
define_table = self.define_table
configure = self.configure
# ---------------------------------------------------------------------
# Case
#
#dvr_damage_opts = {
# 1: T("Very High"),
# 2: T("High"),
# 3: T("Medium"),
# 4: T("Low"),
#}
dvr_status_opts = {
1: T("Open"),
2: T("Pending"),
3: T("Closed"),
}
tablename = "dvr_case"
define_table(tablename,
# @ToDo: Option to autogenerate these, like Waybills, et al
Field("reference",
label = T("Case Number"),
),
self.org_organisation_id(),
self.pr_person_id(
# @ToDo: Modify this to update location_id if the selected
# person has a Home Address already
comment = None,
represent = self.pr_PersonRepresent(show_link=True),
requires = IS_ADD_PERSON_WIDGET2(),
widget = S3AddPersonWidget2(controller="pr"),
),
#Field("damage", "integer",
# label= T("Damage Assessment"),
# represent = lambda opt: \
# dvr_damage_opts.get(opt, UNKNOWN_OPT),
# requires = IS_EMPTY_OR(IS_IN_SET(dvr_damage_opts)),
# ),
#Field("insurance", "boolean",
# label = T("Insurance"),
# represent = s3_yes_no_represent,
# ),
Field("status", "integer",
default = 1,
label = T("Status"),
represent = S3Represent(options=dvr_status_opts),
requires = IS_EMPTY_OR(IS_IN_SET(dvr_status_opts)),
),
s3_comments(),
*s3_meta_fields())
# CRUD Strings
crud_strings[tablename] = Storage(
label_create = T("Create Case"),
title_display = T("Case Details"),
title_list = T("Cases"),
title_update = T("Edit Case"),
label_list_button = T("List Cases"),
label_delete_button = T("Delete Case"),
msg_record_created = T("Case added"),
msg_record_modified = T("Case updated"),
msg_record_deleted = T("Case deleted"),
msg_list_empty = T("No Cases found")
)
represent = S3Represent(lookup=tablename, fields=("reference",))
case_id = S3ReusableField("case_id", "reference %s" % tablename,
label = T("Case"),
ondelete = "RESTRICT",
represent = represent,
requires = IS_EMPTY_OR(
IS_ONE_OF(db, "dvr_case.id",
represent)),
)
self.add_components(tablename,
dvr_need = {"link": "dvr_case_need",
"joinby": "case_id",
"key": "need_id",
},
pr_address = ({"name": "current_address",
"link": "pr_person",
"joinby": "id",
"key": "pe_id",
"fkey": "pe_id",
"pkey": "person_id",
"filterby": "type",
"filterfor": ("1",),
},
{"name": "permanent_address",
"link": "pr_person",
"joinby": "id",
"key": "pe_id",
"fkey": "pe_id",
"pkey": "person_id",
"filterby": "type",
"filterfor": ("2",),
},
),
)
crud_form = S3SQLCustomForm("reference",
"organisation_id",
"status",
"person_id",
S3SQLInlineComponent("current_address",
label = T("Current Address"),
fields = [("", "location_id"),
],
default = {"type": 1}, # Current Home Address
link = False,
multiple = False,
),
S3SQLInlineComponent("permanent_address",
comment = T("If Displaced"),
label = T("Normal Address"),
fields = [("", "location_id"),
],
default = {"type": 2}, # Permanent Home Address
link = False,
multiple = False,
),
S3SQLInlineLink("need",
field = "need_id",
),
"comments",
)
axes = ["organisation_id",
"case_need.need_id",
]
levels = current.gis.get_relevant_hierarchy_levels()
for level in levels:
axes.append("current_address.location_id$%s" % level)
highest_lx = "current_address.location_id$%s" % levels[0]
facts = [(T("Number of Cases"), "count(id)"),
]
report_options = {"rows": axes,
"cols": axes,
"fact": facts,
"defaults": {"rows": "case_need.need_id",
"cols": highest_lx,
"fact": facts[0],
"totals": True,
},
}
configure(tablename,
crud_form = crud_form,
report_options = report_options,
)
# ---------------------------------------------------------------------
# Needs
#
tablename = "dvr_need"
define_table(tablename,
Field("name",
label = T("Name"),
),
s3_comments(),
*s3_meta_fields())
# CRUD Strings
ADD_NEED = T("Create Need")
crud_strings[tablename] = Storage(
label_create = ADD_NEED,
title_display = T("Need Details"),
title_list = T("Needs"),
title_update = T("Edit Need"),
label_list_button = T("List Needs"),
label_delete_button = T("Delete Need"),
msg_record_created = T("Need added"),
msg_record_modified = T("Need updated"),
msg_record_deleted = T("Need deleted"),
msg_list_empty = T("No Needs found")
)
represent = S3Represent(lookup=tablename, translate=True)
need_id = S3ReusableField("need_id", "reference %s" % tablename,
label = T("Need"),
ondelete = "RESTRICT",
represent = represent,
requires = IS_EMPTY_OR(
IS_ONE_OF(db, "dvr_need.id",
represent)),
comment=S3PopupLink(c = "dvr",
f = "need",
label = ADD_NEED,
),
)
configure(tablename,
deduplicate = S3Duplicate(),
)
# ---------------------------------------------------------------------
# Cases <> Needs
#
tablename = "dvr_case_need"
define_table(tablename,
case_id(empty = False,
ondelete = "CASCADE",
),
need_id(empty = False,
ondelete = "CASCADE",
),
#s3_comments(),
*s3_meta_fields())
# ---------------------------------------------------------------------
# Pass names back to global scope (s3.*)
#
return {}
# END =========================================================================
|
{
"content_hash": "8aa9d394aa0710eda6854cea1bd37bd7",
"timestamp": "",
"source": "github",
"line_count": 275,
"max_line_length": 104,
"avg_line_length": 43.723636363636366,
"alnum_prop": 0.35944777112441784,
"repo_name": "anurag-ks/eden",
"id": "5eb1d2d9b736cba38f61b7486652a99a45039fab",
"size": "12049",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "modules/s3db/dvr.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "727"
},
{
"name": "CSS",
"bytes": "2378623"
},
{
"name": "HTML",
"bytes": "1343047"
},
{
"name": "JavaScript",
"bytes": "20063127"
},
{
"name": "NSIS",
"bytes": "3934"
},
{
"name": "PHP",
"bytes": "15220"
},
{
"name": "Python",
"bytes": "30127285"
},
{
"name": "Ruby",
"bytes": "3611"
},
{
"name": "Shell",
"bytes": "5057"
},
{
"name": "XSLT",
"bytes": "2975882"
}
],
"symlink_target": ""
}
|
import httplib2 as http
import urllib
import json
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
class Bourdon(object):
def __init__(self, user, uri):
self.user = user
self.uri = uri
self.headers = {
'Accept': 'application/json',
'Content-Type': 'application/json; charset=UTF-8'
}
def param(self, opts):
return urllib.urlencode(opts)
def request(self, method='GET', path='', body='', opts={}):
target = urlparse(self.uri + path)
h = http.Http()
# authentication:
h.add_credentials(self.user, 'xxx')
# TODO use opts param for GET/PUT/POST
if method == 'GET':
response, content = h.request(target.geturl() +'?' + self.param(opts), method, body, self.headers)
else:
response, content = h.request(target.geturl(), method, self.param(opts), self.headers)
return {'response': response, 'content': json.loads(content)}
def get_account(self):
"""Retrieves details about the Teamwork account."""
return self.request(path='/account.json')
def get_info(self):
return self.request(path='/api')
def get_projects(self, opts={}):
return self.request(path='/projects', opts=opts)
def get_project(self, project_id, opts={}):
return self.request(path='/projects/%d' % project_id, opts=opts)
def create_project(self, project_id, opts={}):
return self.request(method='POST', path='/projects/%d' % project_id, opts=opts)
def update_project(self, project_id, opts={}):
return self.request(method='PUT', path='/project/%d' % project_id, opts=opts)
def delete_project(self, project_id):
return self.request(method='DELETE', path='/project/%d' % project_id)
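# Hypothetical usage sketch (not part of the original class): the user name and the
# Teamwork-style base URI below are placeholders.
if __name__ == '__main__':
    client = Bourdon('api_key_user', 'https://example.teamwork.com')
    account = client.get_account()['content']
    projects = client.get_projects({'status': 'ACTIVE'})['content']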
|
{
"content_hash": "8fcbb446ebae308fe51c8e2108d3ff20",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 110,
"avg_line_length": 30.459016393442624,
"alnum_prop": 0.6114101184068891,
"repo_name": "malenkiki/bourdon",
"id": "83db8b15c070af9c0bcc00e307ebbb3c3234027c",
"size": "3007",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bourdonlib/bourdon.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7028"
}
],
"symlink_target": ""
}
|
import pdb
import os
import hashlib
from bottle import route, run, template, SimpleTemplate, static_file, request, redirect
#from TransmissionClient import NoSuchTorrent, TransmissionClient
# FILE UPLOAD FROM STACK OVERFLOW
# http://stackoverflow.com/questions/22839474/python-bottle-how-to-upload-media-files-without-dosing-the-server
MAX_SIZE = 10 * 1024 * 1024 # 10MB
BUF_SIZE = 8192
@route('/hello/<name>')
def hello(name):
return template('<b>Hello {{name}}</b>!', name=name)
@route('/static/<filepath:path>')
def server_static(filepath):
return static_file(filepath, root='/opt/ts/bootstrap')
@route('/')
def index():
return template('index')
@route('/upload', method='POST')
def upload():
'''Upload function from StackOverflow'''
# Move to config file
upload_dir = '/tmp/torrents/'
#hash file being uploaded
h = hashlib.new('sha1')
files = request.files
print files, type(files)
data = request.files.data
if(data is not None):
fileinfo = {}
uploadfile = data
#pdb.set_trace()
print uploadfile.filename, type(uploadfile)
fileinfo['name'] = uploadfile.filename
target_path = os.path.join(upload_dir, uploadfile.filename)
print target_path
# add Ron.Rothman's code
# hash file as being read
data_blocks = []
        buf = uploadfile.file.read(BUF_SIZE)
h.update(buf)
while buf:
data_blocks.append(buf)
            buf = uploadfile.file.read(BUF_SIZE)
h.update(buf)
fileinfo['hash'] = h.hexdigest()
my_file_data = ''.join(data_blocks)
with open(target_path, 'wb') as tf:
tf.write(my_file_data)
fileinfo['size'] = tf.tell()
# write data to server db
# write filename, hash, size, generated address for payment, datetime, calculateprice
redirect('/pricefile/{0}'.format(fileinfo['hash']))
return None
@route('/sendtorrent', method='GET')
def upload_form():
return template('upload_form')
@route('/pricefile/<filehash>')
def pricing(filehash):
# get info from db to display here
# generate qr code for address
return template('pricing', filehash=filehash)
run(host='localhost', port=8080)
|
{
"content_hash": "ed7cbf89558abb35c04ca0636cbd0142",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 111,
"avg_line_length": 27.240963855421686,
"alnum_prop": 0.6421937195931003,
"repo_name": "erik998/tontineseed",
"id": "99783034b8646158b2a9f530f0bbefb2168715e4",
"size": "2285",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tontineseed/tontineseed.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "2094"
},
{
"name": "Python",
"bytes": "20583"
},
{
"name": "Smarty",
"bytes": "3796"
}
],
"symlink_target": ""
}
|
"""
OVERVIEW:
Master Script that checks a given dataset directory for any processing requests.
Currently 16S and ITS processing are supported.
"""
import numpy as np
import os
import os.path
import sys
import pickle
from optparse import OptionParser
from SummaryParser import *
import QualityControl as QC
# Read in arguments
usage = "%prog -i DATASET_DIR"
parser = OptionParser(usage)
parser.add_option("-i", "--datadir", type="string", dest="datadir")
(options, args) = parser.parse_args()
if( not options.datadir ):
parser.error("No directory specified for the data.")
# Pipe stdout and stderr to logfiles in the new directory
working_directory = options.datadir
sys.stdout = open(os.path.join(working_directory, 'stdout_master.log'),'w')
sys.stderr = open(os.path.join(working_directory, 'stderr_master.log'),'w')
# Parse summary file
summary_filename = os.path.join(options.datadir, 'summary_file.txt')
summary_obj = SummaryParser(summary_filename)
summary_obj.ReadSummaryFile()
# Check if 16S processing is required, and if so, launch the processing pipeline.
if(summary_obj.attribute_value_16S['PROCESSED'] == 'True'):
print "[[ 16S processing ]] Processing already complete."
elif(summary_obj.attribute_value_16S['PROCESSED'] == 'False'):
flags = ''
# Check if multiple FASTQ files are provided. If so, assuming they are demultiplexed (each file is a particular sample/barcode).
try:
raw_data_summary_file = summary_obj.attribute_value_16S['RAW_FASTQ_FILES']
flags = flags + ' -m True '
except:
try:
raw_data_summary_file = summary_obj.attribute_value_16S['RAW_FASTA_FILES']
flags = flags + ' -m True '
except:
flags = flags + ' -m False '
# Check if paired end reads need to be merged
try:
paired_ends = summary_obj.attribute_value_16S['MERGE_PAIRS']
flags = flags + ' -r ' + paired_ends
except:
pass
# Check if output directory is specified
try:
outdir = summary_obj.attribute_value_16S['OUTDIR']
flags = flags + ' -o ' + outdir
except:
pass
# Check if primers have been removed
if summary_obj.attribute_value_16S['PRIMERS_FILE'] == 'None':
flags = flags + ' -p True'
print "[[ 16S processing ]] No primers file. Assuming primers have been trimmed."
if summary_obj.attribute_value_16S['BARCODES_MAP'] == 'None':
flags = flags + ' -b True'
print "[[ 16S processing ]] No barcodes map. Assuming sequences have been demultiplexed and relabeled with sample IDs."
print "[[ 16S processing ]] Processing required. Generating OTU tables."
raw2otu_cmd = 'python ~/scripts/raw2otu.py -i ' + working_directory + flags
os.system(raw2otu_cmd)
# Check summary file again
summary_obj.ReadSummaryFile()
if(summary_obj.attribute_value_16S['PROCESSED'] == 'False'):
print "[[ 16S processing ]] ERROR: Failed to process 16S data"
raise NameError('ERROR: Failed to process 16S data.')
    elif(summary_obj.attribute_value_16S['PROCESSED'] == 'True'):
print "[[ 16S processing ]] Processing complete."
else:
print "[[ 16S processing ]] Unknown case. Proceeding..."
# Check if ITS processing is required, and if so, launch the processing pipeline.
if(summary_obj.attribute_value_ITS['PROCESSED'] == 'True'):
print "[[ ITS processing ]] Processing already complete."
elif(summary_obj.attribute_value_ITS['PROCESSED'] == 'False'):
flags = ''
# Check if multiple FASTQ files are provided. If so, assuming they are demultiplexed (each file is a particular sample/barcode).
try:
raw_data_summary_file = summary_obj.attribute_value_ITS['RAW_FASTQ_FILES']
flags = flags + ' -m True '
except:
try:
raw_data_summary_file = summary_obj.attribute_value_ITS['RAW_FASTA_FILES']
flags = flags + ' -m True '
except:
flags = flags + ' -m False '
# Check if primers have been removed
if summary_obj.attribute_value_ITS['PRIMERS_FILE'] == 'None':
flags = flags + ' -p True'
print "[[ ITS processing ]] No primers file. Assuming primers have been trimmed."
if summary_obj.attribute_value_ITS['BARCODES_MAP'] == 'None':
flags = flags + ' -b True'
print "[[ ITS processing ]] No barcodes map. Assuming sequences have been demultiplexed and relabeled with sample IDs."
print "[[ ITS processing ]] Processing required. Generating OTU tables."
raw2otu_cmd = 'python ~/scripts/raw2otu.py -i ' + working_directory + flags
os.system(raw2otu_cmd)
# Check summary file again
summary_obj.ReadSummaryFile()
if(summary_obj.attribute_value_ITS['PROCESSED'] == 'False'):
print "[[ ITS processing ]] ERROR: Failed to process ITS data"
raise NameError('ERROR: Failed to process ITS data.')
    elif(summary_obj.attribute_value_ITS['PROCESSED'] == 'True'):
print "[[ ITS processing ]] Processing complete."
else:
print "[[ ITS processing ]] Unknown case. Proceeding..."
else:
print "[[ ITS processing ]] No processing request specified."
|
{
"content_hash": "69b9d8bac11b7f4934bb08e1d9742d7a",
"timestamp": "",
"source": "github",
"line_count": 133,
"max_line_length": 133,
"avg_line_length": 39.203007518796994,
"alnum_prop": 0.6582278481012658,
"repo_name": "thomasgurry/amplicon_sequencing_pipeline",
"id": "4b36aabae7110295b12cc6f877ec550da4d7b06a",
"size": "5214",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/Master.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "207811"
}
],
"symlink_target": ""
}
|
import django
import floppyforms
from copy import deepcopy
_WIDGET_COMMON_ATTRIBUTES = (
'is_hidden',
'needs_multipart_form',
'is_localized',
'is_required')
_WIDGET_COMMON_ARGUMENTS = ('attrs',)
def _copy_attributes(original, new_widget, attributes):
for attr in attributes:
original_value = getattr(original, attr)
original_value = deepcopy(original_value)
# Don't set the attribute if it is a property. In that case we can be
# sure that the widget class is taking care of the calculation for that
# property.
old_value_on_new_widget = getattr(new_widget.__class__, attr, None)
if not isinstance(old_value_on_new_widget, property):
setattr(new_widget, attr, original_value)
def _create_widget(widget_class, copy_attributes=(), init_arguments=()):
# attach defaults that apply for all widgets
copy_attributes = tuple(copy_attributes) + _WIDGET_COMMON_ATTRIBUTES
init_arguments = tuple(init_arguments) + _WIDGET_COMMON_ARGUMENTS
def create_new_widget(original):
kwargs = {}
for argname in init_arguments:
kwargs[argname] = getattr(original, argname)
new_widget = widget_class(**kwargs)
_copy_attributes(
original,
new_widget,
copy_attributes)
return new_widget
return create_new_widget
def _create_radioselect(original):
# return original widget if the renderer is something else than what
# django ships with by default. This means if this condition evaluates to
# true, then a custom renderer was specified. We cannot emulate its
# behaviour so we shouldn't guess and just return the original widget
if original.renderer is not django.forms.widgets.RadioFieldRenderer:
return original
create_new_widget = _create_widget(
floppyforms.widgets.RadioSelect,
('choices', 'allow_multiple_selected',))
return create_new_widget(original)
def _create_splitdatetimewidget(widget_class):
def create_new_widget(original):
new_widget = widget_class(
attrs=original.attrs,
date_format=original.widgets[0].format,
time_format=original.widgets[1].format)
_copy_attributes(original, new_widget, _WIDGET_COMMON_ARGUMENTS)
return new_widget
return create_new_widget
def _create_multiwidget(widget_class, copy_attributes=(), init_arguments=()):
create_new_widget = _create_widget(widget_class, copy_attributes,
init_arguments)
def create_new_multiwidget(original):
multiwidget = create_new_widget(original)
multiwidget.widgets = [
floppify_widget(widget)
for widget in multiwidget.widgets]
return multiwidget
return create_new_multiwidget
# This dictionary maps django's widget classes to callables that accept an
# instance of such a class and return a new instance of the corresponding
# floppyforms widget with the same semantics -- all relevant attributes are
# copied over to the new widget.
_django_to_floppyforms_widget = {
django.forms.widgets.Input:
_create_widget(floppyforms.widgets.Input, ('input_type',)),
django.forms.widgets.TextInput:
_create_widget(floppyforms.widgets.TextInput, ('input_type',)),
django.forms.widgets.PasswordInput:
_create_widget(floppyforms.widgets.PasswordInput, ('input_type',)),
django.forms.widgets.HiddenInput:
_create_widget(floppyforms.widgets.HiddenInput, ('input_type',)),
django.forms.widgets.MultipleHiddenInput:
_create_widget(
floppyforms.widgets.MultipleHiddenInput,
('input_type',),
init_arguments=('choices',)),
django.forms.widgets.FileInput:
_create_widget(floppyforms.widgets.FileInput, ('input_type',)),
django.forms.widgets.ClearableFileInput:
_create_widget(
floppyforms.widgets.ClearableFileInput,
(
'input_type', 'initial_text', 'input_text',
'clear_checkbox_label', 'template_with_initial',
'template_with_clear')),
django.forms.widgets.Textarea:
_create_widget(floppyforms.widgets.Textarea),
django.forms.widgets.DateInput:
_create_widget(
floppyforms.widgets.DateInput,
init_arguments=('format',)),
django.forms.widgets.DateTimeInput:
_create_widget(
floppyforms.widgets.DateTimeInput,
init_arguments=('format',)),
django.forms.widgets.TimeInput:
_create_widget(
floppyforms.widgets.TimeInput,
init_arguments=('format',)),
django.forms.widgets.CheckboxInput:
_create_widget(floppyforms.widgets.CheckboxInput, ('check_test',)),
django.forms.widgets.Select:
_create_widget(
floppyforms.widgets.Select,
('choices', 'allow_multiple_selected',)),
django.forms.widgets.NullBooleanSelect:
_create_widget(
floppyforms.widgets.NullBooleanSelect,
('choices', 'allow_multiple_selected',)),
django.forms.widgets.SelectMultiple:
_create_widget(
floppyforms.widgets.SelectMultiple,
('choices', 'allow_multiple_selected',)),
django.forms.widgets.RadioSelect:
_create_radioselect,
django.forms.widgets.CheckboxSelectMultiple:
_create_widget(
floppyforms.widgets.CheckboxSelectMultiple,
('choices', 'allow_multiple_selected',)),
django.forms.widgets.MultiWidget:
_create_widget(
floppyforms.widgets.MultiWidget,
init_arguments=('widgets',)),
django.forms.widgets.SplitDateTimeWidget:
_create_splitdatetimewidget(
floppyforms.widgets.SplitDateTimeWidget),
django.forms.widgets.SplitHiddenDateTimeWidget:
_create_splitdatetimewidget(
floppyforms.widgets.SplitHiddenDateTimeWidget),
django.forms.extras.widgets.SelectDateWidget:
_create_widget(
floppyforms.widgets.SelectDateWidget,
init_arguments=('years',) if django.VERSION >= (1, 7) else ('years', 'required')),
}
_django_field_to_floppyform_widget = {
django.forms.fields.FloatField:
_create_widget(floppyforms.widgets.NumberInput),
django.forms.fields.DecimalField:
_create_widget(floppyforms.widgets.NumberInput),
django.forms.fields.IntegerField:
_create_widget(floppyforms.widgets.NumberInput),
django.forms.fields.EmailField:
_create_widget(floppyforms.widgets.EmailInput),
django.forms.fields.URLField:
_create_widget(floppyforms.widgets.URLInput),
django.forms.fields.SlugField:
_create_widget(floppyforms.widgets.SlugInput),
django.forms.fields.GenericIPAddressField:
_create_widget(floppyforms.widgets.TextInput),
django.forms.fields.SplitDateTimeField:
_create_splitdatetimewidget(floppyforms.widgets.SplitDateTimeWidget),
}
def allow_floppify_widget_for_field(field):
'''
    Only allow replacing a widget with its floppyforms counterpart if the
    original widget determined by django is still in place. We don't want to
    override custom widgets that the user specified.
'''
# There is a special case for IntegerFields (and all subclasses) that
# replaces the default TextInput with a NumberInput, if localization is
# turned off. That applies for Django 1.6 upwards.
# See the relevant source code in django:
# https://github.com/django/django/blob/1.9.6/django/forms/fields.py#L261-264
if isinstance(field, django.forms.IntegerField) and not field.localize:
if field.widget.__class__ is django.forms.NumberInput:
return True
# We can check if the widget was replaced by comparing the class of the
# specified widget with the default widget that is specified on the field
# class.
if field.widget.__class__ is field.__class__.widget:
return True
# At that point we are assuming that the user replaced the original widget
# with a custom one. So we don't allow to overwrite it.
return False
def floppify_widget(widget, field=None):
'''
    Take an instance of django.forms.widgets.Widget and return a new widget
    instance that uses the corresponding floppyforms widget class.
Only original django widgets will be replaced with a floppyforms version.
The widget will be returned unaltered if it is not known, e.g. if it's a
custom widget from a third-party app.
The optional parameter ``field`` can be used to influence the widget
creation further. This is useful since floppyforms supports more widgets
    than django does. For example, django uses a ``TextInput`` for an
    ``EmailField``, but floppyforms has a better suited widget called
    ``EmailInput``. If a widget is found specifically for the passed-in
    ``field``, it takes precedence over the first parameter ``widget``,
    which will effectively be ignored.
'''
if field is not None:
create_widget = _django_field_to_floppyform_widget.get(
field.__class__)
if create_widget is not None:
if allow_floppify_widget_for_field(field):
return create_widget(widget)
create_widget = _django_to_floppyforms_widget.get(widget.__class__)
if create_widget is not None:
return create_widget(widget)
return widget
def floppify_form(form_class):
'''
Take a normal form and return a subclass of that form that replaces all
django widgets with the corresponding floppyforms widgets.
'''
new_form_class = type(form_class.__name__, (form_class,), {})
for field in new_form_class.base_fields.values():
field.widget = floppify_widget(field.widget, field=field)
return new_form_class
def modelform_factory(model, form=django.forms.models.ModelForm, fields=None,
exclude=None, formfield_callback=None, widgets=None):
form_class = django.forms.models.modelform_factory(
model=model,
form=form,
fields=fields,
exclude=exclude,
formfield_callback=formfield_callback,
widgets=widgets)
return floppify_form(form_class)
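# Usage sketch (added for illustration; not part of the original module).
# It shows how floppify_form() is intended to be used: pass in a plain
# django form class and get back a subclass whose fields render with the
# floppyforms widgets mapped above. The form and field names below are
# hypothetical, and the function is never called by this module.
def _floppify_usage_example():
    class ContactForm(django.forms.Form):
        name = django.forms.CharField()
        email = django.forms.EmailField()

    FloppyContactForm = floppify_form(ContactForm)
    # With django's default EmailInput in place (Django 1.6+), the email
    # field is mapped through _django_field_to_floppyform_widget to the
    # richer floppyforms EmailInput widget.
    email_widget = FloppyContactForm.base_fields['email'].widget
    assert isinstance(email_widget, floppyforms.widgets.EmailInput)
    return FloppyContactForm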
|
{
"content_hash": "42a69269e8ccb134176c4a401e462d08",
"timestamp": "",
"source": "github",
"line_count": 253,
"max_line_length": 94,
"avg_line_length": 40.87747035573123,
"alnum_prop": 0.6765615935022239,
"repo_name": "andrewsmedina/django-admin2",
"id": "bf439304622c7c3ad2e79e53b7f3f7aa6d835613",
"size": "10342",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "djadmin2/contrib/floppyforms.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "19219"
},
{
"name": "HTML",
"bytes": "42539"
},
{
"name": "JavaScript",
"bytes": "2540"
},
{
"name": "Python",
"bytes": "229693"
}
],
"symlink_target": ""
}
|
"""Calxeda: __init__.py """
import sys
import time
from cxmanage_api.tftp import InternalTftp, ExternalTftp
from cxmanage_api.node import Node
from cxmanage_api.tasks import TaskQueue
from cxmanage_api.cx_exceptions import TftpException
COMPONENTS = [
("ecme_version", "ECME version"),
("cdb_version", "CDB version"),
("stage2_version", "Stage2boot version"),
("bootlog_version", "Bootlog version"),
("a9boot_version", "A9boot version"),
("a15boot_version", "A15boot version"),
("uboot_version", "Uboot version"),
("ubootenv_version", "Ubootenv version"),
("dtb_version", "DTB version"),
("node_eeprom_version", "Node EEPROM version"),
("node_eeprom_config", "Node EEPROM config"),
("slot_eeprom_version", "Slot EEPROM version"),
("slot_eeprom_config", "Slot EEPROM config"),
("pmic_version", "PMIC version")
]
def get_tftp(args):
"""Get a TFTP server"""
if args.internal_tftp:
tftp_args = args.internal_tftp.split(':')
if len(tftp_args) == 1:
ip_address = tftp_args[0]
port = 0
elif len(tftp_args) == 2:
ip_address = tftp_args[0]
port = int(tftp_args[1])
else:
print ('ERROR: %s is not a valid argument for --internal-tftp'
% args.internal_tftp)
sys.exit(1)
return InternalTftp(ip_address=ip_address, port=port,
verbose=args.verbose)
elif args.external_tftp:
tftp_args = args.external_tftp.split(':')
if len(tftp_args) == 1:
ip_address = tftp_args[0]
port = 69
elif len(tftp_args) == 2:
ip_address = tftp_args[0]
port = int(tftp_args[1])
else:
print ('ERROR: %s is not a valid argument for --external-tftp'
% args.external_tftp)
sys.exit(1)
return ExternalTftp(ip_address=ip_address, port=port,
verbose=args.verbose)
return InternalTftp(verbose=args.verbose)
# pylint: disable=R0912
def get_nodes(args, tftp, verify_prompt=False):
"""Get nodes"""
hosts = []
for entry in args.hostname.split(','):
hosts.extend(parse_host_entry(entry))
credentials = {
"ecme_username": args.user,
"ecme_password": args.password,
"linux_username": args.linux_username,
"linux_password": args.linux_password
}
nodes = [
Node(
ip_address=x, credentials=credentials, tftp=tftp,
ecme_tftp_port=args.ecme_tftp_port, verbose=args.verbose
)
for x in hosts
]
if args.all_nodes:
if not args.quiet:
print("Getting IP addresses...")
results, errors = run_command(
args, nodes, "get_fabric_ipinfo", args.force
)
all_nodes = []
for node in nodes:
if node in results:
for node_id, ip_address in sorted(results[node].iteritems()):
new_node = Node(
ip_address=ip_address, credentials=credentials,
tftp=tftp, ecme_tftp_port=args.ecme_tftp_port,
verbose=args.verbose
)
new_node.node_id = node_id
if not new_node in all_nodes:
all_nodes.append(new_node)
node_strings = get_node_strings(args, all_nodes, justify=False)
if not args.quiet and all_nodes:
print("Discovered the following IP addresses:")
for node in all_nodes:
print node_strings[node]
print
if errors:
print("ERROR: Failed to get IP addresses. Aborting.\n")
sys.exit(1)
if args.nodes:
if len(all_nodes) != args.nodes:
print ("ERROR: Discovered %i nodes, expected %i. Aborting.\n"
% (len(all_nodes), args.nodes))
sys.exit(1)
elif verify_prompt and not args.force:
print(
"NOTE: Please check node count! Ensure discovery of all " +
"nodes in the cluster. Power cycle your system if the " +
"discovered node count does not equal nodes in " +
"your system.\n"
)
if not prompt_yes("Discovered %i nodes. Continue?"
% len(all_nodes)):
sys.exit(1)
return all_nodes
return nodes
def get_node_strings(args, nodes, justify=False):
""" Get string representations for the nodes. """
if args.ids:
strings = [str(x) for x in nodes]
else:
strings = [x.ip_address for x in nodes]
if strings and justify:
just_size = max(16, max(len(x) for x in strings) + 1)
strings = [x.ljust(just_size) for x in strings]
return dict(zip(nodes, strings))
# pylint: disable=R0915
def run_command(args, nodes, name, *method_args):
"""Runs a command on nodes."""
    if args.threads is not None:
task_queue = TaskQueue(threads=args.threads, delay=args.command_delay)
else:
task_queue = TaskQueue(delay=args.command_delay)
tasks = {}
for node in nodes:
target = node
for member in name.split("."):
target = getattr(target, member)
tasks[node] = task_queue.put(target, *method_args)
results = {}
errors = {}
try:
counter = 0
while any(x.is_alive() for x in tasks.values()):
if not args.quiet:
_print_command_status(tasks, counter)
counter += 1
time.sleep(0.25)
for node, task in tasks.iteritems():
if task.status == "Completed":
results[node] = task.result
else:
errors[node] = task.error
except KeyboardInterrupt:
args.retry = 0
for node, task in tasks.iteritems():
if task.status == "Completed":
results[node] = task.result
elif task.status == "Failed":
errors[node] = task.error
else:
errors[node] = KeyboardInterrupt(
"Aborted by keyboard interrupt"
)
if not args.quiet:
_print_command_status(tasks, counter)
print("\n")
# Handle errors
should_retry = False
if errors:
_print_errors(args, nodes, errors)
        if args.retry is None:
sys.stdout.write("Retry command on failed hosts? (y/n): ")
sys.stdout.flush()
while True:
command = raw_input().strip().lower()
if command in ['y', 'yes']:
should_retry = True
break
elif command in ['n', 'no']:
print
break
elif args.retry >= 1:
should_retry = True
if args.retry == 1:
print("Retrying command 1 more time...")
elif args.retry > 1:
print("Retrying command %i more times..." % args.retry)
args.retry -= 1
if should_retry:
nodes = [x for x in nodes if x in errors]
new_results, errors = run_command(args, nodes, name, *method_args)
results.update(new_results)
return results, errors
def prompt_yes(prompt):
"""Prompts the user. """
sys.stdout.write("%s (y/n) " % prompt)
sys.stdout.flush()
while True:
command = raw_input().strip().lower()
if command in ['y', 'yes']:
print
return True
elif command in ['n', 'no']:
print
return False
def parse_host_entry(entry, hostfiles=None):
"""parse a host entry"""
if not(hostfiles):
hostfiles = set()
try:
return parse_hostfile_entry(entry, hostfiles)
except ValueError:
try:
return parse_ip_range_entry(entry)
except ValueError:
return [entry]
def parse_hostfile_entry(entry, hostfiles=None):
"""parse a hostfile entry, returning a list of hosts"""
if not(hostfiles):
hostfiles = set()
if entry.startswith('file='):
filename = entry[5:]
elif entry.startswith('hostfile='):
filename = entry[9:]
else:
raise ValueError('%s is not a hostfile entry' % entry)
if filename in hostfiles:
return []
hostfiles.add(filename)
entries = []
try:
for line in open(filename):
for element in line.partition('#')[0].split():
for hostfile_entry in element.split(','):
entries.extend(parse_host_entry(hostfile_entry, hostfiles))
except IOError:
print 'ERROR: %s is not a valid hostfile entry' % entry
sys.exit(1)
return entries
def parse_ip_range_entry(entry):
""" Get a list of ip addresses in a given range"""
try:
start, end = entry.split('-')
# Convert start address to int
start_bytes = [int(x) for x in start.split('.')]
start_i = ((start_bytes[0] << 24) | (start_bytes[1] << 16)
| (start_bytes[2] << 8) | (start_bytes[3]))
# Convert end address to int
end_bytes = [int(x) for x in end.split('.')]
end_i = ((end_bytes[0] << 24) | (end_bytes[1] << 16)
| (end_bytes[2] << 8) | (end_bytes[3]))
# Get ip addresses in range
addresses = []
for i in range(start_i, end_i + 1):
address_bytes = [(i >> (24 - 8 * x)) & 0xff for x in range(4)]
addresses.append('%i.%i.%i.%i' % tuple(address_bytes))
except (ValueError, IndexError):
raise ValueError('%s is not an IP range' % entry)
return addresses
def _print_errors(args, nodes, errors):
    """ Print errors if they occurred """
if errors:
node_strings = get_node_strings(args, nodes, justify=True)
print("Command failed on these hosts")
for node in nodes:
if node in errors:
print("%s: %s" % (node_strings[node], errors[node]))
print
# Print a special message for TFTP errors
if all(isinstance(x, TftpException) for x in errors.itervalues()):
print(
"There may be networking issues (when behind NAT) between " +
"the host (where cxmanage is running) and the Calxeda node " +
"when establishing a TFTP session. Please refer to the " +
"documentation for more information.\n"
)
def _print_command_status(tasks, counter):
""" Print the status of a command """
message = "\r%i successes | %i errors | %i nodes left | %s"
successes = len([x for x in tasks.values() if x.status == "Completed"])
errors = len([x for x in tasks.values() if x.status == "Failed"])
nodes_left = len(tasks) - successes - errors
dots = "".join(["." for x in range(counter % 4)]).ljust(3)
sys.stdout.write(message % (successes, errors, nodes_left, dots))
sys.stdout.flush()
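# Usage sketch (added for illustration; not part of the original module).
# It shows how the host-entry helpers above expand the different forms
# accepted on the command line: a dash-separated IP range or a plain
# hostname. The addresses and hostname are made up for illustration, and
# the function is never called by this module.
def _host_entry_examples():
    # A dash-separated range expands to every address in between, inclusive.
    addresses = parse_ip_range_entry('192.168.1.1-192.168.1.3')
    assert addresses == ['192.168.1.1', '192.168.1.2', '192.168.1.3']
    # parse_host_entry() falls back to returning the entry unchanged when it
    # is neither a hostfile reference nor an IP range.
    assert parse_host_entry('node0.example.com') == ['node0.example.com']
    return addresses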
|
{
"content_hash": "64f2b9e2dd5111dc8f7e9125d9e2c5bd",
"timestamp": "",
"source": "github",
"line_count": 345,
"max_line_length": 79,
"avg_line_length": 32.197101449275365,
"alnum_prop": 0.5448325531148721,
"repo_name": "Cynerva/cxmanage",
"id": "bb0f2f7e8992c9ceb830760a6798fe3bd6ba8ac5",
"size": "12640",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "cxmanage_api/cli/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "416365"
},
{
"name": "Shell",
"bytes": "2366"
}
],
"symlink_target": ""
}
|
"""
FEZ graphics type readers
"""
from __future__ import print_function
from xnb_parse.type_reader import (TypeReaderPlugin, BaseTypeReader, ValueTypeReader, GenericTypeReader,
generic_reader_type)
from xnb_parse.type_readers.xna_graphics import PrimitiveTypeReader, Texture2DReader
from xnb_parse.type_readers.xna_math import MatrixReader, RectangleReader
from xnb_parse.type_readers.xna_primitive import StringReader, UInt16Reader
from xnb_parse.type_readers.xna_system import ListReader, ArrayReader, TimeSpanReader, ReflectiveReader
from xnb_parse.type_readers.fez.fez_basic import NpcActionReader, ActorTypeReader, SetReader, FaceOrientationReader
from xnb_parse.xna_types.xna_math import Vector3, Vector2
from xnb_parse.xna_types.fez.fez_graphics import (AnimatedTexture, Frame, ArtObject, ShaderInstancedIndexedPrimitives,
VertexPositionNormalTextureInstance, NpcMetadata, AnimatedTexturePC,
FramePC, ArtObjectPC)
# avoiding circular import
PLATFORM_WINDOWS = b'w'
class ArtObjectReader(BaseTypeReader, TypeReaderPlugin):
target_type = 'FezEngine.Structure.ArtObject'
reader_name = 'FezEngine.Readers.ArtObjectReader'
def read(self):
if self.file_platform == PLATFORM_WINDOWS:
name = self.stream.read_string()
cubemap = self.stream.read_object(Texture2DReader)
size = self.stream.read_vector3()
geometry = self.stream.read_object(ShaderInstancedIndexedPrimitivesReader,
[VertexPositionNormalTextureInstanceReader, MatrixReader])
actor_type = self.stream.read_object(ActorTypeReader)
no_silhouette = self.stream.read_boolean()
return ArtObjectPC(name, cubemap, size, geometry, actor_type, no_silhouette)
else:
name = self.stream.read_string()
cubemap_path = self.stream.read_string()
size = self.stream.read_vector3()
geometry = self.stream.read_object(ShaderInstancedIndexedPrimitivesReader,
[VertexPositionNormalTextureInstanceReader, MatrixReader])
actor_type = self.stream.read_object(ActorTypeReader)
no_silhouette = self.stream.read_boolean()
laser_outlets = self.stream.read_object(ReflectiveReader, [generic_reader_type(SetReader,
[FaceOrientationReader])])
return ArtObject(name, cubemap_path, size, geometry, actor_type, no_silhouette, laser_outlets)
class ShaderInstancedIndexedPrimitivesReader(GenericTypeReader, TypeReaderPlugin):
generic_target_type = 'FezEngine.Structure.Geometry.ShaderInstancedIndexedPrimitives`2'
generic_reader_name = 'FezEngine.Readers.ShaderInstancedIndexedPrimitivesReader`2'
def read(self):
primitive_type = self.stream.read_object(PrimitiveTypeReader)
vertices = self.stream.read_object(ArrayReader, [self.readers[0]])
indices = self.stream.read_object(ArrayReader, [UInt16Reader])
return ShaderInstancedIndexedPrimitives(primitive_type, vertices, indices)
class VertexPositionNormalTextureInstanceReader(ValueTypeReader, TypeReaderPlugin):
target_type = 'FezEngine.Structure.Geometry.VertexPositionNormalTextureInstance'
reader_name = 'FezEngine.Readers.VertexPositionNormalTextureInstanceReader'
def read(self):
values = self.stream.unpack('3f B 2f')
position = Vector3._make(values[0:3])
normal = values[3]
texture_coord = Vector2._make(values[4:6])
return VertexPositionNormalTextureInstance(position, normal, texture_coord)
class NpcMetadataReader(BaseTypeReader, TypeReaderPlugin):
target_type = 'FezEngine.Structure.NpcMetadata'
reader_name = 'FezEngine.Readers.NpcMetadataReader'
def read(self):
walk_speed = self.stream.read_single()
avoids_gomez = self.stream.read_boolean()
sound_path = self.stream.read_object(StringReader)
sound_actions = self.stream.read_object(ListReader, [NpcActionReader])
return NpcMetadata(walk_speed, avoids_gomez, sound_path, sound_actions)
class AnimatedTextureReader(BaseTypeReader, TypeReaderPlugin):
target_type = 'FezEngine.Structure.AnimatedTexture'
reader_name = 'FezEngine.Readers.AnimatedTextureReader'
def read(self):
if self.file_platform == PLATFORM_WINDOWS:
width = self.stream.read_int32()
height = self.stream.read_int32()
actual_width = self.stream.read_int32()
actual_height = self.stream.read_int32()
elements = self.stream.read_uint32()
data = self.stream.read(elements)
frames = self.stream.read_object(ListReader, [FrameReader])
return AnimatedTexturePC(width, height, actual_width, actual_height, data, frames)
else:
width = self.stream.read_int32()
height = self.stream.read_int32()
actual_width = self.stream.read_int32()
actual_height = self.stream.read_int32()
frames = self.stream.read_object(ListReader, [FrameReader])
return AnimatedTexture(width, height, actual_width, actual_height, frames)
class FrameReader(BaseTypeReader, TypeReaderPlugin):
target_type = 'FezEngine.Content.FrameContent'
reader_name = 'FezEngine.Readers.FrameReader'
def read(self):
if self.file_platform == PLATFORM_WINDOWS:
duration = self.stream.read_object(TimeSpanReader)
rectangle = self.stream.read_object(RectangleReader)
return FramePC(duration, rectangle)
else:
duration = self.stream.read_object(TimeSpanReader)
_ = self.stream.read_7bit_encoded_int()
elements = self.stream.read_uint32()
data = self.stream.read(elements * 4)
return Frame(duration, data)
|
{
"content_hash": "4c81345be700f4910cad4886574394e6",
"timestamp": "",
"source": "github",
"line_count": 122,
"max_line_length": 118,
"avg_line_length": 49.75409836065574,
"alnum_prop": 0.6782537067545304,
"repo_name": "fesh0r/xnb_parse",
"id": "42a3dee55d14535a8b510bf9aefa09e8dd1e86f9",
"size": "6070",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "xnb_parse/type_readers/fez/fez_graphics.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "3167"
},
{
"name": "Python",
"bytes": "290059"
},
{
"name": "Shell",
"bytes": "1385"
}
],
"symlink_target": ""
}
|
from __future__ import print_function
from subprocess import Popen, PIPE
dirty = Popen(["git", "diff-index", "--quiet", "HEAD"], stdout=PIPE).wait() != 0
commit_process = Popen(["git", "describe", "--always", "--tags", "--abbrev=0"], stdout=PIPE)
(version, err) = commit_process.communicate()
print("STABLE_GIT_VERSION %s%s" % (
version.decode("utf-8").replace("\n", ""),
"-dirty" if dirty else "")
)
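# Note (added for context): a script like this is normally passed to Bazel
# via --workspace_status_command; keys prefixed with STABLE_ end up in
# stable-status.txt and can be consumed by stamped build rules.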
|
{
"content_hash": "c49635bf318a564d7cf672670134a112",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 92,
"avg_line_length": 31.76923076923077,
"alnum_prop": 0.6246973365617433,
"repo_name": "bazelbuild/bazel-watcher",
"id": "9dd585101626935e07a66d69719a869f94b74b67",
"size": "497",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/workplace_status.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Go",
"bytes": "211167"
},
{
"name": "JavaScript",
"bytes": "6949"
},
{
"name": "Python",
"bytes": "497"
},
{
"name": "Shell",
"bytes": "1483"
},
{
"name": "Starlark",
"bytes": "33966"
}
],
"symlink_target": ""
}
|
import pythoncom
from win32com.shell import shell, shellcon
from win32com.server.policy import DesignatedWrapPolicy
tsf_flags = list((k,v) for k,v in shellcon.__dict__.items() if k.startswith('TSF_'))
def decode_flags(flags):
if flags == 0:
return 'TSF_NORMAL'
flag_txt = ''
for k,v in tsf_flags:
if flags & v:
if flag_txt:
flag_txt = flag_txt + '|' + k
else:
flag_txt = k
return flag_txt
TRANSFER_ADVISE_STATES = {}
for k,v in shellcon.__dict__.items():
if k.startswith('TS_'):
TRANSFER_ADVISE_STATES[v] = k
class TransferAdviseSink(DesignatedWrapPolicy):
_com_interfaces_ = [shell.IID_ITransferAdviseSink]
_public_methods_ = [
"UpdateProgress", "UpdateTransferState", "ConfirmOverwrite",
"ConfirmEncryptionLoss", "FileFailure", "SubStreamFailure", "PropertyFailure"
]
def __init__(self):
self._wrap_(self)
def UpdateProgress(self, SizeCurrent, SizeTotal, FilesCurrent, FilesTotal, FoldersCurrent, FoldersTotal):
print ('UpdateProgress - processed so far:')
print ('\t %s out of %s bytes' %(SizeCurrent, SizeTotal))
print ('\t %s out of %s files' %(FilesCurrent, FilesTotal))
print ('\t %s out of %s folders' %(FoldersCurrent, FoldersTotal))
def UpdateTransferState(self, State):
print ('Current state: ', TRANSFER_ADVISE_STATES.get(State, '??? Unknown state %s ???' %State))
    def ConfirmOverwrite(self, Source, DestParent, Name):
print ('ConfirmOverwrite: ', Source.GetDisplayName(shellcon.SHGDN_FORPARSING),
DestParent.GetDisplayName(shellcon.SHGDN_FORPARSING),
Name)
def ConfirmEncryptionLoss(self, Source):
print ('ConfirmEncryptionLoss:', Source.GetDisplayName(shellcon.SHGDN_FORPARSING))
    def FileFailure(self, Item, ItemName, Error):
print ('FileFailure:', Item.GetDisplayName(shellcon.SHGDN_FORPARSING), ItemName)
    def SubStreamFailure(self, Item, StreamName, Error):
print ('SubStreamFailure:\n')
    def PropertyFailure(self, Item, key, Error):
print ('PropertyFailure:\n')
def CreateSink():
return pythoncom.WrapObject(TransferAdviseSink(), shell.IID_ITransferAdviseSink, shell.IID_ITransferAdviseSink)
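# Usage sketch (added for illustration; not part of the original demo).
# decode_flags() turns a TSF_* bitmask back into a readable name list, and
# CreateSink() wraps the sink object so it can be handed to whatever shell
# transfer operation expects an ITransferAdviseSink. The flag combination
# below is picked arbitrarily from whatever TSF_* values this pywin32
# build exposes; the function is never called by this demo.
def _sink_usage_example():
    if len(tsf_flags) >= 2:
        combined = tsf_flags[0][1] | tsf_flags[1][1]
        print (decode_flags(combined))
    print (decode_flags(0))  # prints 'TSF_NORMAL'
    sink = CreateSink()
    return sink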
|
{
"content_hash": "784ae57daffc1383897bbafc096ecb3a",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 115,
"avg_line_length": 33.013698630136986,
"alnum_prop": 0.6800829875518672,
"repo_name": "ntuecon/server",
"id": "040cd0b53bbd769031979f92b84abe8f70697ef5",
"size": "2457",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pyenv/Lib/site-packages/win32comext/shell/demos/ITransferAdviseSink.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ASP",
"bytes": "2209"
},
{
"name": "Batchfile",
"bytes": "1509"
},
{
"name": "C",
"bytes": "504013"
},
{
"name": "C++",
"bytes": "96440"
},
{
"name": "CSS",
"bytes": "133288"
},
{
"name": "GAP",
"bytes": "18122"
},
{
"name": "HTML",
"bytes": "150026"
},
{
"name": "JavaScript",
"bytes": "243314"
},
{
"name": "Objective-C",
"bytes": "1292"
},
{
"name": "PowerShell",
"bytes": "8325"
},
{
"name": "Python",
"bytes": "27048260"
},
{
"name": "Shell",
"bytes": "47820"
},
{
"name": "Tcl",
"bytes": "1237796"
},
{
"name": "Visual Basic",
"bytes": "949"
},
{
"name": "XSLT",
"bytes": "2113"
}
],
"symlink_target": ""
}
|
import logging
logger = logging.getLogger(__name__)
class Context:
def __init__(self, checker, newcontext):
self.checker = checker
self.newcontext = newcontext
def __enter__(self):
self.checker.context.append(self.newcontext)
def __exit__(self, type, value, traceback):
self.checker.context.pop()
class CompatibilityChecker:
def __init__(self, fonts):
self.errors = []
self.context = []
self.okay = True
self.fonts = fonts
def check(self):
first = self.fonts[0]
skip_export_glyphs = set(first.lib.get("public.skipExportGlyphs", ()))
for glyph in first.keys():
if glyph in skip_export_glyphs:
continue
self.current_fonts = [font for font in self.fonts if glyph in font]
glyphs = [font[glyph] for font in self.current_fonts]
with Context(self, f"glyph {glyph}"):
self.check_glyph(glyphs)
return self.okay
def check_glyph(self, glyphs):
if self.ensure_all_same(len, glyphs, "number of contours"):
for ix, contours in enumerate(zip(*glyphs)):
with Context(self, f"contour {ix}"):
self.check_contours(contours)
anchors = [g.anchors for g in glyphs]
self.ensure_all_same(
lambda anchors: '"' + (", ".join(sorted(a.name for a in anchors))) + '"',
anchors,
"anchors",
)
components = [g.components for g in glyphs]
if self.ensure_all_same(len, components, "number of components"):
for ix, component in enumerate(zip(*components)):
with Context(self, f"component {ix}"):
self.ensure_all_same(lambda c: c.baseGlyph, component, "base glyph")
def check_contours(self, contours):
if not self.ensure_all_same(len, contours, "number of points"):
return
for ix, point in enumerate(zip(*contours)):
with Context(self, f"point {ix}"):
self.ensure_all_same(lambda x: x.type, point, "point type")
def ensure_all_same(self, func, objs, what):
values = {}
context = ", ".join(self.context)
for obj, font in zip(objs, self.current_fonts):
values.setdefault(func(obj), []).append(self._name_for(font))
if len(values) < 2:
logger.debug(f"All fonts had same {what} in {context}")
return True
logger.error(f"Fonts had differing {what} in {context}:")
debug_enabled = logger.isEnabledFor(logging.DEBUG)
for value, fonts in values.items():
if debug_enabled or len(fonts) <= 6:
key = ", ".join(fonts)
else:
key = f"{len(fonts)} fonts"
logger.error(f" * {key} had {value}")
self.okay = False
return False
def _name_for(self, font):
names = list(filter(None, [font.info.familyName, font.info.styleName]))
return " ".join(names)
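# Usage sketch (added for illustration; not part of the original module).
# CompatibilityChecker expects a list of UFO font objects as used by
# fontmake; here they are assumed to be loaded with ufoLib2, and the UFO
# paths are hypothetical. check() returns False and logs the differences
# when the masters are not interpolation compatible.
def _compatibility_check_example():
    import ufoLib2  # assumed to be available alongside fontmake

    fonts = [
        ufoLib2.Font.open("MyFamily-Light.ufo"),
        ufoLib2.Font.open("MyFamily-Bold.ufo"),
    ]
    logging.basicConfig(level=logging.INFO)
    if not CompatibilityChecker(fonts).check():
        raise SystemExit("masters are not interpolation compatible")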
|
{
"content_hash": "c46b8a6fcd42e85c48600b92e8636c86",
"timestamp": "",
"source": "github",
"line_count": 84,
"max_line_length": 88,
"avg_line_length": 36.29761904761905,
"alnum_prop": 0.5647753361757953,
"repo_name": "googlei18n/fontmake",
"id": "8a164cee56f08c6dfbcb6144ed15a1cf15171164",
"size": "3049",
"binary": false,
"copies": "2",
"ref": "refs/heads/pyup-scheduled-update-2022-10-10",
"path": "Lib/fontmake/compatibility.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "70083"
},
{
"name": "Shell",
"bytes": "4075"
}
],
"symlink_target": ""
}
|
"""Accesses the google.monitoring.v3 GroupService API."""
import functools
import pkg_resources
import warnings
from google.oauth2 import service_account
import google.api_core.gapic_v1.client_info
import google.api_core.gapic_v1.config
import google.api_core.gapic_v1.method
import google.api_core.grpc_helpers
import google.api_core.page_iterator
import google.api_core.path_template
import google.api_core.protobuf_helpers
import grpc
from google.cloud.monitoring_v3.gapic import enums
from google.cloud.monitoring_v3.gapic import group_service_client_config
from google.cloud.monitoring_v3.gapic.transports import group_service_grpc_transport
from google.cloud.monitoring_v3.proto import alert_pb2
from google.cloud.monitoring_v3.proto import alert_service_pb2
from google.cloud.monitoring_v3.proto import alert_service_pb2_grpc
from google.cloud.monitoring_v3.proto import common_pb2
from google.cloud.monitoring_v3.proto import group_pb2
from google.cloud.monitoring_v3.proto import group_service_pb2
from google.cloud.monitoring_v3.proto import group_service_pb2_grpc
from google.protobuf import empty_pb2
from google.protobuf import field_mask_pb2
_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution(
"google-cloud-monitoring"
).version
class GroupServiceClient(object):
"""
The Group API lets you inspect and manage your
`groups <#google.monitoring.v3.Group>`__.
A group is a named filter that is used to identify a collection of
monitored resources. Groups are typically used to mirror the physical
and/or logical topology of the environment. Because group membership is
computed dynamically, monitored resources that are started in the future
are automatically placed in matching groups. By using a group to name
monitored resources in, for example, an alert policy, the target of that
alert policy is updated automatically as monitored resources are added
and removed from the infrastructure.
"""
SERVICE_ADDRESS = "monitoring.googleapis.com:443"
"""The default address of the service."""
# The name of the interface for this client. This is the key used to
# find the method configuration in the client_config dictionary.
_INTERFACE_NAME = "google.monitoring.v3.GroupService"
@classmethod
def from_service_account_file(cls, filename, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
GroupServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(filename)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
@classmethod
def project_path(cls, project):
"""Return a fully-qualified project string."""
return google.api_core.path_template.expand(
"projects/{project}", project=project
)
@classmethod
def group_path(cls, project, group):
"""Return a fully-qualified group string."""
return google.api_core.path_template.expand(
"projects/{project}/groups/{group}", project=project, group=group
)
def __init__(
self,
transport=None,
channel=None,
credentials=None,
client_config=None,
client_info=None,
):
"""Constructor.
Args:
transport (Union[~.GroupServiceGrpcTransport,
Callable[[~.Credentials, type], ~.GroupServiceGrpcTransport]): A transport
instance, responsible for actually making the API calls.
The default transport uses the gRPC protocol.
This argument may also be a callable which returns a
transport instance. Callables will be sent the credentials
as the first argument and the default transport class as
the second argument.
channel (grpc.Channel): DEPRECATED. A ``Channel`` instance
through which to make calls. This argument is mutually exclusive
with ``credentials``; providing both will raise an exception.
credentials (google.auth.credentials.Credentials): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is mutually exclusive with providing a
transport instance to ``transport``; doing so will raise
an exception.
client_config (dict): DEPRECATED. A dictionary of call options for
each method. If not specified, the default configuration is used.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
"""
# Raise deprecation warnings for things we want to go away.
if client_config is not None:
warnings.warn(
"The `client_config` argument is deprecated.",
PendingDeprecationWarning,
stacklevel=2,
)
else:
client_config = group_service_client_config.config
if channel:
warnings.warn(
"The `channel` argument is deprecated; use " "`transport` instead.",
PendingDeprecationWarning,
stacklevel=2,
)
# Instantiate the transport.
# The transport is responsible for handling serialization and
# deserialization and actually sending data to the service.
if transport:
if callable(transport):
self.transport = transport(
credentials=credentials,
default_class=group_service_grpc_transport.GroupServiceGrpcTransport,
)
else:
if credentials:
raise ValueError(
"Received both a transport instance and "
"credentials; these are mutually exclusive."
)
self.transport = transport
else:
self.transport = group_service_grpc_transport.GroupServiceGrpcTransport(
address=self.SERVICE_ADDRESS, channel=channel, credentials=credentials
)
if client_info is None:
client_info = google.api_core.gapic_v1.client_info.ClientInfo(
gapic_version=_GAPIC_LIBRARY_VERSION
)
else:
client_info.gapic_version = _GAPIC_LIBRARY_VERSION
self._client_info = client_info
# Parse out the default settings for retry and timeout for each RPC
# from the client configuration.
# (Ordinarily, these are the defaults specified in the `*_config.py`
# file next to this one.)
self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(
client_config["interfaces"][self._INTERFACE_NAME]
)
# Save a dictionary of cached API call functions.
# These are the actual callables which invoke the proper
# transport methods, wrapped with `wrap_method` to add retry,
# timeout, and the like.
self._inner_api_calls = {}
# Service calls
def list_groups(
self,
name,
children_of_group=None,
ancestors_of_group=None,
descendants_of_group=None,
page_size=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Lists the existing groups.
Example:
>>> from google.cloud import monitoring_v3
>>>
>>> client = monitoring_v3.GroupServiceClient()
>>>
>>> name = client.project_path('[PROJECT]')
>>>
>>> # Iterate over all results
>>> for element in client.list_groups(name):
... # process element
... pass
>>>
>>>
>>> # Alternatively:
>>>
>>> # Iterate over results one page at a time
>>> for page in client.list_groups(name).pages:
... for element in page:
... # process element
... pass
Args:
name (str): The project whose groups are to be listed. The format is
``"projects/{project_id_or_number}"``.
children_of_group (str): A group name: ``"projects/{project_id_or_number}/groups/{group_id}"``.
Returns groups whose ``parentName`` field contains the group name. If no
groups have this parent, the results are empty.
ancestors_of_group (str): A group name: ``"projects/{project_id_or_number}/groups/{group_id}"``.
Returns groups that are ancestors of the specified group. The groups are
returned in order, starting with the immediate parent and ending with
the most distant ancestor. If the specified group has no immediate
parent, the results are empty.
descendants_of_group (str): A group name: ``"projects/{project_id_or_number}/groups/{group_id}"``.
Returns the descendants of the specified group. This is a superset of
the results returned by the ``childrenOfGroup`` filter, and includes
children-of-children, and so forth.
page_size (int): The maximum number of resources contained in the
underlying API response. If page streaming is performed per-
resource, this parameter does not affect the return value. If page
streaming is performed per-page, this determines the maximum number
of resources in a page.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.gax.PageIterator` instance. By default, this
is an iterable of :class:`~google.cloud.monitoring_v3.types.Group` instances.
This object can also be configured to iterate over the pages
of the response through the `options` parameter.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
if metadata is None:
metadata = []
metadata = list(metadata)
# Wrap the transport method to add retry and timeout logic.
if "list_groups" not in self._inner_api_calls:
self._inner_api_calls[
"list_groups"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.list_groups,
default_retry=self._method_configs["ListGroups"].retry,
default_timeout=self._method_configs["ListGroups"].timeout,
client_info=self._client_info,
)
# Sanity check: We have some fields which are mutually exclusive;
# raise ValueError if more than one is sent.
google.api_core.protobuf_helpers.check_oneof(
children_of_group=children_of_group,
ancestors_of_group=ancestors_of_group,
descendants_of_group=descendants_of_group,
)
request = group_service_pb2.ListGroupsRequest(
name=name,
children_of_group=children_of_group,
ancestors_of_group=ancestors_of_group,
descendants_of_group=descendants_of_group,
page_size=page_size,
)
iterator = google.api_core.page_iterator.GRPCIterator(
client=None,
method=functools.partial(
self._inner_api_calls["list_groups"],
retry=retry,
timeout=timeout,
metadata=metadata,
),
request=request,
items_field="group",
request_token_field="page_token",
response_token_field="next_page_token",
)
return iterator
def get_group(
self,
name,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Gets a single group.
Example:
>>> from google.cloud import monitoring_v3
>>>
>>> client = monitoring_v3.GroupServiceClient()
>>>
>>> name = client.group_path('[PROJECT]', '[GROUP]')
>>>
>>> response = client.get_group(name)
Args:
name (str): The group to retrieve. The format is
``"projects/{project_id_or_number}/groups/{group_id}"``.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.monitoring_v3.types.Group` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
if metadata is None:
metadata = []
metadata = list(metadata)
# Wrap the transport method to add retry and timeout logic.
if "get_group" not in self._inner_api_calls:
self._inner_api_calls[
"get_group"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.get_group,
default_retry=self._method_configs["GetGroup"].retry,
default_timeout=self._method_configs["GetGroup"].timeout,
client_info=self._client_info,
)
request = group_service_pb2.GetGroupRequest(name=name)
return self._inner_api_calls["get_group"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def create_group(
self,
name,
group,
validate_only=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Creates a new group.
Example:
>>> from google.cloud import monitoring_v3
>>>
>>> client = monitoring_v3.GroupServiceClient()
>>>
>>> name = client.project_path('[PROJECT]')
>>>
>>> # TODO: Initialize `group`:
>>> group = {}
>>>
>>> response = client.create_group(name, group)
Args:
name (str): The project in which to create the group. The format is
``"projects/{project_id_or_number}"``.
group (Union[dict, ~google.cloud.monitoring_v3.types.Group]): A group definition. It is an error to define the ``name`` field because
the system assigns the name.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.monitoring_v3.types.Group`
validate_only (bool): If true, validate this request but do not create the group.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.monitoring_v3.types.Group` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
if metadata is None:
metadata = []
metadata = list(metadata)
# Wrap the transport method to add retry and timeout logic.
if "create_group" not in self._inner_api_calls:
self._inner_api_calls[
"create_group"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.create_group,
default_retry=self._method_configs["CreateGroup"].retry,
default_timeout=self._method_configs["CreateGroup"].timeout,
client_info=self._client_info,
)
request = group_service_pb2.CreateGroupRequest(
name=name, group=group, validate_only=validate_only
)
return self._inner_api_calls["create_group"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def update_group(
self,
group,
validate_only=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Updates an existing group. You can change any group attributes except
``name``.
Example:
>>> from google.cloud import monitoring_v3
>>>
>>> client = monitoring_v3.GroupServiceClient()
>>>
>>> # TODO: Initialize `group`:
>>> group = {}
>>>
>>> response = client.update_group(group)
Args:
group (Union[dict, ~google.cloud.monitoring_v3.types.Group]): The new definition of the group. All fields of the existing group,
excepting ``name``, are replaced with the corresponding fields of this
group.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.monitoring_v3.types.Group`
validate_only (bool): If true, validate this request but do not update the existing group.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.monitoring_v3.types.Group` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
if metadata is None:
metadata = []
metadata = list(metadata)
# Wrap the transport method to add retry and timeout logic.
if "update_group" not in self._inner_api_calls:
self._inner_api_calls[
"update_group"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.update_group,
default_retry=self._method_configs["UpdateGroup"].retry,
default_timeout=self._method_configs["UpdateGroup"].timeout,
client_info=self._client_info,
)
request = group_service_pb2.UpdateGroupRequest(
group=group, validate_only=validate_only
)
return self._inner_api_calls["update_group"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def delete_group(
self,
name,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Deletes an existing group.
Example:
>>> from google.cloud import monitoring_v3
>>>
>>> client = monitoring_v3.GroupServiceClient()
>>>
>>> name = client.group_path('[PROJECT]', '[GROUP]')
>>>
>>> client.delete_group(name)
Args:
name (str): The group to delete. The format is
``"projects/{project_id_or_number}/groups/{group_id}"``.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
if metadata is None:
metadata = []
metadata = list(metadata)
# Wrap the transport method to add retry and timeout logic.
if "delete_group" not in self._inner_api_calls:
self._inner_api_calls[
"delete_group"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.delete_group,
default_retry=self._method_configs["DeleteGroup"].retry,
default_timeout=self._method_configs["DeleteGroup"].timeout,
client_info=self._client_info,
)
request = group_service_pb2.DeleteGroupRequest(name=name)
self._inner_api_calls["delete_group"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def list_group_members(
self,
name,
page_size=None,
filter_=None,
interval=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Lists the monitored resources that are members of a group.
Example:
>>> from google.cloud import monitoring_v3
>>>
>>> client = monitoring_v3.GroupServiceClient()
>>>
>>> name = client.group_path('[PROJECT]', '[GROUP]')
>>>
>>> # Iterate over all results
>>> for element in client.list_group_members(name):
... # process element
... pass
>>>
>>>
>>> # Alternatively:
>>>
>>> # Iterate over results one page at a time
>>> for page in client.list_group_members(name).pages:
... for element in page:
... # process element
... pass
Args:
name (str): The group whose members are listed. The format is
``"projects/{project_id_or_number}/groups/{group_id}"``.
page_size (int): The maximum number of resources contained in the
underlying API response. If page streaming is performed per-
resource, this parameter does not affect the return value. If page
streaming is performed per-page, this determines the maximum number
of resources in a page.
filter_ (str): An optional `list
filter <https://cloud.google.com/monitoring/api/learn_more#filtering>`__
describing the members to be returned. The filter may reference the
type, labels, and metadata of monitored resources that comprise the
group. For example, to return only resources representing Compute Engine
VM instances, use this filter:
::
resource.type = "gce_instance"
interval (Union[dict, ~google.cloud.monitoring_v3.types.TimeInterval]): An optional time interval for which results should be returned. Only
members that were part of the group during the specified interval are
included in the response. If no interval is provided then the group
membership over the last minute is returned.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.monitoring_v3.types.TimeInterval`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.gax.PageIterator` instance. By default, this
is an iterable of :class:`~google.cloud.monitoring_v3.types.MonitoredResource` instances.
This object can also be configured to iterate over the pages
of the response through the `options` parameter.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
if metadata is None:
metadata = []
metadata = list(metadata)
# Wrap the transport method to add retry and timeout logic.
if "list_group_members" not in self._inner_api_calls:
self._inner_api_calls[
"list_group_members"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.list_group_members,
default_retry=self._method_configs["ListGroupMembers"].retry,
default_timeout=self._method_configs["ListGroupMembers"].timeout,
client_info=self._client_info,
)
request = group_service_pb2.ListGroupMembersRequest(
name=name, page_size=page_size, filter=filter_, interval=interval
)
iterator = google.api_core.page_iterator.GRPCIterator(
client=None,
method=functools.partial(
self._inner_api_calls["list_group_members"],
retry=retry,
timeout=timeout,
metadata=metadata,
),
request=request,
items_field="members",
request_token_field="page_token",
response_token_field="next_page_token",
)
return iterator
|
{
"content_hash": "efed638e12d75298cca76bb2bb54684f",
"timestamp": "",
"source": "github",
"line_count": 682,
"max_line_length": 152,
"avg_line_length": 43.183284457478,
"alnum_prop": 0.5911514040270279,
"repo_name": "dhermes/gcloud-python",
"id": "8a28b7b7dc32564ffccb9c24be2df67d88f492ed",
"size": "30052",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "monitoring/google/cloud/monitoring_v3/gapic/group_service_client.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3366"
},
{
"name": "PowerShell",
"bytes": "7195"
},
{
"name": "Protocol Buffer",
"bytes": "95635"
},
{
"name": "Python",
"bytes": "2871895"
},
{
"name": "Shell",
"bytes": "4683"
}
],
"symlink_target": ""
}
|
"""
Test lldb process launch flags.
"""
from __future__ import print_function
import copy
import os
import time
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
import six
class ProcessLaunchTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
NO_DEBUG_INFO_TESTCASE = True
def setUp(self):
# Call super's setUp().
TestBase.setUp(self)
self.runCmd("settings set auto-confirm true")
def tearDown(self):
self.runCmd("settings clear auto-confirm")
TestBase.tearDown(self)
@not_remote_testsuite_ready
def test_io(self):
"""Test that process launch I/O redirection flags work properly."""
self.build()
exe = self.getBuildArtifact("a.out")
self.expect("file " + exe,
patterns=["Current executable set to .*a.out"])
in_file = os.path.join(self.getSourceDir(), "input-file.txt")
out_file = lldbutil.append_to_process_working_directory(self, "output-test.out")
err_file = lldbutil.append_to_process_working_directory(self, "output-test.err")
# Make sure the output files do not exist before launching the process
try:
os.remove(out_file)
except OSError:
pass
try:
os.remove(err_file)
except OSError:
pass
launch_command = "process launch -i '{0}' -o '{1}' -e '{2}' -w '{3}'".format(
in_file, out_file, err_file, self.get_process_working_directory())
if lldb.remote_platform:
self.runCmd('platform put-file "{local}" "{remote}"'.format(
local=in_file, remote=in_file))
self.expect(launch_command,
patterns=["Process .* launched: .*a.out"])
success = True
err_msg = ""
out = lldbutil.read_file_on_target(self, out_file)
if out != "This should go to stdout.\n":
success = False
err_msg = err_msg + " ERROR: stdout file does not contain correct output.\n"
err = lldbutil.read_file_on_target(self, err_file)
if err != "This should go to stderr.\n":
success = False
err_msg = err_msg + " ERROR: stderr file does not contain correct output.\n"
if not success:
self.fail(err_msg)
# rdar://problem/9056462
# The process launch flag '-w' for setting the current working directory
# not working?
@not_remote_testsuite_ready
@expectedFailureAll(oslist=["linux"], bugnumber="llvm.org/pr20265")
def test_set_working_dir_nonexisting(self):
"""Test that '-w dir' fails to set the working dir when running the inferior with a dir which doesn't exist."""
d = {'CXX_SOURCES': 'print_cwd.cpp'}
self.build(dictionary=d)
self.setTearDownCleanup(d)
exe = self.getBuildArtifact("a.out")
self.runCmd("file " + exe)
mywd = 'my_working_dir'
out_file_name = "my_working_dir_test.out"
err_file_name = "my_working_dir_test.err"
my_working_dir_path = self.getBuildArtifact(mywd)
out_file_path = os.path.join(my_working_dir_path, out_file_name)
err_file_path = os.path.join(my_working_dir_path, err_file_name)
# Check that we get an error when we have a nonexisting path
invalid_dir_path = mywd + 'z'
launch_command = "process launch -w %s -o %s -e %s" % (
invalid_dir_path, out_file_path, err_file_path)
self.expect(
launch_command, error=True, patterns=[
"error:.* No such file or directory: %s" %
invalid_dir_path])
@not_remote_testsuite_ready
def test_set_working_dir_existing(self):
"""Test that '-w dir' sets the working dir when running the inferior."""
d = {'CXX_SOURCES': 'print_cwd.cpp'}
self.build(dictionary=d)
self.setTearDownCleanup(d)
exe = self.getBuildArtifact("a.out")
self.runCmd("file " + exe)
mywd = 'my_working_dir'
out_file_name = "my_working_dir_test.out"
err_file_name = "my_working_dir_test.err"
my_working_dir_path = self.getBuildArtifact(mywd)
lldbutil.mkdir_p(my_working_dir_path)
out_file_path = os.path.join(my_working_dir_path, out_file_name)
err_file_path = os.path.join(my_working_dir_path, err_file_name)
# Make sure the output files do not exist before launching the process
try:
os.remove(out_file_path)
os.remove(err_file_path)
except OSError:
pass
launch_command = "process launch -w %s -o %s -e %s" % (
my_working_dir_path, out_file_path, err_file_path)
self.expect(launch_command,
patterns=["Process .* launched: .*a.out"])
success = True
err_msg = ""
# Check to see if the 'stdout' file was created
try:
out_f = open(out_file_path)
except IOError:
success = False
err_msg = err_msg + "ERROR: stdout file was not created.\n"
else:
# Check to see if the 'stdout' file contains the right output
line = out_f.readline()
if self.TraceOn():
print("line:", line)
if not re.search(mywd, line):
success = False
err_msg = err_msg + "The current working directory was not set correctly.\n"
out_f.close()
# Try to delete the 'stdout' and 'stderr' files
try:
os.remove(out_file_path)
os.remove(err_file_path)
pass
except OSError:
pass
if not success:
self.fail(err_msg)
def test_environment_with_special_char(self):
"""Test that environment variables containing '*' and '}' are handled correctly by the inferior."""
source = 'print_env.cpp'
d = {'CXX_SOURCES': source}
self.build(dictionary=d)
self.setTearDownCleanup(d)
exe = self.getBuildArtifact("a.out")
evil_var = 'INIT*MIDDLE}TAIL'
target = self.dbg.CreateTarget(exe)
main_source_spec = lldb.SBFileSpec(source)
breakpoint = target.BreakpointCreateBySourceRegex(
'// Set breakpoint here.', main_source_spec)
process = target.LaunchSimple(None,
['EVIL=' + evil_var],
self.get_process_working_directory())
self.assertEqual(
process.GetState(),
lldb.eStateStopped,
PROCESS_STOPPED)
threads = lldbutil.get_threads_stopped_at_breakpoint(
process, breakpoint)
self.assertEqual(len(threads), 1)
frame = threads[0].GetFrameAtIndex(0)
sbvalue = frame.EvaluateExpression("evil")
value = sbvalue.GetSummary().strip('"')
self.assertEqual(value, evil_var)
process.Continue()
self.assertEqual(process.GetState(), lldb.eStateExited, PROCESS_EXITED)
pass
|
{
"content_hash": "86dd18669bb83f4f4932f166d55fc76b",
"timestamp": "",
"source": "github",
"line_count": 208,
"max_line_length": 119,
"avg_line_length": 34.47596153846154,
"alnum_prop": 0.5824850090642867,
"repo_name": "youtube/cobalt_sandbox",
"id": "9d1cac90d85693d0a146c12f9c3cf56acf1215f9",
"size": "7171",
"binary": false,
"copies": "3",
"ref": "refs/heads/main",
"path": "third_party/llvm-project/lldb/packages/Python/lldbsuite/test/functionalities/process_launch/TestProcessLaunch.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
import os
import importlib
from collections import OrderedDict
import json
from modularodm import storage
from werkzeug.contrib.fixers import ProxyFix
import framework
from framework.render.core import init_mfr
from framework.flask import app, add_handlers
from framework.logging import logger
from framework.mongo import set_up_storage
from framework.addons.utils import render_addon_capabilities
from framework.sentry import sentry
from framework.mongo import handlers as mongo_handlers
from framework.tasks import handlers as task_handlers
from framework.transactions import handlers as transaction_handlers
import website.models
from website.routes import make_url_map
from website.addons.base import init_addon
from website.project.model import ensure_schemas, Node
def build_js_config_files(settings):
with open(os.path.join(settings.STATIC_FOLDER, 'built', 'nodeCategories.json'), 'wb') as fp:
json.dump(Node.CATEGORY_MAP, fp)
def init_addons(settings, routes=True):
"""Initialize each addon in settings.ADDONS_REQUESTED.
:param module settings: The settings module.
:param bool routes: Add each addon's routing rules to the URL map.
"""
settings.ADDONS_AVAILABLE = getattr(settings, 'ADDONS_AVAILABLE', [])
settings.ADDONS_AVAILABLE_DICT = getattr(settings, 'ADDONS_AVAILABLE_DICT', OrderedDict())
for addon_name in settings.ADDONS_REQUESTED:
addon = init_addon(app, addon_name, routes=routes)
if addon:
if addon not in settings.ADDONS_AVAILABLE:
settings.ADDONS_AVAILABLE.append(addon)
settings.ADDONS_AVAILABLE_DICT[addon.short_name] = addon
settings.ADDON_CAPABILITIES = render_addon_capabilities(settings.ADDONS_AVAILABLE)
def add_cors_headers(response):
response.headers['Access-Control-Allow-Origin'] = '*'
return response
def attach_handlers(app, settings):
"""Add callback handlers to ``app`` in the correct order."""
# Add callback handlers to application
add_handlers(app, mongo_handlers.handlers)
add_handlers(app, task_handlers.handlers)
add_handlers(app, transaction_handlers.handlers)
# Attach handler for checking view-only link keys.
    # NOTE: This must be attached AFTER the TokuMX handlers to avoid calling
    # a commitTransaction (in toku's after_request handler) when no transaction
    # has been created
add_handlers(app, {'before_request': framework.sessions.prepare_private_key})
# framework.session's before_request handler must go after
# prepare_private_key, else view-only links won't work
add_handlers(app, {'before_request': framework.sessions.before_request})
# Needed to allow the offload server and main server to properly interact
# without cors issues. See @jmcarp, @chrisseto, or @icereval for more detail
if settings.DEBUG_MODE:
add_handlers(app, {'after_request': add_cors_headers})
return app
def build_addon_log_templates(build_fp, settings):
for addon in settings.ADDONS_REQUESTED:
log_path = os.path.join(settings.ADDON_PATH, addon, 'templates', 'log_templates.mako')
try:
with open(log_path) as addon_fp:
build_fp.write(addon_fp.read())
except IOError:
pass
def build_log_templates(settings):
"""Write header and core templates to the built log templates file."""
with open(settings.BUILT_TEMPLATES, 'w') as build_fp:
build_fp.write('## Built templates file. DO NOT MODIFY.\n')
with open(settings.CORE_TEMPLATES) as core_fp:
# Exclude comments in core templates mako file
content = '\n'.join([line.rstrip() for line in
core_fp.readlines() if not line.strip().startswith('##')])
build_fp.write(content)
build_fp.write('\n')
build_addon_log_templates(build_fp, settings)
def init_app(settings_module='website.settings', set_backends=True, routes=True, mfr=False,
attach_request_handlers=True):
"""Initializes the OSF. A sort of pseudo-app factory that allows you to
bind settings, set up routing, and set storage backends, but only acts on
a single app instance (rather than creating multiple instances).
:param settings_module: A string, the settings module to use.
:param set_backends: Whether to set the database storage backends.
:param routes: Whether to set the url map.
"""
# The settings module
settings = importlib.import_module(settings_module)
build_log_templates(settings)
init_addons(settings, routes)
build_js_config_files(settings)
app.debug = settings.DEBUG_MODE
if mfr:
init_mfr(app)
if set_backends:
logger.debug('Setting storage backends')
set_up_storage(
website.models.MODELS,
storage.MongoStorage,
addons=settings.ADDONS_AVAILABLE,
)
if routes:
try:
make_url_map(app)
except AssertionError: # Route map has already been created
pass
if attach_request_handlers:
attach_handlers(app, settings)
if app.debug:
logger.info("Sentry disabled; Flask's debug mode enabled")
else:
sentry.init_app(app)
logger.info("Sentry enabled; Flask's debug mode disabled")
if set_backends:
ensure_schemas()
apply_middlewares(app, settings)
return app
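# Example usage (illustrative; the settings module shown is the default):
#     app = init_app('website.settings', set_backends=True, routes=True)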
def apply_middlewares(flask_app, settings):
# Use ProxyFix to respect X-Forwarded-Proto header
# https://stackoverflow.com/questions/23347387/x-forwarded-proto-and-flask
if settings.LOAD_BALANCER:
flask_app.wsgi_app = ProxyFix(flask_app.wsgi_app)
return flask_app
|
{
"content_hash": "fe1250ff8c04ae63dbc1a08a16cfe790",
"timestamp": "",
"source": "github",
"line_count": 152,
"max_line_length": 96,
"avg_line_length": 37.421052631578945,
"alnum_prop": 0.6998945147679325,
"repo_name": "zkraime/osf.io",
"id": "7e4dd8dc32ea44ce7ce56f0a90c4f541098ef41a",
"size": "5713",
"binary": false,
"copies": "3",
"ref": "refs/heads/develop",
"path": "website/app.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "84847"
},
{
"name": "HTML",
"bytes": "16454"
},
{
"name": "JavaScript",
"bytes": "973662"
},
{
"name": "Mako",
"bytes": "470539"
},
{
"name": "Python",
"bytes": "2645021"
},
{
"name": "Shell",
"bytes": "234"
}
],
"symlink_target": ""
}
|
from setuptools import setup, find_packages
import SCFpy
import os
def extra_dependencies():
import sys
ret = []
if sys.version_info < (2, 7):
ret.append('argparse')
return ret
def read(*names):
values = dict()
extensions = ['.txt', '.rst']
for name in names:
value = ''
for extension in extensions:
filename = name + extension
if os.path.isfile(filename):
value = open(name + extension).read()
break
values[name] = value
return values
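# For example, read('README', 'CHANGES') returns a dict mapping each name to
# the contents of the first matching file among '<name>.txt' and '<name>.rst'
# (or '' if neither exists).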
long_description = """
%(README)s
News
====
%(CHANGES)s
""" % read('README', 'CHANGES')
setup(
name='SCFpy',
version=SCFpy.__version__,
description='A Simple restricted Hartree-Fock code',
long_description=long_description,
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Console",
"Intended Audience :: Developers",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Topic :: Scientific/Engineering :: Chemistry",
],
keywords='A Simple restricted Hartree-Fock code',
author='Pu Du',
author_email='[email protected]',
maintainer='Pu Du',
maintainer_email='[email protected]',
url='https://github.com/ipudu/SCFpy',
license='MIT',
packages=find_packages(),
entry_points={
'console_scripts': [
'scfpy = SCFpy.main:command_line_runner',
]
},
install_requires=[
'numpy',
] + extra_dependencies(),
)
|
{
"content_hash": "1026503bc13f212646a7a603d9541be2",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 56,
"avg_line_length": 26.470588235294116,
"alnum_prop": 0.5772222222222222,
"repo_name": "pudu1991/SCFpy",
"id": "36fe01db95825debb633af4068b45498bd16252e",
"size": "1823",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "16432"
}
],
"symlink_target": ""
}
|
"""Support for Prometheus metrics export."""
from contextlib import suppress
import logging
import string
from aiohttp import web
import prometheus_client
import voluptuous as vol
from homeassistant import core as hacore
from homeassistant.components.climate.const import (
ATTR_CURRENT_TEMPERATURE,
ATTR_HVAC_ACTION,
ATTR_HVAC_MODES,
ATTR_TARGET_TEMP_HIGH,
ATTR_TARGET_TEMP_LOW,
CURRENT_HVAC_ACTIONS,
)
from homeassistant.components.http import HomeAssistantView
from homeassistant.components.humidifier.const import (
ATTR_AVAILABLE_MODES,
ATTR_HUMIDITY,
)
from homeassistant.const import (
ATTR_BATTERY_LEVEL,
ATTR_DEVICE_CLASS,
ATTR_FRIENDLY_NAME,
ATTR_MODE,
ATTR_TEMPERATURE,
ATTR_UNIT_OF_MEASUREMENT,
CONTENT_TYPE_TEXT_PLAIN,
EVENT_STATE_CHANGED,
PERCENTAGE,
STATE_ON,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entityfilter, state as state_helper
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_registry import EVENT_ENTITY_REGISTRY_UPDATED
from homeassistant.helpers.entity_values import EntityValues
from homeassistant.helpers.typing import ConfigType
from homeassistant.util.temperature import fahrenheit_to_celsius
_LOGGER = logging.getLogger(__name__)
API_ENDPOINT = "/api/prometheus"
DOMAIN = "prometheus"
CONF_FILTER = "filter"
CONF_PROM_NAMESPACE = "namespace"
CONF_COMPONENT_CONFIG = "component_config"
CONF_COMPONENT_CONFIG_GLOB = "component_config_glob"
CONF_COMPONENT_CONFIG_DOMAIN = "component_config_domain"
CONF_DEFAULT_METRIC = "default_metric"
CONF_OVERRIDE_METRIC = "override_metric"
COMPONENT_CONFIG_SCHEMA_ENTRY = vol.Schema(
{vol.Optional(CONF_OVERRIDE_METRIC): cv.string}
)
DEFAULT_NAMESPACE = "homeassistant"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.All(
{
vol.Optional(CONF_FILTER, default={}): entityfilter.FILTER_SCHEMA,
vol.Optional(CONF_PROM_NAMESPACE, default=DEFAULT_NAMESPACE): cv.string,
vol.Optional(CONF_DEFAULT_METRIC): cv.string,
vol.Optional(CONF_OVERRIDE_METRIC): cv.string,
vol.Optional(CONF_COMPONENT_CONFIG, default={}): vol.Schema(
{cv.entity_id: COMPONENT_CONFIG_SCHEMA_ENTRY}
),
vol.Optional(CONF_COMPONENT_CONFIG_GLOB, default={}): vol.Schema(
{cv.string: COMPONENT_CONFIG_SCHEMA_ENTRY}
),
vol.Optional(CONF_COMPONENT_CONFIG_DOMAIN, default={}): vol.Schema(
{cv.string: COMPONENT_CONFIG_SCHEMA_ENTRY}
),
}
)
},
extra=vol.ALLOW_EXTRA,
)
def setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Activate Prometheus component."""
hass.http.register_view(PrometheusView(prometheus_client))
conf = config[DOMAIN]
entity_filter = conf[CONF_FILTER]
namespace = conf.get(CONF_PROM_NAMESPACE)
climate_units = hass.config.units.temperature_unit
override_metric = conf.get(CONF_OVERRIDE_METRIC)
default_metric = conf.get(CONF_DEFAULT_METRIC)
component_config = EntityValues(
conf[CONF_COMPONENT_CONFIG],
conf[CONF_COMPONENT_CONFIG_DOMAIN],
conf[CONF_COMPONENT_CONFIG_GLOB],
)
metrics = PrometheusMetrics(
prometheus_client,
entity_filter,
namespace,
climate_units,
component_config,
override_metric,
default_metric,
)
hass.bus.listen(EVENT_STATE_CHANGED, metrics.handle_state_changed)
hass.bus.listen(
EVENT_ENTITY_REGISTRY_UPDATED, metrics.handle_entity_registry_updated
)
return True
class PrometheusMetrics:
"""Model all of the metrics which should be exposed to Prometheus."""
def __init__(
self,
prometheus_cli,
entity_filter,
namespace,
climate_units,
component_config,
override_metric,
default_metric,
):
"""Initialize Prometheus Metrics."""
self.prometheus_cli = prometheus_cli
self._component_config = component_config
self._override_metric = override_metric
self._default_metric = default_metric
self._filter = entity_filter
self._sensor_metric_handlers = [
self._sensor_override_component_metric,
self._sensor_override_metric,
self._sensor_attribute_metric,
self._sensor_default_metric,
self._sensor_fallback_metric,
]
if namespace:
self.metrics_prefix = f"{namespace}_"
else:
self.metrics_prefix = ""
self._metrics = {}
self._climate_units = climate_units
def handle_state_changed(self, event):
"""Listen for new messages on the bus, and add them to Prometheus."""
if (state := event.data.get("new_state")) is None:
return
entity_id = state.entity_id
_LOGGER.debug("Handling state update for %s", entity_id)
domain, _ = hacore.split_entity_id(entity_id)
if not self._filter(state.entity_id):
return
if (old_state := event.data.get("old_state")) is not None and (
old_friendly_name := old_state.attributes.get(ATTR_FRIENDLY_NAME)
) != state.attributes.get(ATTR_FRIENDLY_NAME):
self._remove_labelsets(old_state.entity_id, old_friendly_name)
ignored_states = (STATE_UNAVAILABLE, STATE_UNKNOWN)
handler = f"_handle_{domain}"
if hasattr(self, handler) and state.state not in ignored_states:
getattr(self, handler)(state)
labels = self._labels(state)
state_change = self._metric(
"state_change", self.prometheus_cli.Counter, "The number of state changes"
)
state_change.labels(**labels).inc()
entity_available = self._metric(
"entity_available",
self.prometheus_cli.Gauge,
"Entity is available (not in the unavailable or unknown state)",
)
entity_available.labels(**labels).set(float(state.state not in ignored_states))
last_updated_time_seconds = self._metric(
"last_updated_time_seconds",
self.prometheus_cli.Gauge,
"The last_updated timestamp",
)
last_updated_time_seconds.labels(**labels).set(state.last_updated.timestamp())
def handle_entity_registry_updated(self, event):
"""Listen for deleted, disabled or renamed entities and remove them from the Prometheus Registry."""
if (action := event.data.get("action")) in (None, "create"):
return
entity_id = event.data.get("entity_id")
_LOGGER.debug("Handling entity update for %s", entity_id)
metrics_entity_id = None
if action == "remove":
metrics_entity_id = entity_id
elif action == "update":
changes = event.data.get("changes")
if "entity_id" in changes:
metrics_entity_id = changes["entity_id"]
elif "disabled_by" in changes:
metrics_entity_id = entity_id
if metrics_entity_id:
self._remove_labelsets(metrics_entity_id)
def _remove_labelsets(self, entity_id, friendly_name=None):
"""Remove labelsets matching the given entity id from all metrics."""
for _, metric in self._metrics.items():
for sample in metric.collect()[0].samples:
if sample.labels["entity"] == entity_id and (
not friendly_name or sample.labels["friendly_name"] == friendly_name
):
_LOGGER.debug(
"Removing labelset from %s for entity_id: %s",
sample.name,
entity_id,
)
try:
metric.remove(*sample.labels.values())
except KeyError:
pass
def _handle_attributes(self, state):
for key, value in state.attributes.items():
metric = self._metric(
f"{state.domain}_attr_{key.lower()}",
self.prometheus_cli.Gauge,
f"{key} attribute of {state.domain} entity",
)
try:
value = float(value)
metric.labels(**self._labels(state)).set(value)
except (ValueError, TypeError):
pass
def _metric(self, metric, factory, documentation, extra_labels=None):
labels = ["entity", "friendly_name", "domain"]
if extra_labels is not None:
labels.extend(extra_labels)
try:
return self._metrics[metric]
except KeyError:
full_metric_name = self._sanitize_metric_name(
f"{self.metrics_prefix}{metric}"
)
self._metrics[metric] = factory(
full_metric_name,
documentation,
labels,
registry=self.prometheus_cli.REGISTRY,
)
return self._metrics[metric]
@staticmethod
def _sanitize_metric_name(metric: str) -> str:
return "".join(
[
c
if c in string.ascii_letters
or c in string.digits
or c == "_"
or c == ":"
else f"u{hex(ord(c))}"
for c in metric
]
)
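    # For example (illustrative), a metric name such as "temperature_°c" is
    # sanitized to "temperature_u0xb0c": characters outside [A-Za-z0-9_:] are
    # replaced with "u" followed by their hex code point.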
@staticmethod
def state_as_number(state):
"""Return a state casted to a float."""
try:
value = state_helper.state_as_number(state)
except ValueError:
_LOGGER.debug("Could not convert %s to float", state)
value = 0
return value
@staticmethod
def _labels(state):
return {
"entity": state.entity_id,
"domain": state.domain,
"friendly_name": state.attributes.get(ATTR_FRIENDLY_NAME),
}
def _battery(self, state):
if "battery_level" in state.attributes:
metric = self._metric(
"battery_level_percent",
self.prometheus_cli.Gauge,
"Battery level as a percentage of its capacity",
)
try:
value = float(state.attributes[ATTR_BATTERY_LEVEL])
metric.labels(**self._labels(state)).set(value)
except ValueError:
pass
def _handle_binary_sensor(self, state):
metric = self._metric(
"binary_sensor_state",
self.prometheus_cli.Gauge,
"State of the binary sensor (0/1)",
)
value = self.state_as_number(state)
metric.labels(**self._labels(state)).set(value)
def _handle_input_boolean(self, state):
metric = self._metric(
"input_boolean_state",
self.prometheus_cli.Gauge,
"State of the input boolean (0/1)",
)
value = self.state_as_number(state)
metric.labels(**self._labels(state)).set(value)
def _handle_input_number(self, state):
if unit := self._unit_string(state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)):
metric = self._metric(
f"input_number_state_{unit}",
self.prometheus_cli.Gauge,
f"State of the input number measured in {unit}",
)
else:
metric = self._metric(
"input_number_state",
self.prometheus_cli.Gauge,
"State of the input number",
)
with suppress(ValueError):
value = self.state_as_number(state)
if state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == TEMP_FAHRENHEIT:
value = fahrenheit_to_celsius(value)
metric.labels(**self._labels(state)).set(value)
def _handle_device_tracker(self, state):
metric = self._metric(
"device_tracker_state",
self.prometheus_cli.Gauge,
"State of the device tracker (0/1)",
)
value = self.state_as_number(state)
metric.labels(**self._labels(state)).set(value)
def _handle_person(self, state):
metric = self._metric(
"person_state", self.prometheus_cli.Gauge, "State of the person (0/1)"
)
value = self.state_as_number(state)
metric.labels(**self._labels(state)).set(value)
def _handle_light(self, state):
metric = self._metric(
"light_brightness_percent",
self.prometheus_cli.Gauge,
"Light brightness percentage (0..100)",
)
try:
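            # The brightness attribute is reported on a 0..255 scale; it is
            # converted to a 0..100 percentage below.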
if "brightness" in state.attributes and state.state == STATE_ON:
value = state.attributes["brightness"] / 255.0
else:
value = self.state_as_number(state)
value = value * 100
metric.labels(**self._labels(state)).set(value)
except ValueError:
pass
def _handle_lock(self, state):
metric = self._metric(
"lock_state", self.prometheus_cli.Gauge, "State of the lock (0/1)"
)
value = self.state_as_number(state)
metric.labels(**self._labels(state)).set(value)
def _handle_climate_temp(self, state, attr, metric_name, metric_description):
if temp := state.attributes.get(attr):
if self._climate_units == TEMP_FAHRENHEIT:
temp = fahrenheit_to_celsius(temp)
metric = self._metric(
metric_name,
self.prometheus_cli.Gauge,
metric_description,
)
metric.labels(**self._labels(state)).set(temp)
def _handle_climate(self, state):
self._handle_climate_temp(
state,
ATTR_TEMPERATURE,
"climate_target_temperature_celsius",
"Target temperature in degrees Celsius",
)
self._handle_climate_temp(
state,
ATTR_TARGET_TEMP_HIGH,
"climate_target_temperature_high_celsius",
"Target high temperature in degrees Celsius",
)
self._handle_climate_temp(
state,
ATTR_TARGET_TEMP_LOW,
"climate_target_temperature_low_celsius",
"Target low temperature in degrees Celsius",
)
self._handle_climate_temp(
state,
ATTR_CURRENT_TEMPERATURE,
"climate_current_temperature_celsius",
"Current temperature in degrees Celsius",
)
if current_action := state.attributes.get(ATTR_HVAC_ACTION):
metric = self._metric(
"climate_action",
self.prometheus_cli.Gauge,
"HVAC action",
["action"],
)
for action in CURRENT_HVAC_ACTIONS:
metric.labels(**dict(self._labels(state), action=action)).set(
float(action == current_action)
)
current_mode = state.state
available_modes = state.attributes.get(ATTR_HVAC_MODES)
if current_mode and available_modes:
metric = self._metric(
"climate_mode",
self.prometheus_cli.Gauge,
"HVAC mode",
["mode"],
)
for mode in available_modes:
metric.labels(**dict(self._labels(state), mode=mode)).set(
float(mode == current_mode)
)
def _handle_humidifier(self, state):
humidifier_target_humidity_percent = state.attributes.get(ATTR_HUMIDITY)
if humidifier_target_humidity_percent:
metric = self._metric(
"humidifier_target_humidity_percent",
self.prometheus_cli.Gauge,
"Target Relative Humidity",
)
metric.labels(**self._labels(state)).set(humidifier_target_humidity_percent)
metric = self._metric(
"humidifier_state",
self.prometheus_cli.Gauge,
"State of the humidifier (0/1)",
)
try:
value = self.state_as_number(state)
metric.labels(**self._labels(state)).set(value)
except ValueError:
pass
current_mode = state.attributes.get(ATTR_MODE)
available_modes = state.attributes.get(ATTR_AVAILABLE_MODES)
if current_mode and available_modes:
metric = self._metric(
"humidifier_mode",
self.prometheus_cli.Gauge,
"Humidifier Mode",
["mode"],
)
for mode in available_modes:
metric.labels(**dict(self._labels(state), mode=mode)).set(
float(mode == current_mode)
)
def _handle_sensor(self, state):
unit = self._unit_string(state.attributes.get(ATTR_UNIT_OF_MEASUREMENT))
for metric_handler in self._sensor_metric_handlers:
metric = metric_handler(state, unit)
if metric is not None:
break
if metric is not None:
documentation = "State of the sensor"
if unit:
documentation = f"Sensor data measured in {unit}"
_metric = self._metric(metric, self.prometheus_cli.Gauge, documentation)
try:
value = self.state_as_number(state)
if state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == TEMP_FAHRENHEIT:
value = fahrenheit_to_celsius(value)
_metric.labels(**self._labels(state)).set(value)
except ValueError:
pass
self._battery(state)
def _sensor_default_metric(self, state, unit):
"""Get default metric."""
return self._default_metric
@staticmethod
def _sensor_attribute_metric(state, unit):
"""Get metric based on device class attribute."""
metric = state.attributes.get(ATTR_DEVICE_CLASS)
if metric is not None:
return f"sensor_{metric}_{unit}"
return None
def _sensor_override_metric(self, state, unit):
"""Get metric from override in configuration."""
if self._override_metric:
return self._override_metric
return None
def _sensor_override_component_metric(self, state, unit):
"""Get metric from override in component confioguration."""
return self._component_config.get(state.entity_id).get(CONF_OVERRIDE_METRIC)
@staticmethod
def _sensor_fallback_metric(state, unit):
"""Get metric from fallback logic for compatibility."""
if unit in (None, ""):
try:
state_helper.state_as_number(state)
except ValueError:
_LOGGER.debug("Unsupported sensor: %s", state.entity_id)
return None
return "sensor_state"
return f"sensor_unit_{unit}"
@staticmethod
def _unit_string(unit):
"""Get a formatted string of the unit."""
if unit is None:
return
units = {
TEMP_CELSIUS: "celsius",
TEMP_FAHRENHEIT: "celsius", # F should go into C metric
PERCENTAGE: "percent",
}
default = unit.replace("/", "_per_")
default = default.lower()
return units.get(unit, default)
def _handle_switch(self, state):
metric = self._metric(
"switch_state", self.prometheus_cli.Gauge, "State of the switch (0/1)"
)
try:
value = self.state_as_number(state)
metric.labels(**self._labels(state)).set(value)
except ValueError:
pass
self._handle_attributes(state)
def _handle_zwave(self, state):
self._battery(state)
def _handle_automation(self, state):
metric = self._metric(
"automation_triggered_count",
self.prometheus_cli.Counter,
"Count of times an automation has been triggered",
)
metric.labels(**self._labels(state)).inc()
def _handle_counter(self, state):
metric = self._metric(
"counter_value",
self.prometheus_cli.Gauge,
"Value of counter entities",
)
metric.labels(**self._labels(state)).set(self.state_as_number(state))
class PrometheusView(HomeAssistantView):
"""Handle Prometheus requests."""
url = API_ENDPOINT
name = "api:prometheus"
def __init__(self, prometheus_cli):
"""Initialize Prometheus view."""
self.prometheus_cli = prometheus_cli
async def get(self, request):
"""Handle request for Prometheus metrics."""
_LOGGER.debug("Received Prometheus metrics request")
return web.Response(
body=self.prometheus_cli.generate_latest(self.prometheus_cli.REGISTRY),
content_type=CONTENT_TYPE_TEXT_PLAIN,
)
|
{
"content_hash": "f65f9b8d7a1ed74938bcab6c1f500164",
"timestamp": "",
"source": "github",
"line_count": 621,
"max_line_length": 108,
"avg_line_length": 34.111111111111114,
"alnum_prop": 0.572959448614455,
"repo_name": "rohitranjan1991/home-assistant",
"id": "7690ee5d1fc43d1766b4a858cc46482816641de3",
"size": "21183",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "homeassistant/components/prometheus/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1017265"
},
{
"name": "Python",
"bytes": "1051086"
},
{
"name": "Shell",
"bytes": "3946"
}
],
"symlink_target": ""
}
|
import unittest
from .test_sqlite import TestSQLite
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "519014bb5a6638629904799a5c2aa7b7",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 35,
"avg_line_length": 16.833333333333332,
"alnum_prop": 0.6534653465346535,
"repo_name": "moskytw/mosql",
"id": "fc0f1874ab1563dc193fa2a94c7188195a06b15a",
"size": "148",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "oldtests/__main__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "219"
},
{
"name": "Makefile",
"bytes": "315"
},
{
"name": "Python",
"bytes": "89256"
}
],
"symlink_target": ""
}
|
"""Locations where we look for configs, install stuff, etc"""
import getpass
import os
import os.path
import site
import sys
import tempfile
from distutils import sysconfig
from distutils.command.install import install, SCHEME_KEYS
from pip import appdirs
from pip.compat import get_path_uid
import pip.exceptions
# Hack for flake8
install
# CA Bundle Locations
CA_BUNDLE_PATHS = [
# Debian/Ubuntu/Gentoo etc.
"/etc/ssl/certs/ca-certificates.crt",
# Fedora/RHEL
"/etc/pki/tls/certs/ca-bundle.crt",
# OpenSUSE
"/etc/ssl/ca-bundle.pem",
# OpenBSD
"/etc/ssl/cert.pem",
# FreeBSD/DragonFly
"/usr/local/share/certs/ca-root-nss.crt",
# Homebrew on OSX
"/usr/local/etc/openssl/cert.pem",
]
# Attempt to locate a CA Bundle that we can pass into requests, we have a list
# of possible ones from various systems. If we cannot find one then we'll set
# this to None so that we default to whatever requests is setup to handle.
#
# Note to Downstream: If you wish to disable this autodetection and simply use
# whatever requests does (likely you've already patched
# requests.certs.where()) then simply edit this line so
# that it reads ``CA_BUNDLE_PATH = None``.
CA_BUNDLE_PATH = next((x for x in CA_BUNDLE_PATHS if os.path.exists(x)), None)
# Application Directories
USER_CACHE_DIR = appdirs.user_cache_dir("pip")
DELETE_MARKER_MESSAGE = '''\
This file is placed here by pip to indicate the source was put
here by pip.
Once this package is successfully installed this source code will be
deleted (unless you remove this file).
'''
PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt'
def write_delete_marker_file(directory):
"""
Write the pip delete marker file into this directory.
"""
filepath = os.path.join(directory, PIP_DELETE_MARKER_FILENAME)
marker_fp = open(filepath, 'w')
marker_fp.write(DELETE_MARKER_MESSAGE)
marker_fp.close()
def running_under_virtualenv():
"""
Return True if we're running inside a virtualenv, False otherwise.
"""
if hasattr(sys, 'real_prefix'):
return True
elif sys.prefix != getattr(sys, "base_prefix", sys.prefix):
return True
return False
def virtualenv_no_global():
"""
Return True if in a venv and no system site packages.
"""
# this mirrors the logic in virtualenv.py for locating the
# no-global-site-packages.txt file
site_mod_dir = os.path.dirname(os.path.abspath(site.__file__))
no_global_file = os.path.join(site_mod_dir, 'no-global-site-packages.txt')
if running_under_virtualenv() and os.path.isfile(no_global_file):
return True
def __get_username():
""" Returns the effective username of the current process. """
if sys.platform == 'win32':
return getpass.getuser()
import pwd
return pwd.getpwuid(os.geteuid()).pw_name
def _get_build_prefix():
""" Returns a safe build_prefix """
path = os.path.join(
tempfile.gettempdir(),
'pip_build_%s' % __get_username()
)
if sys.platform == 'win32':
""" on windows(tested on 7) temp dirs are isolated """
return path
try:
os.mkdir(path)
write_delete_marker_file(path)
except OSError:
file_uid = None
try:
# raises OSError for symlinks
# https://github.com/pypa/pip/pull/935#discussion_r5307003
file_uid = get_path_uid(path)
except OSError:
file_uid = None
if file_uid != os.geteuid():
msg = (
"The temporary folder for building (%s) is either not owned by"
" you, or is a symlink." % path
)
print(msg)
print(
"pip will not work until the temporary folder is either "
"deleted or is a real directory owned by your user account."
)
raise pip.exceptions.InstallationError(msg)
return path
if running_under_virtualenv():
build_prefix = os.path.join(sys.prefix, 'build')
src_prefix = os.path.join(sys.prefix, 'src')
else:
# Note: intentionally NOT using mkdtemp
# See https://github.com/pypa/pip/issues/906 for plan to move to mkdtemp
build_prefix = _get_build_prefix()
# FIXME: keep src in cwd for now (it is not a temporary folder)
try:
src_prefix = os.path.join(os.getcwd(), 'src')
except OSError:
# In case the current working directory has been renamed or deleted
sys.exit(
"The folder you are executing pip from can no longer be found."
)
# under Mac OS X + virtualenv sys.prefix is not properly resolved
# it is something like /path/to/python/bin/..
# Note: using realpath due to tmp dirs on OSX being symlinks
build_prefix = os.path.abspath(os.path.realpath(build_prefix))
src_prefix = os.path.abspath(src_prefix)
# FIXME doesn't account for venv linked to global site-packages
site_packages = sysconfig.get_python_lib()
user_site = site.USER_SITE
user_dir = os.path.expanduser('~')
if sys.platform == 'win32':
bin_py = os.path.join(sys.prefix, 'Scripts')
bin_user = os.path.join(user_site, 'Scripts')
# buildout uses 'bin' on Windows too?
if not os.path.exists(bin_py):
bin_py = os.path.join(sys.prefix, 'bin')
bin_user = os.path.join(user_site, 'bin')
default_storage_dir = os.path.join(user_dir, 'pip')
default_config_basename = 'pip.ini'
default_config_file = os.path.join(
default_storage_dir,
default_config_basename,
)
default_log_file = os.path.join(default_storage_dir, 'pip.log')
else:
bin_py = os.path.join(sys.prefix, 'bin')
bin_user = os.path.join(user_site, 'bin')
default_storage_dir = os.path.join(user_dir, '.pip')
default_config_basename = 'pip.conf'
default_config_file = os.path.join(
default_storage_dir,
default_config_basename,
)
default_log_file = os.path.join(default_storage_dir, 'pip.log')
# Forcing to use /usr/local/bin for standard Mac OS X framework installs
# Also log to ~/Library/Logs/ for use with the Console.app log viewer
if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/':
bin_py = '/usr/local/bin'
default_log_file = os.path.join(user_dir, 'Library/Logs/pip.log')
def distutils_scheme(dist_name, user=False, home=None, root=None):
"""
Return a distutils install scheme
"""
from distutils.dist import Distribution
scheme = {}
d = Distribution({'name': dist_name})
d.parse_config_files()
i = d.get_command_obj('install', create=True)
# NOTE: setting user or home has the side-effect of creating the home dir
# or user base for installations during finalize_options()
# ideally, we'd prefer a scheme class that has no side-effects.
i.user = user or i.user
i.home = home or i.home
i.root = root or i.root
i.finalize_options()
for key in SCHEME_KEYS:
scheme[key] = getattr(i, 'install_' + key)
if i.install_lib is not None:
# install_lib takes precedence over purelib and platlib
scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))
if running_under_virtualenv():
scheme['headers'] = os.path.join(
sys.prefix,
'include',
'site',
'python' + sys.version[:3],
dist_name,
)
if root is not None:
scheme["headers"] = os.path.join(
root,
os.path.abspath(scheme["headers"])[1:],
)
return scheme
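# Example (illustrative): distutils_scheme('mypkg', user=True) returns a dict
# keyed by SCHEME_KEYS ('purelib', 'platlib', 'headers', 'scripts', 'data').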
|
{
"content_hash": "f5052b4aac937fa8ab04767fc4afc916",
"timestamp": "",
"source": "github",
"line_count": 245,
"max_line_length": 79,
"avg_line_length": 31.33061224489796,
"alnum_prop": 0.6375716519020324,
"repo_name": "laborautonomo/pip",
"id": "8adadbee251876b93c0fd86e7ac1fe02b1389564",
"size": "7676",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "pip/locations.py",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
}
|
"""The consistencygroups api."""
from oslo_log import log as logging
from oslo_utils import strutils
import webob
from webob import exc
from cinder.api import common
from cinder.api import extensions
from cinder.api.openstack import wsgi
from cinder.api.views import consistencygroups as consistencygroup_views
from cinder import consistencygroup as consistencygroupAPI
from cinder import exception
from cinder.i18n import _, _LI
LOG = logging.getLogger(__name__)
class ConsistencyGroupsController(wsgi.Controller):
"""The ConsistencyGroups API controller for the OpenStack API."""
_view_builder_class = consistencygroup_views.ViewBuilder
def __init__(self):
self.consistencygroup_api = consistencygroupAPI.API()
super(ConsistencyGroupsController, self).__init__()
def show(self, req, id):
"""Return data about the given consistency group."""
LOG.debug('show called for member %s', id)
context = req.environ['cinder.context']
try:
consistencygroup = self.consistencygroup_api.get(
context,
group_id=id)
except exception.ConsistencyGroupNotFound as error:
raise exc.HTTPNotFound(explanation=error.msg)
return self._view_builder.detail(req, consistencygroup)
def delete(self, req, id, body):
"""Delete a consistency group."""
LOG.debug('delete called for member %s', id)
context = req.environ['cinder.context']
force = False
if body:
if not self.is_valid_body(body, 'consistencygroup'):
msg = _("Missing required element 'consistencygroup' in "
"request body.")
raise exc.HTTPBadRequest(explanation=msg)
            cg_body = body['consistencygroup']
            force_value = cg_body.get('force', False)
            try:
                force = strutils.bool_from_string(force_value, strict=True)
            except ValueError:
                msg = _("Invalid value '%s' for force.") % force_value
                raise exc.HTTPBadRequest(explanation=msg)
LOG.info(_LI('Delete consistency group with id: %s'), id,
context=context)
try:
group = self.consistencygroup_api.get(context, id)
self.consistencygroup_api.delete(context, group, force)
except exception.ConsistencyGroupNotFound as error:
raise exc.HTTPNotFound(explanation=error.msg)
except exception.InvalidConsistencyGroup as error:
raise exc.HTTPBadRequest(explanation=error.msg)
return webob.Response(status_int=202)
def index(self, req):
"""Returns a summary list of consistency groups."""
return self._get_consistencygroups(req, is_detail=False)
def detail(self, req):
"""Returns a detailed list of consistency groups."""
return self._get_consistencygroups(req, is_detail=True)
def _get_consistencygroups(self, req, is_detail):
"""Returns a list of consistency groups through view builder."""
context = req.environ['cinder.context']
filters = req.params.copy()
marker, limit, offset = common.get_pagination_params(filters)
sort_keys, sort_dirs = common.get_sort_params(filters)
consistencygroups = self.consistencygroup_api.get_all(
context, filters=filters, marker=marker, limit=limit,
offset=offset, sort_keys=sort_keys, sort_dirs=sort_dirs)
if is_detail:
consistencygroups = self._view_builder.detail_list(
req, consistencygroups)
else:
consistencygroups = self._view_builder.summary_list(
req, consistencygroups)
return consistencygroups
@wsgi.response(202)
def create(self, req, body):
"""Create a new consistency group."""
LOG.debug('Creating new consistency group %s', body)
self.assert_valid_body(body, 'consistencygroup')
context = req.environ['cinder.context']
consistencygroup = body['consistencygroup']
self.validate_name_and_description(consistencygroup)
name = consistencygroup.get('name', None)
description = consistencygroup.get('description', None)
volume_types = consistencygroup.get('volume_types', None)
if not volume_types:
msg = _("volume_types must be provided to create "
"consistency group %(name)s.") % {'name': name}
raise exc.HTTPBadRequest(explanation=msg)
availability_zone = consistencygroup.get('availability_zone', None)
LOG.info(_LI("Creating consistency group %(name)s."),
{'name': name},
context=context)
try:
new_consistencygroup = self.consistencygroup_api.create(
context, name, description, volume_types,
availability_zone=availability_zone)
except exception.InvalidConsistencyGroup as error:
raise exc.HTTPBadRequest(explanation=error.msg)
except exception.InvalidVolumeType as error:
raise exc.HTTPBadRequest(explanation=error.msg)
except exception.ConsistencyGroupNotFound as error:
raise exc.HTTPNotFound(explanation=error.msg)
retval = self._view_builder.summary(req, new_consistencygroup)
return retval
@wsgi.response(202)
def create_from_src(self, req, body):
"""Create a new consistency group from a source.
        The source can be a CG snapshot or a CG. Note that,
        unlike the "create" API above, this does not require
        volume_types.
"""
LOG.debug('Creating new consistency group %s.', body)
self.assert_valid_body(body, 'consistencygroup-from-src')
context = req.environ['cinder.context']
consistencygroup = body['consistencygroup-from-src']
self.validate_name_and_description(consistencygroup)
name = consistencygroup.get('name', None)
description = consistencygroup.get('description', None)
cgsnapshot_id = consistencygroup.get('cgsnapshot_id', None)
source_cgid = consistencygroup.get('source_cgid', None)
if not cgsnapshot_id and not source_cgid:
msg = _("Either 'cgsnapshot_id' or 'source_cgid' must be "
"provided to create consistency group %(name)s "
"from source.") % {'name': name}
raise exc.HTTPBadRequest(explanation=msg)
if cgsnapshot_id and source_cgid:
msg = _("Cannot provide both 'cgsnapshot_id' and 'source_cgid' "
"to create consistency group %(name)s from "
"source.") % {'name': name}
raise exc.HTTPBadRequest(explanation=msg)
if cgsnapshot_id:
LOG.info(_LI("Creating consistency group %(name)s from "
"cgsnapshot %(snap)s."),
{'name': name, 'snap': cgsnapshot_id},
context=context)
elif source_cgid:
LOG.info(_LI("Creating consistency group %(name)s from "
"source consistency group %(source_cgid)s."),
{'name': name, 'source_cgid': source_cgid},
context=context)
try:
new_consistencygroup = self.consistencygroup_api.create_from_src(
context, name, description, cgsnapshot_id, source_cgid)
except exception.InvalidConsistencyGroup as error:
raise exc.HTTPBadRequest(explanation=error.msg)
except exception.CgSnapshotNotFound as error:
raise exc.HTTPNotFound(explanation=error.msg)
except exception.ConsistencyGroupNotFound as error:
raise exc.HTTPNotFound(explanation=error.msg)
except exception.CinderException as error:
raise exc.HTTPBadRequest(explanation=error.msg)
retval = self._view_builder.summary(req, new_consistencygroup)
return retval
def _check_update_parameters(self, name, description, add_volumes,
remove_volumes):
if not (name or description or add_volumes or remove_volumes):
msg = _("Name, description, add_volumes, and remove_volumes "
"can not be all empty in the request body.")
raise exc.HTTPBadRequest(explanation=msg)
def _update(self, context, id, name, description, add_volumes,
remove_volumes,
allow_empty=False):
LOG.info(_LI("Updating consistency group %(id)s with name %(name)s "
"description: %(description)s add_volumes: "
"%(add_volumes)s remove_volumes: %(remove_volumes)s."),
{'id': id,
'name': name,
'description': description,
'add_volumes': add_volumes,
'remove_volumes': remove_volumes},
context=context)
try:
group = self.consistencygroup_api.get(context, id)
self.consistencygroup_api.update(
context, group, name, description,
add_volumes, remove_volumes, allow_empty)
except exception.ConsistencyGroupNotFound as error:
raise exc.HTTPNotFound(explanation=error.msg)
except exception.InvalidConsistencyGroup as error:
raise exc.HTTPBadRequest(explanation=error.msg)
def update(self, req, id, body):
"""Update the consistency group.
Expected format of the input parameter 'body':
.. code-block:: json
{
"consistencygroup":
{
"name": "my_cg",
"description": "My consistency group",
"add_volumes": "volume-uuid-1,volume-uuid-2,...",
"remove_volumes": "volume-uuid-8,volume-uuid-9,..."
}
}
"""
LOG.debug('Update called for consistency group %s.', id)
if not body:
msg = _("Missing request body.")
raise exc.HTTPBadRequest(explanation=msg)
self.assert_valid_body(body, 'consistencygroup')
context = req.environ['cinder.context']
consistencygroup = body.get('consistencygroup', None)
self.validate_name_and_description(consistencygroup)
name = consistencygroup.get('name', None)
description = consistencygroup.get('description', None)
add_volumes = consistencygroup.get('add_volumes', None)
remove_volumes = consistencygroup.get('remove_volumes', None)
self._check_update_parameters(name, description, add_volumes,
remove_volumes)
self._update(context, id, name, description, add_volumes,
remove_volumes)
return webob.Response(status_int=202)
class Consistencygroups(extensions.ExtensionDescriptor):
"""consistency groups support."""
name = 'Consistencygroups'
alias = 'consistencygroups'
updated = '2014-08-18T00:00:00+00:00'
def get_resources(self):
resources = []
res = extensions.ResourceExtension(
Consistencygroups.alias, ConsistencyGroupsController(),
collection_actions={'detail': 'GET', 'create_from_src': 'POST'},
member_actions={'delete': 'POST', 'update': 'PUT'})
resources.append(res)
return resources
|
{
"content_hash": "57accf5c08ee467b1fd0df76352a8711",
"timestamp": "",
"source": "github",
"line_count": 276,
"max_line_length": 78,
"avg_line_length": 41.677536231884055,
"alnum_prop": 0.6098409110666783,
"repo_name": "bswartz/cinder",
"id": "a9abc3aef869abb1a324e77cdb7e5fc77dd3052e",
"size": "12146",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cinder/api/contrib/consistencygroups.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "16345375"
},
{
"name": "Shell",
"bytes": "8187"
}
],
"symlink_target": ""
}
|
while True:
s = input('Enter something: ')
if s == 'quit':
break
if len(s) < 3:
print('Too small')
continue
print('Input is of sufficient length')
|
{
"content_hash": "b9cc67befd2104e4d55da94bf3b81bae",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 42,
"avg_line_length": 18.5,
"alnum_prop": 0.5351351351351351,
"repo_name": "louistin/thinkstation",
"id": "3ba4969fad21bb809acc5cbab148cd6f65342a32",
"size": "229",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "a_byte_of_python/unit_7_control_flow/continue.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "2640"
},
{
"name": "C++",
"bytes": "21715"
},
{
"name": "GCC Machine Description",
"bytes": "559"
},
{
"name": "Go",
"bytes": "3714"
},
{
"name": "Python",
"bytes": "26890"
}
],
"symlink_target": ""
}
|
import logging
logger = logging.getLogger("membership.signals")
from django.dispatch import Signal
# Signals
send_as_email = Signal(providing_args=["instance"])
send_preapprove_email = Signal(providing_args=["instance", "user"])
send_duplicate_payment_notice = Signal(providing_args=["instance", "user", "billingcycle"])
|
{
"content_hash": "a405164128d6c91f13ae34ab0d1a946f",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 89,
"avg_line_length": 35.666666666666664,
"alnum_prop": 0.7663551401869159,
"repo_name": "AriMartti/sikteeri",
"id": "762cf79128e0e849aaf610eeb3556ab81344cc7d",
"size": "346",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "membership/signals.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "21557"
},
{
"name": "HTML",
"bytes": "56926"
},
{
"name": "JavaScript",
"bytes": "13176"
},
{
"name": "Python",
"bytes": "369068"
},
{
"name": "Shell",
"bytes": "2188"
}
],
"symlink_target": ""
}
|
from __future__ import print_function, unicode_literals
import os
import sys
import tarfile
import subprocess
import shutil
import io
if sys.version_info[0] < 3:
from urllib import urlopen
else:
from urllib.request import urlopen
try:
import argparse as ap
except ImportError:
import pyne._argparse as ap
absexpanduser = lambda x: os.path.abspath(os.path.expanduser(x))
def check_windows_cmake(cmake_cmd):
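    """Pick a CMake generator on Windows based on the toolchain found on PATH."""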
if os.name == 'nt':
files_on_path = set()
for p in os.environ['PATH'].split(';')[::-1]:
if os.path.exists(p):
files_on_path.update(os.listdir(p))
if 'cl.exe' in files_on_path:
pass
elif 'sh.exe' in files_on_path:
cmake_cmd += ['-G "MSYS Makefiles"']
elif 'gcc.exe' in files_on_path:
cmake_cmd += ['-G "MinGW Makefiles"']
cmake_cmd = ' '.join(cmake_cmd)
def update_describe():
root_dir = os.path.split(__file__)[0]
fname = os.path.join(root_dir, 'src', 'version.cc.in')
cmd = 'touch {0}'.format(fname)
subprocess.check_call(cmd.split(), shell=(os.name == 'nt'))
def install_cyclus(args):
if not os.path.exists(args.build_dir):
os.mkdir(args.build_dir)
elif args.clean_build:
shutil.rmtree(args.build_dir)
os.mkdir(args.build_dir)
root_dir = os.path.split(__file__)[0]
makefile = os.path.join(args.build_dir, 'Makefile')
if not os.path.exists(makefile):
rtn = subprocess.call(['which', 'cmake'], shell=(os.name == 'nt'))
if rtn != 0:
sys.exit("CMake could not be found, "
"please install CMake before developing Cyclus.")
cmake_cmd = ['cmake', os.path.abspath(root_dir)]
if args.prefix:
cmake_cmd += ['-DCMAKE_INSTALL_PREFIX=' +
absexpanduser(args.prefix)]
if args.cmake_prefix_path:
cmake_cmd += ['-DCMAKE_PREFIX_PATH=' +
absexpanduser(args.cmake_prefix_path)]
if args.coin_root:
cmake_cmd += ['-DCOIN_ROOT_DIR=' + absexpanduser(args.coin_root)]
if args.boost_root:
cmake_cmd += ['-DBOOST_ROOT=' + absexpanduser(args.boost_root)]
if args.build_type:
cmake_cmd += ['-DCMAKE_BUILD_TYPE=' + args.build_type]
if args.D is not None:
cmake_cmd += ['-D' + x for x in args.D]
check_windows_cmake(cmake_cmd)
rtn = subprocess.check_call(cmake_cmd, cwd=args.build_dir,
shell=(os.name == 'nt'))
if args.config_only:
return
if args.update:
update_describe()
make_cmd = ['make']
if args.threads:
make_cmd += ['-j' + str(args.threads)]
rtn = subprocess.check_call(make_cmd, cwd=args.build_dir,
shell=(os.name == 'nt'))
if args.test:
make_cmd += ['test']
elif not args.build_only:
make_cmd += ['install']
rtn = subprocess.check_call(make_cmd, cwd=args.build_dir,
shell=(os.name == 'nt'))
def uninstall_cyclus(args):
makefile = os.path.join(args.build_dir, 'Makefile')
if not os.path.exists(args.build_dir) or not os.path.exists(makefile):
        sys.exit("May not uninstall Cyclus since it has not yet been built.")
rtn = subprocess.check_call(['make', 'uninstall'], cwd=args.build_dir,
shell=(os.name == 'nt'))
def main():
localdir = absexpanduser('~/.local')
description = "A Cyclus installation helper script. " +\
"For more information, please see cyclus.github.com."
parser = ap.ArgumentParser(description=description)
build_dir = 'where to place the build directory'
parser.add_argument('--build_dir', help=build_dir, default='build')
uninst = 'uninstall'
parser.add_argument('--uninstall', action='store_true', help=uninst, default=False)
noupdate = 'do not update the hash in version.cc'
parser.add_argument('--no-update', dest='update', action='store_false',
help=noupdate, default=True)
clean = 'attempt to remove the build directory before building'
parser.add_argument('--clean-build', action='store_true', help=clean)
threads = "the number of threads to use in the make step"
parser.add_argument('-j', '--threads', type=int, help=threads)
prefix = "the relative path to the installation directory"
parser.add_argument('--prefix', help=prefix, default=localdir)
config_only = 'only configure the package, do not build or install'
parser.add_argument('--config-only', action='store_true', help=config_only)
build_only = 'only build the package, do not install'
parser.add_argument('--build-only', action='store_true', help=build_only)
test = 'run tests after building'
parser.add_argument('--test', action='store_true', help=test)
coin = "the relative path to the Coin-OR libraries directory"
parser.add_argument('--coin_root', help=coin)
boost = "the relative path to the Boost libraries directory"
parser.add_argument('--boost_root', help=boost)
cmake_prefix_path = "the cmake prefix path for use with FIND_PACKAGE, " + \
"FIND_PATH, FIND_PROGRAM, or FIND_LIBRARY macros"
parser.add_argument('--cmake_prefix_path', help=cmake_prefix_path)
build_type = "the CMAKE_BUILD_TYPE"
parser.add_argument('--build_type', help=build_type)
parser.add_argument('-D', metavar='VAR', action='append',
                        help='Set environment variable(s).')
args = parser.parse_args()
if args.uninstall:
uninstall_cyclus(args)
else:
install_cyclus(args)
if __name__ == "__main__":
main()
|
{
"content_hash": "4dd7084bbc9d2dda48406cae8d2cab20",
"timestamp": "",
"source": "github",
"line_count": 161,
"max_line_length": 87,
"avg_line_length": 35.77639751552795,
"alnum_prop": 0.6048611111111111,
"repo_name": "gidden/cyclus",
"id": "75cf78faed90cc44642f31974b3bab40020f92d6",
"size": "5783",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "install.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "1654"
},
{
"name": "C++",
"bytes": "3761378"
},
{
"name": "CMake",
"bytes": "103455"
},
{
"name": "Python",
"bytes": "202059"
},
{
"name": "Shell",
"bytes": "4323"
}
],
"symlink_target": ""
}
|
"""
Prefix processing procedures
"""
from ipv4seq import *
def getholes(prefix, ipstack, opt_g=False):
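    """Process ``prefix`` against the prefixes on ``ipstack`` and return a
    tuple ``(holes, netunion, ipstack)`` of gap prefixes, collected prefixes
    and the updated stack."""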
holes = []
netunion = []
while len(ipstack):
cur_prefix = ipstack.pop()
sum_prefix = netsum(cur_prefix, prefix)
if len(sum_prefix) and opt_g:
prefix = (sum_prefix[0], sum_prefix[1], 0,)
continue
elif issubnet(cur_prefix, prefix) and prefix[2] == cur_prefix[2]:
prefix = cur_prefix
break
elif isseq(cur_prefix, prefix) and opt_g:
if prefix[1] <= cur_prefix[1]:
netunion.extend(ipstack)
netunion.append(cur_prefix)
ipstack[:] = []
break
else:
ipstack.append(cur_prefix)
break
else:
for gapprefix, mask, aspath in subnets(cur_prefix[0] + ipaddrcount(cur_prefix[1]), prefix[0]):
holes.append((gapprefix, mask, aspath))
netunion.extend(ipstack)
netunion.append(cur_prefix)
ipstack[:] = []
break
ipstack.append(prefix)
return holes, netunion, ipstack
|
{
"content_hash": "8405e75c2e7e9925e8243c40d8cecd84",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 106,
"avg_line_length": 30.2,
"alnum_prop": 0.5107615894039735,
"repo_name": "Urlandi/bgptablehole",
"id": "125dd2e8d6926ed2e0344484c9d53bb4fd365b25",
"size": "1233",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ipv4holes.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "5151"
},
{
"name": "Python",
"bytes": "13837"
}
],
"symlink_target": ""
}
|
from __future__ import division, print_function, unicode_literals, \
absolute_import
"""
Wulff construction to create the nanoparticle
"""
from six.moves import range
import itertools
from math import gcd
from functools import reduce
import numpy as np
from pymatgen.core.structure import Structure, Molecule
from pymatgen.symmetry.analyzer import SpacegroupAnalyzer
from pymatgen.util.coord import in_coord_list
from mpinterfaces import get_struct_from_mp
from mpinterfaces.default_logger import get_default_logger
logger = get_default_logger(__name__)
class Nanoparticle(Molecule):
"""
Construct nanoparticle using wulff construction
"""
def __init__(self, structure, rmax=15, hkl_family=((1, 0, 0), (1, 1, 1)),
surface_energies=(28, 25)):
self.structure = structure
self.rmax = rmax
self.hkl_family = list(hkl_family)
self.surface_energies = list(surface_energies)
spherical_neighbors = self.structure.get_sites_in_sphere(
[0.0, 0.0, 0.0], self.rmax)
recp_lattice = self.structure.lattice.reciprocal_lattice_crystallographic
self.recp_lattice = recp_lattice.scale(1)
self.set_miller_family()
Molecule.__init__(self, [sn[0].species_and_occu
for sn in spherical_neighbors],
[sn[0].coords for sn in spherical_neighbors],
charge=0)
def set_miller_family(self):
"""
get all miller indices for the given maximum index
get the list of indices that correspond to the given family
of indices
"""
recp_structure = Structure(self.recp_lattice, ["H"], [[0, 0, 0]])
analyzer = SpacegroupAnalyzer(recp_structure, symprec=0.001)
symm_ops = analyzer.get_symmetry_operations()
max_index = max(max(m) for m in self.hkl_family)
r = list(range(-max_index, max_index + 1))
r.reverse()
miller_indices = []
self.all_equiv_millers = []
self.all_surface_energies = []
for miller in itertools.product(r, r, r):
if any([i != 0 for i in miller]):
d = abs(reduce(gcd, miller))
miller_index = tuple([int(i / d) for i in miller])
for op in symm_ops:
for i, u_miller in enumerate(self.hkl_family):
if in_coord_list(u_miller, op.operate(miller_index)):
self.all_equiv_millers.append(miller_index)
self.all_surface_energies.append(
self.surface_energies[i])
def get_normals(self):
"""
get the normal to the plane (h,k,l)
"""
normals = []
for hkl in self.all_equiv_millers:
normal = self.recp_lattice.matrix[0, :] * hkl[0] + \
self.recp_lattice.matrix[1, :] * hkl[1] + \
self.recp_lattice.matrix[2, :] * hkl[2]
normals.append(normal / np.linalg.norm(normal))
return normals
def get_centered_molecule(self):
center = self.center_of_mass
new_coords = np.array(self.cart_coords) - center
return Molecule(self.species_and_occu, new_coords,
charge=self._charge,
spin_multiplicity=self._spin_multiplicity,
site_properties=self.site_properties)
def create(self):
"""
        Creates the nanoparticle by chopping off the corners normal to the
        specified surfaces.
        The distance to each surface from the center of the particle =
        normalized surface energy * max radius
"""
mol = self.get_centered_molecule()
normalized_surface_energies = \
np.array(self.all_surface_energies) / float(
max(self.all_surface_energies))
surface_normals = self.get_normals()
remove_sites = []
for i, site in enumerate(mol):
for j, normal in enumerate(surface_normals):
n = np.array(normal)
n = n / np.linalg.norm(n)
if np.dot(site.coords, n) + self.rmax * \
normalized_surface_energies[j] <= 0:
remove_sites.append(i)
break
self.remove_sites(remove_sites)
# new_sites = [site for k, site in enumerate(mol) if k not in remove_sites]
# return Molecule.from_sites(new_sites)
if __name__ == '__main__':
# nanopartcle settings
# max radius in angstroms
rmax = 15
# surface families to be chopped off
surface_families = [(1, 0, 0), (1, 1, 1)]
# could be in any units, will be normalized
surface_energies = [28, 25]
    # caution: set the structure wrt which the miller indices are specified
# use your own API key
structure = get_struct_from_mp('PbS')
    # primitive ---> conventional cell
sa = SpacegroupAnalyzer(structure)
structure_conventional = sa.get_conventional_standard_structure()
nanoparticle = Nanoparticle(structure_conventional, rmax=rmax,
hkl_family=surface_families,
surface_energies=surface_energies)
nanoparticle.create()
nanoparticle.to(fmt='xyz', filename='nanoparticle.xyz')
"""
Wulff construction using the ASE package
works only for cubic systems and doesn't support multiatom basis
from ase.cluster import wulff_construction
from pymatgen.io.aseio import AseAtomsAdaptor
symbol = 'Pt'
surfaces = [ (1,0,0), (1,1,1) ]
surface_energies = [1, 1]
size = 200 #number of atoms
structure = "fcc"
latticeconstant = 5.0
atoms = wulff_construction(symbol, surfaces, surface_energies, size, structure,
rounding='closest', latticeconstant=latticeconstant,
debug=False, maxiter=100)
#convert to pymatgen structure
pgen_structure = AseAtomsAdaptor().get_structure(atoms)
pgen_structure.to(fmt='poscar', filename='POSCAR_pt_nano.vasp')
"""
|
{
"content_hash": "626adc23029ebfd0e2d815bece0e9be3",
"timestamp": "",
"source": "github",
"line_count": 163,
"max_line_length": 83,
"avg_line_length": 37.33742331288344,
"alnum_prop": 0.5989155438711797,
"repo_name": "henniggroup/MPInterfaces",
"id": "de6e60538bf173846c5cf4e0459874d96e9db459",
"size": "6182",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mpinterfaces/nanoparticle.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "573231"
},
{
"name": "Shell",
"bytes": "1931"
}
],
"symlink_target": ""
}
|
"""Test configs for reciprocal."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow.compat.v1 as tf
from tensorflow.lite.testing.zip_test_utils import create_tensor_data
from tensorflow.lite.testing.zip_test_utils import make_zip_of_tests
from tensorflow.lite.testing.zip_test_utils import register_make_test_function
@register_make_test_function("make_reciprocal_tests")
def make_reciprocal_tests(options):
"""Make a set of tests to do reciprocal."""
  # Choose a set of parameters
test_parameters = [{
"input_dtype": [tf.float32, tf.int32, tf.int64],
"input_shape": [[1, 2], [1, 2, 3, 4], [10]],
}]
def build_graph(parameters):
"""Build the graph for cond tests."""
input_tensor = tf.compat.v1.placeholder(
dtype=parameters["input_dtype"],
name="input",
shape=parameters["input_shape"])
out = tf.math.reciprocal(input_tensor)
return [input_tensor], [out]
def build_inputs(parameters, sess, inputs, outputs):
input_values = [
create_tensor_data(parameters["input_dtype"], parameters["input_shape"])
]
return input_values, sess.run(
outputs, feed_dict=dict(zip(inputs, input_values)))
make_zip_of_tests(
options,
test_parameters,
build_graph,
build_inputs,
expected_tf_failures=6)
|
{
"content_hash": "6c5aa624546d9c2779631828d5348d0a",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 80,
"avg_line_length": 31.545454545454547,
"alnum_prop": 0.6772334293948127,
"repo_name": "sarvex/tensorflow",
"id": "1253d26c86f1e7c11e062436b0372b7d2c748e75",
"size": "2077",
"binary": false,
"copies": "3",
"ref": "refs/heads/main",
"path": "tensorflow/lite/experimental/mlir/testing/op_tests/reciprocal.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "148184"
},
{
"name": "C++",
"bytes": "6224499"
},
{
"name": "CSS",
"bytes": "107"
},
{
"name": "HTML",
"bytes": "650478"
},
{
"name": "Java",
"bytes": "53519"
},
{
"name": "JavaScript",
"bytes": "6659"
},
{
"name": "Jupyter Notebook",
"bytes": "777935"
},
{
"name": "Objective-C",
"bytes": "1288"
},
{
"name": "Protocol Buffer",
"bytes": "61743"
},
{
"name": "Python",
"bytes": "3474762"
},
{
"name": "Shell",
"bytes": "45640"
},
{
"name": "TypeScript",
"bytes": "283668"
}
],
"symlink_target": ""
}
|
from argparse import ArgumentParser
from sys import stdin
def parse_args():
p = ArgumentParser()
p.add_argument('columns', nargs='*', type=int,
help="Specify which columns should be reversed."
"All columns are reversed if not specified")
p.add_argument('-d', '--delimiter', type=str, default='\t')
return p.parse_args()
def reverse_columns_stdin(columns, delimiter):
for line in stdin:
try:
line = line.decode('utf8')
except AttributeError:
pass
fd = line.rstrip('\n').split(delimiter)
out = []
for i, field in enumerate(fd):
if not columns or i+1 in columns:
out.append(field[::-1])
else:
out.append(field)
try:
print(delimiter.join(out).encode('utf8'))
except AttributeError:
print(delimiter.join(out))
def main():
args = parse_args()
reverse_columns_stdin(args.columns, args.delimiter)
if __name__ == '__main__':
main()
|
{
"content_hash": "d7e8015595d91be288e425e6f94b8a1c",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 67,
"avg_line_length": 27.894736842105264,
"alnum_prop": 0.560377358490566,
"repo_name": "juditacs/morph-segmentation",
"id": "55ee189fb264c021b398a29b0f89b9f06d0eb564",
"size": "1224",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "scripts/reverse_columns.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "281669"
},
{
"name": "Python",
"bytes": "97835"
}
],
"symlink_target": ""
}
|
bl_info = {
"name": "Sure UVW Map v.0.5.1",
"author": "Alexander Milovsky (www.milovsky.ru)",
"version": (0, 5),
"blender": (2, 6, 3),
"api": 45093,
"location": "Properties > Object Data (below UV Maps), parameters in Tool Properties",
"description": "Box / Best Planar UVW Map (Make Material With Raster Texture First!)",
"warning": "",
"wiki_url": "http://blenderartists.org/forum/showthread.php?236631-Addon-Simple-Box-UVW-Map-Modifier",
"tracker_url": "https://projects.blender.org/tracker/index.php",
"category": "Mesh"}
import bpy
from bpy.props import BoolProperty, FloatProperty, StringProperty, FloatVectorProperty
from math import sin, cos, pi
from mathutils import Vector
# globals for Box Mapping
all_scale_def = 1
tex_aspect = 1.0
x_offset_def = 0
y_offset_def = 0
z_offset_def = 0
x_rot_def = 0
y_rot_def = 0
z_rot_def = 0
# globals for Best Planar Mapping
xoffset_def = 0
yoffset_def = 0
zrot_def = 0
# Preview flag
preview_flag = True
def show_texture():
obj = bpy.context.active_object
mesh = obj.data
is_editmode = (obj.mode == 'EDIT')
# if in EDIT Mode switch to OBJECT
if is_editmode:
bpy.ops.object.mode_set(mode='OBJECT', toggle=False)
# if no UVtex - create it
if not mesh.uv_textures:
uvtex = bpy.ops.mesh.uv_texture_add()
uvtex = mesh.uv_textures.active
uvtex.active_render = True
img = None
aspect = 1.0
mat = obj.active_material
try:
if mat:
img = mat.active_texture
for f in mesh.polygons:
if not is_editmode or f.select:
uvtex.data[f.index].image = img.image
else:
img = None
except:
pass
# Back to EDIT Mode
if is_editmode:
bpy.ops.object.mode_set(mode='EDIT', toggle=False)
def box_map():
#print('** Boxmap **')
global all_scale_def,x_offset_def,y_offset_def,z_offset_def,x_rot_def,y_rot_def,z_rot_def, tex_aspect
obj = bpy.context.active_object
mesh = obj.data
is_editmode = (obj.mode == 'EDIT')
# if in EDIT Mode switch to OBJECT
if is_editmode:
bpy.ops.object.mode_set(mode='OBJECT', toggle=False)
# if no UVtex - create it
if not mesh.uv_textures:
uvtex = bpy.ops.mesh.uv_texture_add()
uvtex = mesh.uv_textures.active
#uvtex.active_render = True
img = None
aspect = 1.0
mat = obj.active_material
try:
if mat:
img = mat.active_texture
aspect = img.image.size[0]/img.image.size[1]
except:
pass
aspect = aspect * tex_aspect
#
# Main action
#
if all_scale_def:
sc = 1.0/all_scale_def
else:
sc = 1.0
sx = 1 * sc
sy = 1 * sc
sz = 1 * sc
ofx = x_offset_def
ofy = y_offset_def
ofz = z_offset_def
rx = x_rot_def / 180 * pi
ry = y_rot_def / 180 * pi
rz = z_rot_def / 180 * pi
crx = cos(rx)
srx = sin(rx)
cry = cos(ry)
sry = sin(ry)
crz = cos(rz)
srz = sin(rz)
ofycrx = ofy * crx
ofzsrx = ofz * srx
ofysrx = ofy * srx
ofzcrx = ofz * crx
ofxcry = ofx * cry
ofzsry = ofz * sry
ofxsry = ofx * sry
ofzcry = ofz * cry
ofxcrz = ofx * crz
ofysrz = ofy * srz
ofxsrz = ofx * srz
ofycrz = ofy * crz
#uvs = mesh.uv_loop_layers[mesh.uv_loop_layers.active_index].data
uvs = mesh.uv_layers.active.data
for i, pol in enumerate(mesh.polygons):
if not is_editmode or mesh.polygons[i].select:
for j, loop in enumerate(mesh.polygons[i].loop_indices):
v_idx = mesh.loops[loop].vertex_index
#print('before[%s]:' % v_idx)
#print(uvs[loop].uv)
n = mesh.polygons[i].normal
co = mesh.vertices[v_idx].co
x = co.x * sx
y = co.y * sy
z = co.z * sz
if abs(n[0]) > abs(n[1]) and abs(n[0]) > abs(n[2]):
# X
if n[0] >= 0:
uvs[loop].uv[0] = y * crx + z * srx - ofycrx - ofzsrx
uvs[loop].uv[1] = -y * aspect * srx + z * aspect * crx + ofysrx - ofzcrx
else:
uvs[loop].uv[0] = -y * crx + z * srx + ofycrx - ofzsrx
uvs[loop].uv[1] = y * aspect * srx + z * aspect * crx - ofysrx - ofzcrx
elif abs(n[1]) > abs(n[0]) and abs(n[1]) > abs(n[2]):
# Y
if n[1] >= 0:
uvs[loop].uv[0] = -x * cry + z * sry + ofxcry - ofzsry
uvs[loop].uv[1] = x * aspect * sry + z * aspect * cry - ofxsry - ofzcry
else:
uvs[loop].uv[0] = x * cry + z * sry - ofxcry - ofzsry
uvs[loop].uv[1] = -x * aspect * sry + z * aspect * cry + ofxsry - ofzcry
else:
# Z
if n[2] >= 0:
                        uvs[loop].uv[0] = x * crz + y * srz - ofxcrz - ofysrz
uvs[loop].uv[1] = -x * aspect * srz + y * aspect * crz + ofxsrz - ofycrz
else:
uvs[loop].uv[0] = -y * srz - x * crz + ofxcrz - ofysrz
uvs[loop].uv[1] = y * aspect * crz - x * aspect * srz - ofxsrz - ofycrz
# Back to EDIT Mode
if is_editmode:
bpy.ops.object.mode_set(mode='EDIT', toggle=False)
# Best Planar Mapping
def best_planar_map():
global all_scale_def,xoffset_def,yoffset_def,zrot_def, tex_aspect
obj = bpy.context.active_object
mesh = obj.data
is_editmode = (obj.mode == 'EDIT')
# if in EDIT Mode switch to OBJECT
if is_editmode:
bpy.ops.object.mode_set(mode='OBJECT', toggle=False)
# if no UVtex - create it
if not mesh.uv_textures:
uvtex = bpy.ops.mesh.uv_texture_add()
uvtex = mesh.uv_textures.active
#uvtex.active_render = True
img = None
aspect = 1.0
mat = obj.active_material
try:
if mat:
img = mat.active_texture
aspect = img.image.size[0]/img.image.size[1]
except:
pass
aspect = aspect * tex_aspect
#
# Main action
#
if all_scale_def:
sc = 1.0/all_scale_def
else:
sc = 1.0
# Calculate Average Normal
v = Vector((0,0,0))
cnt = 0
for f in mesh.polygons:
if f.select:
cnt += 1
v = v + f.normal
zv = Vector((0,0,1))
q = v.rotation_difference(zv)
sx = 1 * sc
sy = 1 * sc
sz = 1 * sc
ofx = xoffset_def
ofy = yoffset_def
rz = zrot_def / 180 * pi
cosrz = cos(rz)
sinrz = sin(rz)
#uvs = mesh.uv_loop_layers[mesh.uv_loop_layers.active_index].data
uvs = mesh.uv_layers.active.data
for i, pol in enumerate(mesh.polygons):
if not is_editmode or mesh.polygons[i].select:
for j, loop in enumerate(mesh.polygons[i].loop_indices):
v_idx = mesh.loops[loop].vertex_index
n = pol.normal
co = q * mesh.vertices[v_idx].co
x = co.x * sx
y = co.y * sy
z = co.z * sz
uvs[loop].uv[0] = x * cosrz - y * sinrz + xoffset_def
uvs[loop].uv[1] = aspect*(- x * sinrz - y * cosrz) + yoffset_def
# Back to EDIT Mode
if is_editmode:
bpy.ops.object.mode_set(mode='EDIT', toggle=False)
class SureUVWOperator(bpy.types.Operator):
bl_idname = "object.sureuvw_operator"
bl_label = "Sure UVW Map"
bl_space_type = "PROPERTIES"
bl_region_type = "WINDOW"
bl_context = "data"
bl_options = {'REGISTER', 'UNDO'}
action = StringProperty()
size = FloatProperty(name="Size", default=1.0, precision=4)
rot = FloatVectorProperty(name="XYZ Rotation")
offset = FloatVectorProperty(name="XYZ offset", precision=4)
zrot = FloatProperty(name="Z rotation", default=0.0)
xoffset = FloatProperty(name="X offset", default=0.0, precision=4)
yoffset = FloatProperty(name="Y offset", default=0.0, precision=4)
texaspect = FloatProperty(name="Texture aspect", default=1.0, precision=4)
flag90 = BoolProperty()
flag90ccw = BoolProperty()
@classmethod
def poll(cls, context):
obj = context.active_object
return (obj and obj.type == 'MESH')
def execute(self, context):
#print('** execute **')
#print(self.action)
global all_scale_def,x_offset_def,y_offset_def,z_offset_def,x_rot_def,y_rot_def,z_rot_def, xoffset_def, yoffset_def, zrot_def, tex_aspect
all_scale_def = self.size
tex_aspect = self.texaspect
x_offset_def = self.offset[0]
y_offset_def = self.offset[1]
z_offset_def = self.offset[2]
x_rot_def = self.rot[0]
y_rot_def = self.rot[1]
z_rot_def = self.rot[2]
xoffset_def = self.xoffset
yoffset_def = self.yoffset
zrot_def = self.zrot
if self.flag90:
self.zrot += 90
zrot_def += 90
self.flag90 = False
if self.flag90ccw:
self.zrot += -90
zrot_def += -90
self.flag90ccw = False
if self.action == 'bestplanar':
best_planar_map()
elif self.action == 'box':
box_map()
elif self.action == 'showtex':
show_texture()
elif self.action == 'doneplanar':
best_planar_map()
elif self.action == 'donebox':
box_map()
#print('finish execute')
return {'FINISHED'}
def invoke(self, context, event):
#print('** invoke **')
#print(self.action)
global all_scale_def,x_offset_def,y_offset_def,z_offset_def,x_rot_def,y_rot_def,z_rot_def, xoffset_def, yoffset_def, zrot_def, tex_aspect
self.size = all_scale_def
self.texaspect = tex_aspect
self.offset[0] = x_offset_def
self.offset[1] = y_offset_def
self.offset[2] = z_offset_def
self.rot[0] = x_rot_def
self.rot[1] = y_rot_def
self.rot[2] = z_rot_def
self.xoffset = xoffset_def
self.yoffset = yoffset_def
self.zrot = zrot_def
if self.action == 'bestplanar':
best_planar_map()
elif self.action == 'box':
box_map()
elif self.action == 'showtex':
show_texture()
elif self.action == 'doneplanar':
best_planar_map()
elif self.action == 'donebox':
box_map()
#print('finish invoke')
return {'FINISHED'}
def draw(self, context):
if self.action == 'bestplanar' or self.action == 'rotatecw' or self.action == 'rotateccw':
self.action = 'bestplanar'
layout = self.layout
layout.label("Size - "+self.action)
layout.prop(self,'size',text="")
layout.label("Z rotation")
col = layout.column()
col.prop(self,'zrot',text="")
row = layout.row()
row.prop(self,'flag90ccw',text="-90 (CCW)")
row.prop(self,'flag90',text="+90 (CW)")
layout.label("XY offset")
col = layout.column()
col.prop(self,'xoffset', text="")
col.prop(self,'yoffset', text="")
layout.label("Texture aspect")
layout.prop(self,'texaspect', text="")
#layout.prop(self,'preview_flag', text="Interactive Preview")
#layout.operator("object.sureuvw_operator",text="Done").action='doneplanar'
elif self.action == 'box':
layout = self.layout
layout.label("Size")
layout.prop(self,'size',text="")
layout.label("XYZ rotation")
col = layout.column()
col.prop(self,'rot', text="")
layout.label("XYZ offset")
col = layout.column()
col.prop(self,'offset', text="")
layout.label("Texture squash (optional)")
layout.label("Always must be 1.0 !!!")
layout.prop(self,'texaspect', text="")
#layout.prop(self,'preview_flag', text="Interactive Preview")
#layout.operator("object.sureuvw_operator",text="Done").action='donebox'
class SureUVWPanel(bpy.types.Panel):
bl_label = "Sure UVW Mapping v.0.5.1"
bl_space_type = "PROPERTIES"
bl_region_type = "WINDOW"
bl_context = "data"
#bl_space_type = "VIEW_3D"
#bl_region_type = "TOOLS"
#bl_region_type = "TOOL_PROPS"
@classmethod
def poll(cls, context):
obj = context.active_object
return (obj and obj.type == 'MESH')
def draw(self, context):
self.layout.label("Press this button first:")
self.layout.operator("object.sureuvw_operator",text="Show active texture on object").action='showtex'
self.layout.label("UVW Mapping:")
self.layout.operator("object.sureuvw_operator",text="UVW Box Map").action='box'
self.layout.operator("object.sureuvw_operator",text="Best Planar Map").action='bestplanar'
self.layout.label("1. Make Material With Raster Texture!")
self.layout.label("2. Set Texture Mapping Coords: UV!")
self.layout.label("3. Use Addon buttons")
#
# Registration
#
def register():
bpy.utils.register_class(SureUVWOperator)
bpy.utils.register_class(SureUVWPanel)
def unregister():
bpy.utils.unregister_class(SureUVWOperator)
bpy.utils.unregister_class(SureUVWPanel)
if __name__ == "__main__":
register()
|
{
"content_hash": "ec041b1a6dc521a0a0edcd47deb7e27d",
"timestamp": "",
"source": "github",
"line_count": 467,
"max_line_length": 145,
"avg_line_length": 31.241970021413277,
"alnum_prop": 0.5134338588074023,
"repo_name": "creepydragon/revision1",
"id": "4d58925833caf8d789e405ff56515e01a97d36e3",
"size": "14590",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "blender/sure_uvwbox51.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "65252"
},
{
"name": "C++",
"bytes": "1256414"
},
{
"name": "Lex",
"bytes": "2552"
},
{
"name": "Python",
"bytes": "37861"
},
{
"name": "QMake",
"bytes": "30527"
},
{
"name": "Shell",
"bytes": "98"
}
],
"symlink_target": ""
}
|
from collections import namedtuple
from functools import lru_cache
from django.db.models import Sum
from django.template.response import TemplateResponse
from django.urls import reverse_lazy as reverse
from django.utils.functional import cached_property
from django.utils.translation import gettext_lazy as _
from ...forms.reports.orderables import OrderablePaymentsForm, OrderablePaymentsStatusForm, OrderableStatsForm
from ...models.agegroup import AgeGroup
from ...models.citizenship import Citizenship
from ...models.orderables import Orderable, OrderableRegistration
from ...models.roles import Participant
from ...models.subjects import SubjectPayment, SubjectRegistrationParticipant, SubjectType
from ...views.generic import FormView
class ReportOrderablePaymentsView(FormView):
form_class = OrderablePaymentsForm
template_name = "leprikon/reports/orderable_payments.html"
title = _("Orderable payments")
submit_label = _("Show")
back_url = reverse("leprikon:report_list")
def form_valid(self, form):
context = form.cleaned_data
context["form"] = form
context["received_payments"] = SubjectPayment.objects.filter(
target_registration__subject__subject_type__subject_type=SubjectType.ORDERABLE,
accounted__gte=context["date_start"],
accounted__lte=context["date_end"],
)
context["returned_payments"] = SubjectPayment.objects.filter(
source_registration__subject__subject_type__subject_type=SubjectType.ORDERABLE,
accounted__gte=context["date_start"],
accounted__lte=context["date_end"],
)
context["received_payments_sum"] = context["received_payments"].aggregate(sum=Sum("amount"))["sum"] or 0
context["returned_payments_sum"] = context["returned_payments"].aggregate(sum=Sum("amount"))["sum"] or 0
context["sum"] = context["received_payments_sum"] - context["returned_payments_sum"]
return TemplateResponse(self.request, self.template_name, self.get_context_data(**context))
class ReportOrderablePaymentsStatusView(FormView):
form_class = OrderablePaymentsStatusForm
template_name = "leprikon/reports/orderable_payments_status.html"
title = _("Orderable event payments status")
submit_label = _("Show")
back_url = reverse("leprikon:report_list")
OrderablePaymentsStatusSums = namedtuple("OrderablePaymentsStatusSums", ("registrations", "status"))
def form_valid(self, form):
context = form.cleaned_data
context["form"] = form
context["reports"] = [
self.Report(orderable, context["date"])
for orderable in Orderable.objects.filter(school_year=self.request.school_year)
]
context["sum"] = self.OrderablePaymentsStatusSums(
registrations=sum(len(r.registration_statuses) for r in context["reports"]),
status=sum(r.status for r in context["reports"]),
)
return TemplateResponse(self.request, self.template_name, self.get_context_data(**context))
class Report:
def __init__(self, orderable, d):
self.orderable = orderable
self.date = d
RegPaymentStatus = namedtuple("RegPaymentStatus", ("registration", "status"))
@cached_property
def registration_statuses(self):
return [
registration_status
for registration_status in (
self.RegPaymentStatus(
registration=registration,
status=registration.get_payment_status(self.date),
)
for registration in OrderableRegistration.objects.filter(
subject=self.orderable,
approved__date__lte=self.date,
)
)
if registration_status.status.receivable
]
@cached_property
def status(self):
return sum(rs.status for rs in self.registration_statuses)
class ReportOrderableStatsView(FormView):
form_class = OrderableStatsForm
template_name = "leprikon/reports/orderable_stats.html"
title = _("Orderable statistics")
submit_label = _("Show")
back_url = reverse("leprikon:report_list")
ReportItem = namedtuple("ReportItem", ("age_group", "all", "boys", "girls", "citizenships"))
def get_form_kwargs(self):
kwargs = super().get_form_kwargs()
kwargs["school_year"] = self.request.school_year
return kwargs
@lru_cache
def get_orderable_days(self, orderable_id):
orderable = Orderable.objects.get(id=orderable_id)
return orderable.duration.days + 1
def form_valid(self, form):
d = form.cleaned_data["date"]
paid_only = form.cleaned_data["paid_only"]
paid_later = form.cleaned_data["paid_later"]
approved_later = form.cleaned_data["approved_later"]
unique_participants = form.cleaned_data["unique_participants"]
min_days = form.cleaned_data["min_days"]
context = form.cleaned_data
context["form"] = form
if approved_later:
# approved registrations created by the date
participants = SubjectRegistrationParticipant.objects.filter(
registration__created__date__lte=d,
registration__approved__isnull=False,
)
else:
# registrations approved by the date
participants = SubjectRegistrationParticipant.objects.filter(
registration__approved__date__lte=d,
)
participants = (
participants.filter(
registration__subject__in=form.cleaned_data["orderables"],
)
.exclude(registration__canceled__date__lte=d)
.select_related("registration", "age_group")
)
if paid_only:
paid_date = None if paid_later else d
participants = [
participant
for participant in participants
if participant.registration.orderableregistration.get_payment_status(paid_date).balance >= 0
]
else:
participants = list(participants)
if min_days:
participants = [p for p in participants if self.get_orderable_days(p.registration.subject_id) >= min_days]
context["orderables_count"] = len(set(participant.registration.subject_id for participant in participants))
if unique_participants:
participants = list({p.key: p for p in participants}.values())
citizenships = list(Citizenship.objects.all())
context["citizenships"] = citizenships
context["participants_counts"] = self.ReportItem(
age_group=None,
all=len(participants),
boys=len([p for p in participants if p.gender == Participant.MALE]),
girls=len([p for p in participants if p.gender == Participant.FEMALE]),
citizenships=[
len([p for p in participants if p.citizenship_id == citizenship.id]) for citizenship in citizenships
],
)
context["participants_counts_by_age_groups"] = []
for age_group in AgeGroup.objects.all():
parts = [p for p in participants if p.age_group == age_group]
context["participants_counts_by_age_groups"].append(
self.ReportItem(
age_group=age_group,
all=len(parts),
boys=len([p for p in parts if p.gender == Participant.MALE]),
girls=len([p for p in parts if p.gender == Participant.FEMALE]),
citizenships=[
len([p for p in parts if p.citizenship_id == citizenship.id]) for citizenship in citizenships
],
)
)
return TemplateResponse(self.request, self.template_name, self.get_context_data(**context))
|
{
"content_hash": "e431bf318861e2633af99aaa388da600",
"timestamp": "",
"source": "github",
"line_count": 188,
"max_line_length": 118,
"avg_line_length": 42.670212765957444,
"alnum_prop": 0.6270256793817003,
"repo_name": "leprikon-cz/leprikon",
"id": "37424c5e0733e925d60ff9148cf4c8386b63d475",
"size": "8022",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "leprikon/views/reports/orderables.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "8939"
},
{
"name": "Dockerfile",
"bytes": "1983"
},
{
"name": "HTML",
"bytes": "186044"
},
{
"name": "JavaScript",
"bytes": "8033"
},
{
"name": "Python",
"bytes": "856654"
},
{
"name": "Shell",
"bytes": "3840"
}
],
"symlink_target": ""
}
|
from math import sqrt
# Select the active layer.
mylayer = iface.activeLayer()
# List of all start-points and end-points of all line segments and their id
startEndPoints = []
for feature in mylayer.getFeatures():
xy = feature.geometry().asPolyline()
startEndPoints.append({"line": (xy[0], xy[-1]), "id": feature.id()})
# Get the paths for SVG drawing
### Start
paths = []
for feature in mylayer.getFeatures():
if feature.geometry().asMultiPolyline():
# keep all the points of the line
paths.extend([{"line": l, "id": features.id()} for l in feature.geometry().asMultiPolyline()])
elif feature.geometry().asPolyline():
paths.append({"line": feature.geometry().asPolyline(), "id": features.id()})
svgText = ''
for line in paths:
svgText += '<polyline points="'
    for point in line["line"]:
svgText += (str(point[0]) + ',' + str(-1 * point[1]) + ' ')
svgText += '" />'
print svgText
### End
# TODO: it should look for the shortest distance between lines and not stop!
# Radius in which every 2 points are checked
C_THRESHOLD = 0.0002
# Calculate the distance between every 2 points and if they are within the defined radius.
# The distance also has to be higher than 0. Otherwise points meet.
def arePointsCloseToEachOther(p1, p2):
dx = p1[0] - p2[0]
dy = p1[1] - p2[1]
    dist = sqrt((dx * dx) + (dy * dy))
    return 0 < dist < C_THRESHOLD
# Check if lines are touching each other.
def getClosePointsForLines(l1, l2):
for i in xrange(2):
for j in xrange(2):
# Compare with the last point and then first point, in case input line
# still has all its points (we need only start and end)
if arePointsCloseToEachOther(l1[i - 1], l2[j - 1]):
# Print the close by points
print (l1[i - 1], l2[j - 1])
                # index -1 means it's the endpoint, index 0 means it's the startpoint
return {"indices": [i-1, j-1], "points": [l1[i-1], l2[j-1]]}
    return None
# Create a list to collect all the close-by points.
closePoints = []
# Create a list to collect all the close-by lines.
closeLines = []
for f1 in startEndPoints:
for f2 in startEndPoints:
        # f1["id"] < f2["id"] checks the ids, so that each pair of lines is compared only once (handshakes).
if f1["id"] < f2["id"] and getClosePointsForLines(f1["line"], f2["line"]):
closePoints.append({ "lines": getClosePointsForLines(f1["line"], f2["line"]), "ids": [f1["id"], f2["id"]]})
closeLines.append({"original": [f1, f2]})
# Lines with the fixed point coordinates
newLines = []
# cP = close points
# cL = close lines
def newClosePointsCoordinates(cP, cL):
for pointPair in cP:
eP = pointPair["lines"]["points"][0]
sP = pointPair["lines"]["points"][1]
ePx = eP[0]
ePy = eP[1]
sPx = sP[0]
sPy = sP[1]
newX = ePx - ((ePx - sPx)/2)
newY = ePy - ((ePy - sPy)/2)
# think of a way to say that they all are equal
ePx = newX
ePy = newY
sPx = newX
sPy = newY
newCoord = (ePx, ePy)
pointPair["lines"]["points"][0] = newCoord
pointPair["lines"]["points"][1] = newCoord
for linePair in cL:
            # if the close point on the first line is its end point (index -1) and the ids match
if pointPair["lines"]["indices"][0] == -1 and linePair["original"][0]["id"] == pointPair["ids"][0]:
                # apply the new coordinate to the end point; the start point stays the same
newLines.append({"line":(linePair["original"][0]["line"][0], newCoord), "id":pointPair["ids"][0]})
            # if the close point on the first line is its start point (index 0) and the ids match
elif pointPair["lines"]["indices"][0] == 0 and linePair["original"][0]["id"] == pointPair["ids"][0]:
                # apply the new coordinate to the start point; the end point stays the same
newLines.append({"line":(newCoord, linePair["original"][0]["line"][-1]), "id":pointPair["ids"][0]})
else:
return False
            # if the close point on the second line is its end point (index -1) and the ids match
if pointPair["lines"]["indices"][1] == -1 and linePair["original"][1]["id"] == pointPair["ids"][1]:
                # apply the new coordinate to the end point; the start point stays the same
newLines.append({"line":(linePair["original"][1]["line"][0], newCoord), "id":pointPair["ids"][1]})
            # if the close point on the second line is its start point (index 0) and the ids match
elif pointPair["lines"]["indices"][1] == 0 and linePair["original"][1]["id"] == pointPair["ids"][1]:
                # apply the new coordinate to the start point; the end point stays the same
newLines.append({"line":(newCoord, linePair["original"][1]["line"][-1]), "id":pointPair["ids"][1]})
else:
return False
return newLines
newClosePointsCoordinates(closePoints, closeLines)
#Create the points
# create a memory layer with two points
fixedlines = QgsVectorLayer('Point', 'points' , "memory")
pr = fixedlines.dataProvider()
# add the first point
pt = QgsFeature()
coordx1 = newLines[0]["line"][0][0]
coordy1 = newLines[0]["line"][0][1]
coordx2 = newLines[0]["line"][1][0]
coordy2 = newLines[0]["line"][1][1]
point1 = QgsPoint(coordx1, coordy1)
pt.setGeometry(QgsGeometry.fromPoint(point1))
pr.addFeatures([pt])
# update extent of the layer
fixedlines.updateExtents()
# add the second point
pt = QgsFeature()
point2 = QgsPoint(coordx2, coordy2)
pt.setGeometry(QgsGeometry.fromPoint(point2))
pr.addFeatures([pt])
# update extent
fixedlines.updateExtents()
# add the layer to the canvas
QgsMapLayerRegistry.instance().addMapLayers([fixedlines])
#Create a new line
fixedlines = QgsVectorLayer('LineString', 'line' , "memory")
pr = fixedlines.dataProvider()
line = QgsFeature()
line.setGeometry(QgsGeometry.fromPolyline([point1,point2]))
pr.addFeatures([line])
fixedlines.updateExtents()
QgsMapLayerRegistry.instance().addMapLayers([fixedlines])
#Create the points
# create a memory layer with two points
fixedlines2 = QgsVectorLayer('Point', 'points' , "memory")
pr = fixedlines2.dataProvider()
# add the first point
pt = QgsFeature()
coordx1 = newLines[1]["line"][0][0]
coordy1 = newLines[1]["line"][0][1]
coordx2 = newLines[1]["line"][1][0]
coordy2 = newLines[1]["line"][1][1]
point1 = QgsPoint(coordx1, coordy1)
pt.setGeometry(QgsGeometry.fromPoint(point1))
pr.addFeatures([pt])
# update extent of the layer
fixedlines2.updateExtents()
# add the second point
pt = QgsFeature()
point2 = QgsPoint(coordx2, coordy2)
pt.setGeometry(QgsGeometry.fromPoint(point2))
pr.addFeatures([pt])
# update extent
fixedlines2.updateExtents()
# add the layer to the canvas
QgsMapLayerRegistry.instance().addMapLayers([fixedlines2])
#Create a new line
fixedlines2 = QgsVectorLayer('LineString', 'line' , "memory")
pr = fixedlines2.dataProvider()
line = QgsFeature()
line.setGeometry(QgsGeometry.fromPolyline([point1,point2]))
pr.addFeatures([line])
fixedlines2.updateExtents()
QgsMapLayerRegistry.instance().addMapLayers([fixedlines2])
|
{
"content_hash": "452b45eb3cd87b129f0a8daf20dedd41",
"timestamp": "",
"source": "github",
"line_count": 184,
"max_line_length": 125,
"avg_line_length": 39.95652173913044,
"alnum_prop": 0.6459466811751904,
"repo_name": "sigita42/vagrant-postgres-osm",
"id": "3b3ed4ab87804a25a350365f3c1fccab3f9fe232",
"size": "7352",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Code/connectingLinesByExtending.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "236125"
},
{
"name": "Python",
"bytes": "46637"
},
{
"name": "Shell",
"bytes": "1718"
}
],
"symlink_target": ""
}
|
from eventlet import patcher
from nova.console import serial as serial_console
from nova.console import type as ctype
from nova import exception
from nova.i18n import _, _LI # noqa
from oslo_config import cfg
from oslo_log import log as logging
from hyperv.nova import constants
from hyperv.nova import ioutils
from hyperv.nova import namedpipe
from hyperv.nova import serialproxy
from hyperv.nova import utilsfactory
from hyperv.nova import vmutils
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
threading = patcher.original('threading')
class SerialConsoleHandler(object):
"""Handles serial console ops related to a given instance."""
def __init__(self, instance_name):
self._vmutils = utilsfactory.get_vmutils()
self._pathutils = utilsfactory.get_pathutils()
self._instance_name = instance_name
self._log_path = self._pathutils.get_vm_console_log_paths(
self._instance_name)[0]
self._client_connected = None
self._input_queue = None
self._output_queue = None
self._serial_proxy = None
self._workers = []
def start(self):
self._setup_handlers()
for worker in self._workers:
worker.start()
def stop(self):
for worker in self._workers:
worker.stop()
if self._serial_proxy:
serial_console.release_port(self._listen_host,
self._listen_port)
def _setup_handlers(self):
if CONF.serial_console.enabled:
self._setup_serial_proxy_handler()
self._setup_named_pipe_handlers()
def _setup_serial_proxy_handler(self):
self._listen_host = (
CONF.serial_console.proxyclient_address)
self._listen_port = serial_console.acquire_port(
self._listen_host)
LOG.info(_LI('Initializing serial proxy on '
'%(addr)s:%(port)s, handling connections '
'to instance %(instance_name)s.'),
{'addr': self._listen_host,
'port': self._listen_port,
'instance_name': self._instance_name})
# Use this event in order to manage
# pending queue operations.
self._client_connected = threading.Event()
self._input_queue = ioutils.IOQueue(
client_connected=self._client_connected)
self._output_queue = ioutils.IOQueue(
client_connected=self._client_connected)
self._serial_proxy = serialproxy.SerialProxy(
self._instance_name, self._listen_host,
self._listen_port, self._input_queue,
self._output_queue, self._client_connected)
self._workers.append(self._serial_proxy)
def _setup_named_pipe_handlers(self):
# At most 2 named pipes will be used to access the vm serial ports.
#
# The named pipe having the 'ro' suffix will be used only for logging
# while the 'rw' pipe will be used for interactive sessions, logging
# only when there is no 'ro' pipe.
serial_port_mapping = self._get_vm_serial_port_mapping()
log_rw_pipe_output = not serial_port_mapping.get(
constants.SERIAL_PORT_TYPE_RO)
for pipe_type, pipe_path in serial_port_mapping.iteritems():
enable_logging = (pipe_type == constants.SERIAL_PORT_TYPE_RO or
log_rw_pipe_output)
handler = self._get_named_pipe_handler(
pipe_path,
pipe_type=pipe_type,
enable_logging=enable_logging)
self._workers.append(handler)
def _get_named_pipe_handler(self, pipe_path, pipe_type,
enable_logging):
kwargs = {}
if pipe_type == constants.SERIAL_PORT_TYPE_RW:
kwargs = {'input_queue': self._input_queue,
'output_queue': self._output_queue,
'connect_event': self._client_connected}
if enable_logging:
kwargs['log_file'] = self._log_path
handler = namedpipe.NamedPipeHandler(pipe_path, **kwargs)
return handler
def _get_vm_serial_port_mapping(self):
serial_port_conns = self._vmutils.get_vm_serial_port_connections(
self._instance_name)
if not serial_port_conns:
err_msg = _("No suitable serial port pipe was found "
"for instance %(instance_name)s")
raise vmutils.HyperVException(
err_msg % {'instance_name': self._instance_name})
serial_port_mapping = {}
# At the moment, we tag the pipes by using a pipe path suffix
# as we can't use the serial port ElementName attribute because of
# a Hyper-V bug.
for pipe_path in serial_port_conns:
port_type = pipe_path[-2:]
if port_type in [constants.SERIAL_PORT_TYPE_RO,
constants.SERIAL_PORT_TYPE_RW]:
serial_port_mapping[port_type] = pipe_path
else:
serial_port_mapping[constants.SERIAL_PORT_TYPE_RW] = pipe_path
return serial_port_mapping
def get_serial_console(self):
if not CONF.serial_console.enabled:
raise exception.ConsoleTypeUnavailable(console_type='serial')
return ctype.ConsoleSerial(host=self._listen_host,
port=self._listen_port)
|
{
"content_hash": "2a6db7605daaaa5419f1e99293242291",
"timestamp": "",
"source": "github",
"line_count": 147,
"max_line_length": 78,
"avg_line_length": 37.25170068027211,
"alnum_prop": 0.5969685902118335,
"repo_name": "adelina-t/compute-hyperv",
"id": "6f5f13671ff57fea1dc914ce99b6d74286dd9c1c",
"size": "6115",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hyperv/nova/serialconsolehandler.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "685121"
}
],
"symlink_target": ""
}
|
""" This program provides an agent that sends and responds to
health-check packets in order to determine the liveness of the
configured MPLS tunnels. """
import eossdk
import eossdk_utils
import functools
import json
import os
import pyinotify
import scapy
import scapy.fields
import scapy.layers.l2
import scapy.layers.inet
import scapy.packet
import scapy.route
import scapy.sendrecv
import socket
import struct
import sys
import time
# Requires: RPMs for scapy and EosSdk, as well as the eossdk_utils.py
# script (for debugging). Tunnel configuration is done at the bottom
# of this file in the main function.
# The main agent is located in the MplsTunnelLivenessAgent class below.
POLL_TIME = 1 # how often to send a liveness packet in seconds
TIMEOUT_TIME = 5 # seconds before a tunnel is declared dead
STARTUP_GRACEPERIOD = 0 # seconds after startup before we start checking a tunnel
# Make sure your IP tables are up to date on the switch:
# > sudo iptables -I INPUT -p UDP --dport 17171 -j ACCEPT
UDP_PORT = 17171
MAX_PKT_SIZE = 2048 # The maximum payload size of our packet
MAX_INT = 0xffffffff # The maximum size of a 4 byte unsigned int
class Message(object):
""" A Message is the payload of the health-check packets that this
agent sends out and receives. It consists of two parts. The first
is a header that contains an number that identifies which tunnel
the sender sent this message out of. The header also contains a
numeric id of the packet, and finally, a number describing how many
'entries' are in the second part of the packet. This second part is
a list of 0 or more 'tunnel status entries'. Each entry contains a
numeric tunnel identifier and a boolean describing whether the
sending switch thinks that tunnel is alive or not."""
# Header consists of (version, pid, sender's tunnel key, msg id,
# num status entries), as integers, in little-endian:
header_format = '<IIIII'
header_len = struct.calcsize(header_format)
tunnel_entry_format = '<I?' # tunnel_key, bool
tunnel_entry_len = struct.calcsize(tunnel_entry_format)
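  # Illustrative sizing note, not part of the original file: the header packs
  # five 4-byte unsigned ints (20 bytes) and each tunnel status entry packs a
  # 4-byte key plus a 1-byte bool (5 bytes), so a message with N entries is
  # 20 + 5*N bytes, and MAX_PKT_SIZE = 2048 allows at most 405 entries.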
def __init__(self, pid, egress_tunnel_key, msg_id, tunnel_liveness):
self.pid = pid
self.egress_tunnel_key = egress_tunnel_key
self.msg_id = msg_id
# Mapping from tunnel_key to boolean whether this is alive or not
self.tunnel_liveness = tunnel_liveness
def serialize(self):
# First put the length of this packet
ret = struct.pack(Message.header_format, 1, self.pid, self.egress_tunnel_key,
self.msg_id, len(self.tunnel_liveness))
for tunnel_key, is_alive in self.tunnel_liveness.iteritems():
ret += struct.pack(Message.tunnel_entry_format, tunnel_key, is_alive)
if len(ret) > MAX_PKT_SIZE:
assert False, "Packet %s too large to send!" % self.__str__()
return ret
def __str__(self):
return "Message(sender_pid=%d, egress_tunnel_key=%d, id=%d, %r)" % (
self.pid, self.egress_tunnel_key, self.msg_id, self.tunnel_liveness)
@staticmethod
def deserialize(buf):
""" Given a buffer, create and return a Message from the
buffer's contents. If the buffer does not contain a valid
Message, this returns None.
"""
if len(buf) < Message.header_len:
return None
version, pid, egress_tunnel_key, msg_id, num_entries = struct.unpack(
Message.header_format, buf[:Message.header_len])
if version != 1:
return None
msg_len = Message.header_len + Message.tunnel_entry_len * num_entries
if len(buf) < msg_len:
return None
liveness = {}
for i in xrange(Message.header_len, msg_len,
Message.tunnel_entry_len):
# Unpack each status entry reported in this packet
key, is_alive = struct.unpack(Message.tunnel_entry_format,
buf[i : i + Message.tunnel_entry_len])
liveness[key] = is_alive
return Message(pid, egress_tunnel_key, msg_id, liveness)
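# Illustrative round-trip, not part of the original file: serialize() and
# deserialize() are inverses for the fields carried on the wire, e.g.
#   msg = Message(pid=1234, egress_tunnel_key=7, msg_id=1,
#                 tunnel_liveness={3: True, 5: False})
#   copy = Message.deserialize(msg.serialize())
#   assert copy.tunnel_liveness == {3: True, 5: False}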
class EgressTunnel(object):
""" Contains the configuration and status of this switch's outgoing
tunnels. """
def __init__(self, label, nexthop_ip_addr):
# Configurable attributes
self.mpls_label = label
self.nexthop_ip = nexthop_ip_addr
# Dynamic attributes:
# The bridging MAC of the nexthop:
self.nexthop_eth_addr = None
# The interface the nexthop_eth_addr lives on:
self.egress_intf = None
# ... and the MAC address of that interface:
self.egress_intf_eth_addr = None
self.last_update_time = 0
self.is_alive = True
class RemoteTunnelStatus(object):
""" Tracks the status of a remote tunnel (a tunnel where the packet
sender is the remote switch). """
def __init__(self):
self.last_rx_msg_id = 0
self.last_update_time = time.time()
class RemoteSwitch(object):
""" This object stores the configuration for our outgoing tunnels to
this remote switch, as well as a status collection containing our view on
the liveness of that switch's tunnels to us. """
def __init__(self, dest_ip):
# Configuration
# The IP address of the remote switch
self.destination_ip = dest_ip
# The following dictionary keeps track of our outgoing tunnels
# to this switch. It is a mapping from integer tunnel_key to a
# EgressTunnel()
self.egress_tunnels = {}
# Status
self.last_tx_msg_id = 0
self.last_rx_msg_id = 0
self.pid = 0
# The `remote_tunnel_status` variable keeps track of whether their
# tunnels are alive or not. It is a mapping from an integer
# tunnel_key to a RemoteTunnelStatus() object. Note that these
      # keys correspond to the remote switch's tunnel collection, and
      # are not the same as the keys for the `tunnels` variable above.
self.remote_tunnel_status = {}
def liveness_dict(self, cur_time):
ret = {}
for key, tunnel_status in self.remote_tunnel_status.items():
time_delta = cur_time - tunnel_status.last_update_time
if time_delta > (TIMEOUT_TIME*10):
# Stop sending tunnels that we haven't heard from in a
# really long time.
del self.remote_tunnel_status[key]
elif time_delta > TIMEOUT_TIME:
# Tunnel is dead!
ret[key] = False
else:
ret[key] = True
return ret
class MPLS(scapy.packet.Packet):
""" Create an MPLS header that can be used with scapy packets """
name = "MPLS"
fields_desc = [ scapy.fields.BitField("label", 9, 20),
scapy.fields.BitField("cos", 0, 3),
scapy.fields.BitField("s", 1, 1),
scapy.fields.ByteField("ttl", 0) ]
scapy.packet.bind_layers(scapy.layers.l2.Ether, MPLS, type=0x8847)
class InotifyHandler(pyinotify.ProcessEvent):
""" A helper class handles inotify updates """
def my_init(self, **kwargs):
self.parent = kwargs['parent']
def process_IN_MODIFY(self, event):
self.parent.process_config()
class MplsTunnelLivenessAgent(eossdk_utils.EosSdkAgent,
eossdk.AgentHandler,
eossdk.FdHandler,
eossdk.TimeoutHandler):
""" This agent is responsible for tracking the liveness of specified
MPLS tunnels. """
def __init__(self, sdk, config_file="MplsTunnelLivenessConfig.json"):
""" Create the agent. Requires an eossdk handle, as well as the
input configuration """
self.agent_mgr = sdk.get_agent_mgr()
self.eth_intf_mgr = sdk.get_eth_intf_mgr()
self.ip_intf_mgr = sdk.get_ip_intf_mgr()
self.mac_table_mgr = sdk.get_mac_table_mgr()
self.neighbor_table_mgr = sdk.get_neighbor_table_mgr()
self.tracer = eossdk.Tracer("MplsTunnelLivenessAgent")
eossdk_utils.EosSdkAgent.__init__(self)
eossdk.AgentHandler.__init__(self, self.agent_mgr)
eossdk.TimeoutHandler.__init__(self, sdk.get_timeout_mgr())
eossdk.FdHandler.__init__(self)
self.tracer.trace0("MPLS tunnel liveness agent constructed")
self.initialized = False
self.pid = os.getpid()
# The l3 interface we should grab our "SRC IP" from. Read from
# the config:
self.src_intf = None
self.src_ip = None # Resolved after reading from config
# A UDP socket that receives liveness packets from other
# agents. Created during on_initialized
self.rx_sock = None
# A mapping from remote switch IP to RemoteSwitch()
self.remote_switches = {}
self.config_file = config_file
self.wm = pyinotify.WatchManager()
handler = functools.partial(InotifyHandler, parent=self)
self.wm.watch_transient_file(config_file, pyinotify.IN_MODIFY, handler)
self.notifier = pyinotify.AsyncNotifier(self.wm,
InotifyHandler(parent=self))
self.notifier.coalesce_events(True)
self.inotify_fd = self.wm.get_fd()
self.watch_readable(self.inotify_fd, True)
# Read our initial configuration
self.process_config()
def on_initialized(self):
""" Update our configured egress tunnels. Start all tunnels as
alive, with a last_update_time of now + any grace
period. Calculate the output interfaces for each tunnel based
off of that tunnel's nexthop MAC address. """
self.initialized = True
self.tracer.trace2("Looking up the IP address for interface " + self.src_intf)
src_ips = self.ip_intf_mgr.ip_addrs(eossdk.IntfId(self.src_intf))
if not src_ips:
assert False, "No IP addresses assigned to %s" % self.src_intf
self.src_ip = src_ips[0].addr().to_string()
self.tracer.trace2("Using src IP address " + self.src_ip)
self.tracer.trace2("Create the socket that receives remote probes")
self.rx_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self.rx_sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.rx_sock.bind((self.src_ip, UDP_PORT))
self.rx_sock.setblocking(0)
self.watch_readable(self.rx_sock.fileno(), True)
self.resolve_config()
def handle_tunnel_alive(self, dst_ip, tunnel_key, tunnel):
self.tracer.trace3("Tunnel %d to %s came back!" % (tunnel_key, dst_ip))
# Do any other logic (a.k.a. alert another agent that
# tunnel.mpls_label is usable again)
def handle_tunnel_death(self, dst_ip, tunnel_key, tunnel):
self.tracer.trace3("Tunnel %d to %s died!" % (tunnel_key, dst_ip))
# Do any other logic (a.k.a. alert another agent that
# tunnel.mpls_label is no longer a valid tunnel)
def on_timeout(self):
""" Time to send some packets to our neighbors! Our poller
fired, so we should send out our heartbeat packets. We also
check if we haven't heard about any of our tunnels recently, and
if so, mark them as dead. """
cur_time = time.time()
for host in self.remote_switches.itervalues():
liveness_dict = host.liveness_dict(cur_time)
host.last_tx_msg_id += 1
if host.last_tx_msg_id > MAX_INT:
host.last_tx_msg_id = 1
for key, tunnel in host.egress_tunnels.iteritems():
msg = Message(self.pid, key, host.last_tx_msg_id, liveness_dict)
self.send_packet(host.destination_ip, tunnel, msg)
if tunnel.is_alive and (
time.time() - tunnel.last_update_time > TIMEOUT_TIME):
# There has been no updates to this tunnel at all
# within our timeout period.
tunnel.is_alive = False
self.handle_tunnel_death(host.destination_ip, key, tunnel)
# Schedule us to be called again in the future
self.timeout_time_is(eossdk.now() + POLL_TIME)
def on_readable(self, fd):
""" We got a packet on our UDP port! Read the packet, update our
views of the remote tunnel's liveness, and then parse the
packet's payload to inspect what the remote packet thinks of
*our* tunnel liveness. If any liveness changed, then fire our
handlers. """
if fd == self.inotify_fd:
self.tracer.trace6("Inotify fd %d is readable" % self.inotify_fd)
self.notifier.handle_read()
return
if fd != self.rx_sock.fileno():
assert False, "Unknown socket became readable %d" % fd
data, addr = self.rx_sock.recvfrom(MAX_PKT_SIZE)
src_ip = addr[0]
self.tracer.trace6("Received message from %r" % src_ip)
if not data:
self.tracer.trace7("Received empty message, ignoring.")
return
msg = Message.deserialize(data)
if not msg:
self.tracer.trace7("Received invalid message, ignoring! "
"First 500 bytes of pkt: %r" % data[:500])
return
self.tracer.trace8("Got message %s" % str(msg))
if src_ip not in self.remote_switches:
self.tracer.trace7("Got packet from unknown host: %r" % src_ip)
return
remote_switch = self.remote_switches[src_ip]
remote_tunnel_status = remote_switch.remote_tunnel_status.setdefault(
msg.egress_tunnel_key, RemoteTunnelStatus())
if msg.pid != remote_switch.pid:
# This is the either the first message we've received from
# them, or their remote switch restarted. In any case, the
# msg IDs they are sending will have been reset.
remote_switch.pid = msg.pid
remote_switch.last_rx_msg_id = 0
remote_tunnel_status.last_rx_msg_id = 0
# First track we got a packet from the sender's tunnel named
# in the packet.
if self.is_new_id(remote_tunnel_status.last_rx_msg_id, msg.msg_id):
# Do we care about packets coming in out of order?
remote_tunnel_status.last_update_time = time.time()
remote_tunnel_status.last_rx_msg_id = msg.msg_id
# Then inspect the body of the packet that tells me which of
# my tunnel statuses the remote switch has seen.
if not self.is_new_id(remote_switch.last_rx_msg_id, msg.msg_id):
      # We've already seen newer messages. Ignore this.
self.tracer.trace7("Got old message with id: %d (currently at %d)"
% (msg.msg_id, remote_switch.last_rx_msg_id))
return
remote_switch.last_rx_msg_id = msg.msg_id
for tunnel_key, is_alive in msg.tunnel_liveness.iteritems():
if tunnel_key not in remote_switch.egress_tunnels:
# They are telling us about one of our egress tunnels that
# we have no record of...
self.tracer.trace0("Got tunnel status for an unknown key: %r" %
tunnel_key)
continue
tunnel = remote_switch.egress_tunnels[tunnel_key]
tunnel.last_update_time = time.time()
# Check if the remote switch thinks our egress tunnel is
# up or down. If it changed, call our handlers!
if tunnel.is_alive == is_alive:
self.tracer.trace9("No change to liveness for tunnel %d" % tunnel_key)
continue
elif is_alive:
tunnel.is_alive = True
self.handle_tunnel_alive(src_ip, tunnel_key, tunnel)
else:
tunnel.is_alive = False
self.handle_tunnel_death(src_ip, tunnel_key, tunnel)
def resolve_egress_tunnel(self, tunnel):
self.tracer.trace8("Resolve the nexthop IP %s to an ethernet address" %
tunnel.nexthop_ip)
neighbor_key = eossdk.NeighborKey(
eossdk.IpAddr(tunnel.nexthop_ip), eossdk.IntfId())
neighbor_entry = self.neighbor_table_mgr.neighbor_entry_status(neighbor_key)
if neighbor_entry == eossdk.NeighborEntry():
self.tracer.trace8("Checking static ARP entries")
neighbor_entry = self.neighbor_table_mgr.neighbor_entry(neighbor_key)
if neighbor_entry == eossdk.NeighborEntry():
self.tracer.trace0("IP address %r has no ARP entry" %
tunnel.nexthop_ip)
assert False, "Unlearned nexthop IP %s" % tunnel.nexthop_ip
nexthop_eth_addr = neighbor_entry.eth_addr()
self.tracer.trace5("IP %s lives on %s" %
(tunnel.nexthop_ip, nexthop_eth_addr.to_string()))
tunnel.nexthop_eth_addr = nexthop_eth_addr.to_string()
self.tracer.trace8("Now resolving that MAC entry to an interface.")
# TODO: Is this necessary if we send it out of the "fabric"
# interface?
vlan_id = 1
mac_entry = self.mac_table_mgr.mac_entry(vlan_id, nexthop_eth_addr)
if mac_entry.intf() == eossdk.IntfId():
self.tracer.trace0("Mac entry %r not on any interface" %
tunnel.nexthop_eth_addr)
assert False, "Unlearned nexthop MAC %s" % tunnel.nexthop_eth_addr
intf = mac_entry.intf().to_string()
# Convert the interface names to the kernel interface names
intf = intf.replace("Ethernet", "et")
intf = intf.replace("Port-Channel", "po")
self.tracer.trace5("MAC entry %s is learned on inteface %r" %
(tunnel.nexthop_eth_addr, intf))
tunnel.egress_intf = intf
self.tracer.trace8("Looking up that interface's MAC address")
egress_eth_addr = self.eth_intf_mgr.eth_addr(mac_entry.intf())
if egress_eth_addr == eossdk.EthAddr():
assert False, "Interface %s has no MAC address" % intf
self.tracer.trace5("Intf %s has MAC address %s" %
(intf, egress_eth_addr.to_string()))
tunnel.egress_intf_eth_addr = egress_eth_addr.to_string()
def send_packet(self, dst_ip, tunnel, msg):
""" Wrap `msg` in a UDP-over-MPLS packet, using `dst_ip` and the tunnel's
MPLS label, and send the packet out of the tunnel's egress interface."""
self.tracer.trace8("Sending message %s" % str(msg))
payload = msg.serialize()
pkt = scapy.layers.l2.Ether(src=tunnel.egress_intf_eth_addr,
dst=tunnel.nexthop_eth_addr)
pkt = (pkt / MPLS(label=tunnel.mpls_label, ttl=64) /
scapy.layers.inet.IP(src=self.src_ip,
dst=dst_ip) /
scapy.layers.inet.UDP(dport=UDP_PORT) /
(payload))
# In the real world we might make this non-blocking, but for now
# we assume packet always get sent in one go. Also, it might be
# worth maintaining our own socket to the egress interface to
# save us the work of creating/tearing down raw sockets
# constantly.
scapy.sendrecv.sendp(pkt, iface=tunnel.egress_intf, verbose=0)
def process_config(self):
self.tracer.trace1("Processing configuration change on %s" %
self.config_file)
with open(self.config_file) as f:
cfg = json.loads(f.read())
if not self.initialized:
# Write the src_intf only once.
self.src_intf = cfg["src_intf"]
# Clear out the previous config:
self.remote_switches = {}
# And signify that we are a new process by changing our
# advertised pid. It would be preferable to just only update the
# newly configured tunnels, but that's more complicated for now.
self.pid -= 1
for rs in cfg["remote_switches"]:
dst_ip = rs["destination_ip"]
dst = RemoteSwitch(dst_ip)
for tunnel_key_str, tunnel_info in rs["tunnels"].iteritems():
tunnel_key = int(tunnel_key_str)
dst.egress_tunnels[tunnel_key] = EgressTunnel(
tunnel_info["label"], tunnel_info["nexthop_ip"])
self.remote_switches[dst_ip] = dst
if self.initialized:
self.resolve_config()
def resolve_config(self):
self.tracer.trace2("Resolving all of our configured tunnels")
for host in self.remote_switches.itervalues():
for tunnel in host.egress_tunnels.itervalues():
tunnel.last_update_time = time.time() + STARTUP_GRACEPERIOD
self.resolve_egress_tunnel(tunnel)
self.timeout_time_is(eossdk.now() + POLL_TIME)
def is_new_id(self, last_seen_id, new_id):
# Returns True if the new_id is larger than the last_seen_id, or
# the new_id has wrapped around.
return (last_seen_id < new_id) or ((last_seen_id - new_id) > (MAX_INT/2))
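  # Illustrative note, not part of the original file: is_new_id(10, 11) is
  # True (a normal increment), is_new_id(11, 10) is False (a stale id), and
  # is_new_id(MAX_INT, 1) is True because MAX_INT - 1 exceeds MAX_INT/2,
  # i.e. the counter wrapped around.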
def main(args):
sdk = eossdk.Sdk()
_ = MplsTunnelLivenessAgent(sdk)
sdk.main_loop(args)
if __name__ == "__main__":
main(sys.argv)
|
{
"content_hash": "da0aa97ddcad9d724a87ca11d96990cd",
"timestamp": "",
"source": "github",
"line_count": 491,
"max_line_length": 85,
"avg_line_length": 42.134419551934826,
"alnum_prop": 0.6354408352668214,
"repo_name": "tsuna/EosSdk",
"id": "4a62fffc43801034ca9fe5844085fd97dad77e8a",
"size": "20830",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/MplsTunnelLivenessAgent.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "879"
},
{
"name": "C++",
"bytes": "610451"
},
{
"name": "Python",
"bytes": "504"
},
{
"name": "Shell",
"bytes": "1559"
}
],
"symlink_target": ""
}
|
import inspect
import multiprocessing
import logging
import os
import datetime
import socket
from uuid import getnode as get_mac
_LAZY_M_ENABLED_ = True
IPCType = {'NewSourceJob' : 1,
'UpdateSourceJob': 2,
'InstallSocketCriteria': 3,
'DeleteSocketCriteria': 4,
'NewSocket': 5,
'DeleteSocket': 6,
'AddSkToJobFlow': 7,
'RemoveSkFromJobFlow': 8,
'UpdateControlJob':9}
def IsLazyTableEnabled():
return _LAZY_M_ENABLED_
def composeKey(jobId, flowId):
return '{0}@{1}'.format(jobId, flowId)
def decomposeKey(key):
[jobId, flowId] = key.split('@')
return (int(jobId), int(flowId))
def keyContainJobId(key, jobId):
[keyJobId, keyFlowId] = key.split('@')
return (keyJobId == str(jobId))
def middleJobKeyContainJobIdAndLevel(key, jobId, level):
[keyJobId, _, keyLevel] = key.split('@')
return (keyJobId == str(jobId)) and (keyLevel == str(level))
def composeMiddleJobKey(jobId, flowId, level):
return '{0}@{1}@{2}'.format(jobId, flowId, level)
class SelfIP():
_myIP = None
@staticmethod
def GetSelfIP():
if SelfIP._myIP is None:
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(('google.com', 0))
SelfIP._myIP = s.getsockname()[0]
s.close()
return SelfIP._myIP
class LogUtil:
_LogLevel_ = logging.DEBUG
_MAN_DEBUG_ = False
_SND_DEBUG_ = False
_RCV_DEBUG_ = False
_CONTROL_DEBUG_ = False
_DIRSERVICE_DEBUG_ = False
_LIB_DEBUG_ = False
_JOB_BUILD_DEBUG_ = False
_SCH_DEBUG_ = False
_CONN_DEBUG_ = False
_PROC_DEBUG_ = False
LoggingLock = multiprocessing.Lock()
LogFileName = ''
DebugFlags = {'manager' : _MAN_DEBUG_,
'sndMod' : _SND_DEBUG_,
'rcvMod' : _RCV_DEBUG_,
'control' : _CONTROL_DEBUG_,
'dir' : _DIRSERVICE_DEBUG_,
'lib' : _LIB_DEBUG_,
'job' : _JOB_BUILD_DEBUG_,
'schedule': _SCH_DEBUG_,
'conn' : _CONN_DEBUG_,
'proc' : _PROC_DEBUG_}
EvalData = []
@staticmethod
def InitLogging():
if not os.path.exists('logs'):
os.makedirs('logs')
hostId = get_mac()
LogUtil.LogFileName = 'logs/agent_{0}_{1}.log'.format(hostId, str(datetime.datetime.now()).translate(None, ' :-.'))
logging.basicConfig(filename=LogUtil.LogFileName, level=LogUtil._LogLevel_,
format='%(levelname)8s,%(asctime)s.%(msecs).3d,%(module)17s,%(funcName)21s,%(lineno)3d,%(message)s',
datefmt='%m/%d/%Y %H:%M:%S')
@staticmethod
def DebugLog(section, *args):
flag = LogUtil.DebugFlags.get(section, False)
if flag:
LogUtil.LoggingLock.acquire()
_, fileName, lineNumber, _, _, _ = inspect.stack()[1]
tmp = fileName.split('/')
fileName = tmp[len(tmp) - 1]
print '\nDEBUG ' + fileName + ', L' + str(lineNumber) + ': '
for i in range(0, len(args)):
print args[i]
print '\n'
LogUtil.LoggingLock.release()
@staticmethod
def EvalLog(eventId, msg):
#logging.debug('{0},{1}'.format(eventId, msg))
LogUtil.EvalData.append('{0},{1}'.format(eventId, msg))
@staticmethod
def OutputEvalLog():
output = open(LogUtil.LogFileName + '.eval', 'a')
for data in LogUtil.EvalData:
print >>output, data
output.close()
LogUtil.EvalData = []
# _loggingLock = multiprocessing.Lock()
#
# _logFileName = None
# _logs = []
#
# def debugLog(module, *args):
# flag = _debugFlags.get(module, False)
# if flag:
# _loggingLock.acquire()
# _, fileName, lineNumber, _, _, _ = inspect.stack()[1]
# tmp = fileName.split('/')
# fileName = tmp[len(tmp) - 1]
# print '\nDEBUG ' + fileName + ', L' + str(lineNumber) + ': '
# for i in range(0, len(args)):
# print args[i]
# print '\n'
# _loggingLock.release()
#
# def SetLogFileName(logFileName):
# global _logFileName
# _logFileName = logFileName
#
# def EvalLog(info):
# #print 'EvalLog: {0}'.format(info)
# global _logs
# _logs.append(info)
#
# def WriteLogs():
# global _logFileName
# global _logs
# if _logFileName and _logs:
# output = open(_logFileName, 'a')
# for log in _logs:
# print >>output, log
# output.close()
# del _logs[:]
|
{
"content_hash": "c21805e42c6578e12f1cb872bbc8c98b",
"timestamp": "",
"source": "github",
"line_count": 158,
"max_line_length": 128,
"avg_line_length": 29.537974683544302,
"alnum_prop": 0.5451039211484894,
"repo_name": "pupeng/hone",
"id": "07f74f95b82e12ad06047ae4629a94fc6525fe22",
"size": "4855",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "HostAgent/agentUtil.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "10752"
},
{
"name": "C++",
"bytes": "8253"
},
{
"name": "Makefile",
"bytes": "493"
},
{
"name": "Python",
"bytes": "452248"
},
{
"name": "Shell",
"bytes": "1254"
}
],
"symlink_target": ""
}
|
import win32ui
import win32con
import win32api
from pywin.mfc import docview, window, afxres
import commctrl
class GenericFrame(window.MDIChildWnd):
def OnCreateClient(self, cp, context):
# handlers for toolbar buttons
self.HookCommand (self.OnPrevious, 401)
self.HookCommand (self.OnNext, 402)
        # It's not necessary for us to hook both of these - the
# common controls should fall-back all by themselves.
# Indeed, given we hook TTN_NEEDTEXTW, commctrl.TTN_NEEDTEXTA
# will not be called.
self.HookNotify(self.GetTTText, commctrl.TTN_NEEDTEXT)
self.HookNotify(self.GetTTText, commctrl.TTN_NEEDTEXTW)
# parent = win32ui.GetMainFrame()
parent = self
style = win32con.WS_CHILD | win32con.WS_VISIBLE | \
afxres.CBRS_SIZE_DYNAMIC | afxres.CBRS_TOP | afxres.CBRS_TOOLTIPS | afxres.CBRS_FLYBY
buttons = (win32ui.ID_APP_ABOUT,win32ui.ID_VIEW_INTERACTIVE)
bitmap = win32ui.IDB_BROWSER_HIER
tbid = 0xE840
self.toolbar = tb = win32ui.CreateToolBar (parent, style, tbid)
tb.LoadBitmap(bitmap)
tb.SetButtons(buttons)
tb.EnableDocking(afxres.CBRS_ALIGN_ANY)
tb.SetWindowText("Test")
parent.EnableDocking(afxres.CBRS_ALIGN_ANY)
parent.DockControlBar(tb)
parent.LoadBarState("ToolbarTest")
window.MDIChildWnd.OnCreateClient(self, cp, context)
return 1
def OnDestroy(self, msg):
self.SaveBarState("ToolbarTest")
def GetTTText(self, std, extra):
(hwndFrom, idFrom, code) = std
text, hinst, flags = extra
if flags & commctrl.TTF_IDISHWND:
return # Not handled
if (idFrom==win32ui.ID_APP_ABOUT):
# our 'extra' return value needs to be the following
# entries from a NMTTDISPINFO[W] struct:
# (szText, hinst, uFlags). None means 'don't change
# the value'
return 0, ("It works!", None, None)
return None # not handled.
def GetMessageString(self, id):
if id==win32ui.ID_APP_ABOUT:
return "Dialog Test\nTest"
else:
return self._obj_.GetMessageString(id)
def OnSize (self, params):
print('OnSize called with ', params)
def OnNext (self, id, cmd):
print('OnNext called')
def OnPrevious (self, id, cmd):
print('OnPrevious called')
msg = """\
This toolbar was dynamically created.\r
\r
The first item's tooltip is provided by Python code.\r
\r
(Don't close the window with the toolbar in a floating state - it may not re-appear!)\r
"""
def test():
template = docview.DocTemplate( win32ui.IDR_PYTHONTYPE, None, GenericFrame, docview.EditView)
doc = template.OpenDocumentFile(None)
doc.SetTitle("Toolbar Test")
view = doc.GetFirstView()
view.SetWindowText(msg)
if __name__=='__main__':
import demoutils
if demoutils.NeedGoodGUI():
test()
|
{
"content_hash": "e8562a850389a525f73a3f2498b9fa01",
"timestamp": "",
"source": "github",
"line_count": 88,
"max_line_length": 94,
"avg_line_length": 30.147727272727273,
"alnum_prop": 0.7218243497926875,
"repo_name": "Microsoft/PTVS",
"id": "30e7346c69f64bcef5850e1b8973682d63396a27",
"size": "2745",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "Python/Product/Miniconda/Miniconda3-x64/Lib/site-packages/pythonwin/pywin/Demos/toolbar.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ASP",
"bytes": "109"
},
{
"name": "Batchfile",
"bytes": "10898"
},
{
"name": "C",
"bytes": "23236"
},
{
"name": "C#",
"bytes": "12235396"
},
{
"name": "C++",
"bytes": "212001"
},
{
"name": "CSS",
"bytes": "7025"
},
{
"name": "HTML",
"bytes": "34251"
},
{
"name": "JavaScript",
"bytes": "87257"
},
{
"name": "PowerShell",
"bytes": "44322"
},
{
"name": "Python",
"bytes": "847130"
},
{
"name": "Rich Text Format",
"bytes": "260880"
},
{
"name": "Smarty",
"bytes": "8156"
},
{
"name": "Tcl",
"bytes": "24968"
}
],
"symlink_target": ""
}
|
"""
Volume driver for NetApp Data ONTAP (C-mode) FibreChannel storage systems.
"""
from cinder import interface
from cinder.volume import driver
from cinder.volume.drivers.netapp.dataontap import block_cmode
from cinder.zonemanager import utils as fczm_utils
@interface.volumedriver
class NetAppCmodeFibreChannelDriver(driver.BaseVD,
driver.ConsistencyGroupVD,
driver.ManageableVD,
driver.ExtendVD,
driver.TransferVD,
driver.SnapshotVD):
"""NetApp C-mode FibreChannel volume driver."""
DRIVER_NAME = 'NetApp_FibreChannel_Cluster_direct'
# ThirdPartySystems wiki page
CI_WIKI_NAME = "NetApp_CI"
VERSION = block_cmode.NetAppBlockStorageCmodeLibrary.VERSION
def __init__(self, *args, **kwargs):
super(NetAppCmodeFibreChannelDriver, self).__init__(*args, **kwargs)
self.library = block_cmode.NetAppBlockStorageCmodeLibrary(
self.DRIVER_NAME, 'FC', **kwargs)
def do_setup(self, context):
self.library.do_setup(context)
def check_for_setup_error(self):
self.library.check_for_setup_error()
def create_volume(self, volume):
self.library.create_volume(volume)
def create_volume_from_snapshot(self, volume, snapshot):
self.library.create_volume_from_snapshot(volume, snapshot)
def create_cloned_volume(self, volume, src_vref):
self.library.create_cloned_volume(volume, src_vref)
def delete_volume(self, volume):
self.library.delete_volume(volume)
def create_snapshot(self, snapshot):
self.library.create_snapshot(snapshot)
def delete_snapshot(self, snapshot):
self.library.delete_snapshot(snapshot)
def get_volume_stats(self, refresh=False):
return self.library.get_volume_stats(refresh,
self.get_filter_function(),
self.get_goodness_function())
def get_default_filter_function(self):
return self.library.get_default_filter_function()
def get_default_goodness_function(self):
return self.library.get_default_goodness_function()
def extend_volume(self, volume, new_size):
self.library.extend_volume(volume, new_size)
def ensure_export(self, context, volume):
return self.library.ensure_export(context, volume)
def create_export(self, context, volume, connector):
return self.library.create_export(context, volume)
def remove_export(self, context, volume):
self.library.remove_export(context, volume)
def manage_existing(self, volume, existing_ref):
return self.library.manage_existing(volume, existing_ref)
def manage_existing_get_size(self, volume, existing_ref):
return self.library.manage_existing_get_size(volume, existing_ref)
def unmanage(self, volume):
return self.library.unmanage(volume)
@fczm_utils.AddFCZone
def initialize_connection(self, volume, connector):
return self.library.initialize_connection_fc(volume, connector)
@fczm_utils.RemoveFCZone
def terminate_connection(self, volume, connector, **kwargs):
return self.library.terminate_connection_fc(volume, connector,
**kwargs)
def get_pool(self, volume):
return self.library.get_pool(volume)
def create_consistencygroup(self, context, group):
return self.library.create_consistencygroup(group)
def delete_consistencygroup(self, context, group, volumes):
return self.library.delete_consistencygroup(group, volumes)
def update_consistencygroup(self, context, group,
add_volumes=None, remove_volumes=None):
        return self.library.update_consistencygroup(group,
                                                    add_volumes=add_volumes,
                                                    remove_volumes=remove_volumes)
def create_cgsnapshot(self, context, cgsnapshot, snapshots):
return self.library.create_cgsnapshot(cgsnapshot, snapshots)
def delete_cgsnapshot(self, context, cgsnapshot, snapshots):
return self.library.delete_cgsnapshot(cgsnapshot, snapshots)
def create_consistencygroup_from_src(self, context, group, volumes,
cgsnapshot=None, snapshots=None,
source_cg=None, source_vols=None):
return self.library.create_consistencygroup_from_src(
group, volumes, cgsnapshot=cgsnapshot, snapshots=snapshots,
source_cg=source_cg, source_vols=source_vols)
def failover_host(self, context, volumes, secondary_id=None):
return self.library.failover_host(
context, volumes, secondary_id=secondary_id)
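# The driver above is a thin facade: every Cinder entry point delegates to the
# block_cmode library instance built in __init__. A minimal sketch of the same
# pattern (illustrative only; the class and library names below are hypothetical):
#
#   class ExampleFCDriver(driver.BaseVD):
#       def __init__(self, *args, **kwargs):
#           super(ExampleFCDriver, self).__init__(*args, **kwargs)
#           self.library = ExampleBlockLibrary('Example_FC', 'FC', **kwargs)
#
#       def create_volume(self, volume):
#           # all backend-specific work happens in the library object
#           return self.library.create_volume(volume)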
|
{
"content_hash": "d9a5822a469f3b46d430641736702d94",
"timestamp": "",
"source": "github",
"line_count": 125,
"max_line_length": 76,
"avg_line_length": 39.096,
"alnum_prop": 0.6470227133210559,
"repo_name": "Nexenta/cinder",
"id": "1c070cbf1d458e5c73ae8997a23b4305876c79af",
"size": "5580",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cinder/volume/drivers/netapp/dataontap/fc_cmode.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "18007018"
},
{
"name": "Shell",
"bytes": "13543"
}
],
"symlink_target": ""
}
|
from django.http import HttpResponse
from django.shortcuts import render_to_response, get_object_or_404
from django.template.context import RequestContext
from resumes.models import UserResume
def list_resume(request):
return HttpResponse()
def view_resume(request, resume_id=None, user_id=None):
resume = None
if resume_id:
resume = get_object_or_404(UserResume, id=resume_id)
elif user_id:
resume = get_object_or_404(UserResume, user__id=user_id)
return render_to_response('resumes/resume.html', {'resume': resume, }, context_instance=RequestContext(request))
def export_resume(request, resume_id=None, user_id=None):
resume = None
if resume_id:
resume = get_object_or_404(UserResume, id=resume_id)
elif user_id:
resume = get_object_or_404(UserResume, user__id=user_id)
return render_to_response('resumes/resume.html', {'resume': resume, }, context_instance=RequestContext(request))
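# Hypothetical URLconf wiring for the views above (illustrative only; the
# project's real urls.py is not part of this file):
#   from django.conf.urls import patterns, url
#   urlpatterns = patterns('resumes.views',
#       url(r'^$', 'list_resume'),
#       url(r'^(?P<resume_id>\d+)/$', 'view_resume'),
#       url(r'^user/(?P<user_id>\d+)/$', 'view_resume'),
#       url(r'^(?P<resume_id>\d+)/export/$', 'export_resume'),
#   )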
|
{
"content_hash": "60d008edfe752fb836cef9a957fb9f78",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 116,
"avg_line_length": 33.13793103448276,
"alnum_prop": 0.7148803329864725,
"repo_name": "juanyunis/django-resumes",
"id": "4ffa0b90272f04e3891bac9cba41988bb4c7bf94",
"size": "961",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "resumes/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "33028"
}
],
"symlink_target": ""
}
|
"""
productporter.app
~~~~~~~~~~~~~~~~~
manages the app creation and configuration process
:copyright: (c) 2014 by the ProductPorter Team.
:license: BSD, see LICENSE for more details.
"""
import os
import logging
import datetime
from flask import Flask
from productporter.weixin.views import weixin
from productporter.product.views import product
from productporter.user.views import user
from productporter.user.models import Guest, User
from productporter.utils.helper import render_markup, root_url_prefix, \
is_online, can_translate, can_comment, can_review, can_report, \
can_topic, can_setgroup, format_date, is_moderator, is_admin
# extensions
from productporter.extensions import db, cache, themes, login_manager, migrate
# default config
from productporter.configs.default import DefaultConfig
from flask.ext.login import current_user
def create_app(config=None):
"""
Creates the app.
"""
static_url_path = ''
instance_path = None
if config is None:
static_url_path = DefaultConfig.ROOT_URL_PREFIX + '/static'
instance_path = DefaultConfig.INSTANCE_PATH
else:
static_url_path = config.ROOT_URL_PREFIX + '/static'
instance_path = config.INSTANCE_PATH
# Initialize the app
app = Flask("productporter",
static_url_path=static_url_path,
instance_path=instance_path)
# Use the default config and override it afterwards
app.config.from_object('productporter.configs.default.DefaultConfig')
# Update the config
app.config.from_object(config)
configure_blueprints(app)
configure_extensions(app)
configure_template_filters(app)
configure_context_processors(app)
configure_before_handlers(app)
configure_errorhandlers(app)
configure_logging(app)
return app
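# Minimal sketch of how create_app() is typically consumed (illustrative only,
# not part of this module):
#   from productporter.app import create_app
#   from productporter.configs.default import DefaultConfig
#   app = create_app(DefaultConfig)
#   app.run(debug=True)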
def configure_blueprints(app):
"""
Configures the blueprints
"""
app.register_blueprint(weixin, url_prefix=root_url_prefix(app, 'WEIXIN_URL_PREFIX'))
app.register_blueprint(product, url_prefix=root_url_prefix(app, 'PRODUCT_URL_PREFIX'))
app.register_blueprint(user, url_prefix=root_url_prefix(app, 'USER_URL_PREFIX'))
def configure_extensions(app):
"""
Configures the extensions
"""
# Flask-SQLAlchemy
db.init_app(app)
# Flask-Migrate
migrate.init_app(app, db)
# Flask-Cache
cache.init_app(app)
# Flask-Themes
themes.init_themes(app, app_identifier="productporter")
# Flask-Login
login_manager.login_view = app.config["LOGIN_VIEW"]
login_manager.refresh_view = app.config["REAUTH_VIEW"]
login_manager.anonymous_user = Guest
@login_manager.user_loader
def load_user(id):
"""
Loads the user. Required by the `login` extension
"""
u = db.session.query(User).filter(User.id == id).first()
if u:
return u
else:
return None
login_manager.init_app(app)
def configure_template_filters(app):
"""
Configures the template filters
"""
app.jinja_env.filters['markup'] = render_markup
app.jinja_env.filters['format_date'] = format_date
app.jinja_env.filters['is_online'] = is_online
app.jinja_env.filters['can_translate'] = can_translate
app.jinja_env.filters['can_comment'] = can_comment
app.jinja_env.filters['can_review'] = can_review
app.jinja_env.filters['can_report'] = can_report
app.jinja_env.filters['can_topic'] = can_topic
app.jinja_env.filters['can_setgroup'] = can_setgroup
app.jinja_env.filters['is_moderator'] = is_moderator
app.jinja_env.filters['is_admin'] = is_admin
def configure_context_processors(app):
"""
Configures the context processors
"""
pass
def configure_before_handlers(app):
"""
Configures the before request handlers
"""
@app.before_request
def update_lastseen():
"""
        Updates `lastseen` before every request if the user is authenticated
"""
if current_user.is_authenticated():
current_user.lastseen = datetime.datetime.utcnow()
db.session.add(current_user)
db.session.commit()
def configure_errorhandlers(app):
"""
Configures the error handlers
"""
# @app.errorhandler(403)
# def forbidden_page(error):
# return render_template("errors/forbidden_page.html"), 403
# @app.errorhandler(404)
# def page_not_found(error):
# return render_template("errors/page_not_found.html"), 404
# @app.errorhandler(500)
# def server_error_page(error):
# return render_template("errors/server_error.html"), 500
def configure_logging(app):
"""
Configures logging.
"""
logs_folder = os.path.join(app.instance_path, "logs")
from logging.handlers import SMTPHandler
formatter = logging.Formatter(
'%(asctime)s %(levelname)s: %(message)s '
'[in %(pathname)s:%(lineno)d]')
info_log = os.path.join(logs_folder, app.config['INFO_LOG'])
info_file_handler = logging.handlers.RotatingFileHandler(
info_log,
maxBytes=100000,
backupCount=10
)
info_file_handler.setLevel(logging.INFO)
info_file_handler.setFormatter(formatter)
app.logger.addHandler(info_file_handler)
error_log = os.path.join(logs_folder, app.config['ERROR_LOG'])
error_file_handler = logging.handlers.RotatingFileHandler(
error_log,
maxBytes=100000,
backupCount=10
)
error_file_handler.setLevel(logging.ERROR)
error_file_handler.setFormatter(formatter)
app.logger.addHandler(error_file_handler)
if app.config["SEND_LOGS"]:
mail_handler = \
SMTPHandler(app.config['MAIL_SERVER'],
app.config['MAIL_SENDER'],
app.config['ADMINS'],
'ProductPorter application error',
(
app.config['MAIL_USERNAME'],
app.config['MAIL_PASSWORD'],
))
mail_handler.setLevel(logging.ERROR)
mail_handler.setFormatter(logging.Formatter('''
Message type: %(levelname)s
Location: %(pathname)s:%(lineno)d
Module: %(module)s
Function: %(funcName)s
Time: %(asctime)s
Message:
%(message)s
'''))
app.logger.addHandler(mail_handler)
|
{
"content_hash": "31e1e99bc854242de89c301afca0d550",
"timestamp": "",
"source": "github",
"line_count": 219,
"max_line_length": 90,
"avg_line_length": 29.726027397260275,
"alnum_prop": 0.6351766513056836,
"repo_name": "kamidox/weixin_producthunt",
"id": "db3f53a79f25e5bf5e880d3d0adb6dbef71833f7",
"size": "6534",
"binary": false,
"copies": "1",
"ref": "refs/heads/v2",
"path": "productporter/app.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "60290"
},
{
"name": "JavaScript",
"bytes": "458581"
},
{
"name": "Makefile",
"bytes": "474"
},
{
"name": "PHP",
"bytes": "38"
},
{
"name": "Python",
"bytes": "144395"
}
],
"symlink_target": ""
}
|
doc = """\
Storage Compute manager
"""
import sys
from gevent import monkey
from gevent.subprocess import Popen, PIPE
monkey.patch_all()
import gevent
import os
import glob
import socket
import subprocess
import json
import time
import datetime
import pdb
import re
import argparse
import ConfigParser
import signal
import syslog
global HOME_ENV_PATH
HOME_ENV_PATH = '/root'
from stats_daemon.sandesh.storage.ttypes import *
from pysandesh.sandesh_base import *
from sandesh_common.vns.ttypes import Module, NodeType
from sandesh_common.vns.constants import ModuleNames, NodeTypeNames,\
Module2NodeType, INSTANCE_ID_DEFAULT
from sandesh_common.vns.constants import *
def usage():
print doc
sys.exit(255)
'''
EventManager class is used to create events and send them to the \
sandesh server (opserver)
'''
class osdMap:
osd_disk = ''
osd = ''
osd_journal = ''
class diskUsage:
disk = ''
disk_used = ''
disk_avail = ''
disk_size = ''
class prevOsdLatency:
# primary osd read latency sum
prev_op_rsum = 0
# replica osd read latency sum
prev_subop_rsum = 0
# primary osd total read latency samples
prev_op_rcount = 0
# replica osd total read latency samples
prev_subop_rcount = 0
# primary osd write latency sum
prev_op_wsum = 0
# replica osd write latency sum
prev_subop_wsum = 0
# primary osd total write latency samples
prev_op_wcount = 0
# replica osd total write latency samples
prev_subop_wcount = 0
class EventManager:
rules_data = []
headers = dict()
process_state_db = {}
prev_list = []
def __init__(self, node_type):
self.stdin = sys.stdin
self.stdout = sys.stdout
self.stderr = sys.stderr
self.max_cores = 4
self.max_old_cores = 3
self.max_new_cores = 1
self.node_type = node_type
self._hostname = socket.gethostname()
self.dict_of_osds = dict()
self.prev_latency_dict = dict()
self.units = self.init_units()
self.curr_read_kbytes = 0
self.curr_write_kbytes = 0
self.curr_reads = 0
self.curr_writes = 0
self.curr_read_latency = 0
self.curr_write_latency = 0
pattern = 'rm -rf ceph.conf; ln -s /etc/ceph/ceph.conf ceph.conf'
self.call_subprocess(pattern)
def exec_local(self, arg):
ret = Popen('%s' %(arg), shell=True,
stdout=PIPE).stdout.read()
ret = ret[:-1]
return ret
def cleanup_pid(self, pid):
pid_list = []
proc_run = self.exec_local('ps -ef | grep -w %s | grep -v grep | wc -l'
%(pid))
if proc_run != '0':
my_pid = os.getpid()
procs = self.exec_local('ps -ef | grep -w %s | grep -v grep'
%(pid))
lines = procs.splitlines()
for line in lines:
pid = line.split()[1]
pid_list.append(pid)
while len(pid_list) != 0:
for pid in pid_list:
running = self.exec_local('ps -ef | grep -w %s | \
grep -v grep | awk \'{print $2}\' | \
grep -w %s | wc -l' %(pid, pid))
if running != '0':
self.exec_local('kill -9 %s' %(pid))
else:
pid_list.remove(pid)
time.sleep(5)
#end cleanup_pid
def init_units(self):
units = dict();
units['K'] = 1024
units['M'] = int(units['K']) * 1024
units['G'] = int(units['M']) * 1024
units['T'] = int(units['G']) * 1024
units['P'] = int(units['T']) * 1024
return units
'''
    This function is a wrapper around the subprocess call. It times out
    after 5 seconds of no response from the subprocess, and the offending
    command is logged
'''
def call_subprocess(self, cmd):
times = datetime.datetime.now()
# latest 14.0.4 requires "HOME" env variable to be passed
# copy current environment variables and add "HOME" variable
# pass the newly created environment variable to Popen subprocess
env_home = os.environ.copy()
env_home['HOME'] = HOME_ENV_PATH
# stdout and stderr are redirected.
        # stderr not used (stdout validation is done so a stderr check
        # is not needed)
try:
p = Popen(cmd, stdout=PIPE, \
stderr=PIPE, shell=True, env=env_home)
while p.poll() is None:
gevent.sleep(0.1)
now = datetime.datetime.now()
diff = now - times
if diff.seconds > 5:
#os.kill(p.pid, signal.SIGKILL)
os.waitpid(-1, os.WNOHANG)
message = "command:" + cmd + " ---> hanged"
ssdlog = StorageStatsDaemonLog(message = message)
self.call_send(ssdlog)
self.cleanup_pid(p.pid)
return None
except:
pass
return None
# stdout is used
return p.stdout.read()
def call_send(self, send_inst):
#sys.stderr.write('sending UVE:' +str(send_inst))
send_inst.send()
'''
    This function reads the ceph cluster status.
    It parses the health status output and extracts the error reason, if any.
    A StorageCluster object is created and populated, and a
    UVE send call is invoked to send the StorageCluster object
'''
def create_and_send_cluster_stats(self, mon_id):
res = self.call_subprocess('/usr/bin/ceph health detail | grep -v ^pg')
if res is None:
return
cluster_id = self.exec_local('ceph --admin-daemon \
/var/run/ceph/ceph-mon.%s.asok quorum_status | \
grep fsid | \
cut -d \'"\' -f4' %(mon_id))
cluster_stats = StorageCluster()
cluster_stats.cluster_id = cluster_id
cluster_stats.name = cluster_id
status = res.split(' ')[0]
status_count = res.count(' ')
if status_count >= 1:
detail_info = res.split(' ', 1)[1]
summary_info = res.split(' ', 1)[1].splitlines()[0]
osd_stat = self.call_subprocess('/usr/bin/ceph osd dump | \
egrep -w \'(down|out)\' | cut -d \' \' -f 1')
multiple = 0
osd_string = ''
if osd_stat != '':
osd_entries = osd_stat.splitlines()
for osd_entry in osd_entries:
if osd_entry != '':
if osd_string == '':
osd_string = osd_entry
else:
multiple = 1
osd_string = osd_string + ', ' + osd_entry
if multiple == 0:
detail_info = detail_info + (' OSD %s is down' %(osd_string))
summary_info = summary_info + ('; OSD %s is down' %(osd_string))
else:
detail_info = detail_info + (' OSDs %s are down' %(osd_string))
summary_info = summary_info + ('; OSDs %s are down' %(osd_string))
detail_info = re.sub('\n', ';', detail_info)
else:
status = res.splitlines()[0]
detail_info = ''
summary_info = ''
cluster_info = ClusterInfo()
if status == 'HEALTH_OK':
cluster_info.status = 0
elif status == 'HEALTH_WARN':
cluster_info.status = 1
elif status == 'HEALTH_ERR':
cluster_info.status = 2
cluster_info.health_detail = detail_info
cluster_info.health_summary = summary_info
cluster_stats.info_stats = [cluster_info]
cluster_stats_trace = StorageClusterTrace(data=cluster_stats)
self.call_send(cluster_stats_trace)
'''
    This function reads the ceph rados statistics.
    It parses the statistics output and extracts the read_cnt/read_bytes and
    write_cnt/write_bytes values. A ComputeStoragePool object is created, all
    of the above statistics are assigned to it, and a
    UVE send call is invoked to send the ComputeStoragePool object
'''
def create_and_send_pool_stats(self):
res = self.call_subprocess('/usr/bin/rados df')
if res is None:
return
arr = res.splitlines()
for line in arr:
if line != arr[0]:
result = re.sub(
'\s+', ' ', line).strip()
arr1 = result.split()
READS = 7
READKBS = 8
WRITES = 9
WRITEKBS = 10
if len(arr1) == 10:
READS = 6
READKBS = 7
WRITES = 8
WRITEKBS = 9
if arr1[0] != "total":
cs_pool = ComputeStoragePool()
cs_pool.name = self._hostname + ':' + arr1[0]
pool_stats = PoolStats()
pool_stats.reads = int(arr1[READS])
pool_stats.read_kbytes = int(arr1[READKBS])
pool_stats.writes = int(arr1[WRITES])
pool_stats.write_kbytes = int(arr1[WRITEKBS])
cs_pool.info_stats = [pool_stats]
pool_stats_trace = ComputeStoragePoolTrace(data=cs_pool)
self.call_send(pool_stats_trace)
def populate_osd_total_stats(self, osdname, osd_stats, prev_osd_latency):
ceph_name = "ceph-" + osdname + ".asok"
cmd = "ceph --admin-daemon /var/run/ceph/" + ceph_name + \
" perf dump | egrep -w \"\\\"op_w\\\":|\\\"" + \
"op_r\\\":|\\\"subop_r\\\":|\\\"subop_w\\\":|\\\"" + \
"op_r_out_bytes\\\":|\\\"subop_r_out_bytes\\\":|" + \
"\\\"op_w_in_bytes\\\":|\\\"subop_w_in_bytes\\\":\""
try:
res1 = self.call_subprocess(cmd)
if res1 is None:
return False
osd_stats.stats_time = datetime.datetime.now()
arr1 = res1.splitlines()
for line1 in arr1:
result = re.sub('\s+', ' ', line1).strip()
line2 = result.split(":")
if len(line2) != 0:
if line2[0].find('subop_r_out_bytes') != -1 or \
line2[0].find('op_r_out_bytes') != -1:
osd_stats.read_kbytes += int(
line2[1].rstrip(",").strip(' ')) / 1024
elif line2[0].find('subop_w_in_bytes') != -1 or \
line2[0].find('op_w_in_bytes') != -1:
osd_stats.write_kbytes += int(
line2[1].rstrip(",").strip(' ')) / 1024
elif line2[0].find('subop_r') != -1 or \
line2[0].find('op_r') != -1:
osd_stats.reads += int(
line2[1].rstrip(",").strip(' '))
elif line2[0].find('subop_w') != -1 or \
line2[0].find('op_w') != -1:
osd_stats.writes += int(
line2[1].rstrip(",").strip(' '))
except:
pass
res2 = self.populate_osd_latency_stats(osdname, osd_stats,
prev_osd_latency)
if res2 is None:
return False
return True
def diff_read_kbytes(self, line, osd_stats, temp_osd_stats,
osd_prev_stats, diff_time):
# 'line' format : " xyz,"
self.curr_read_kbytes += int(line.rstrip(",").strip(' ')) / 1024
temp_osd_stats.read_kbytes = self.curr_read_kbytes
osd_stats.read_kbytes = self.curr_read_kbytes - \
osd_prev_stats.read_kbytes
osd_stats.read_kbytes = int(osd_stats.read_kbytes / diff_time)
def diff_write_kbytes(self, line, osd_stats, temp_osd_stats,
osd_prev_stats, diff_time):
# 'line' format : " xyz,"
self.curr_write_kbytes += int(line.rstrip(",").strip(' ')) / 1024
temp_osd_stats.write_kbytes = self.curr_write_kbytes
osd_stats.write_kbytes = self.curr_write_kbytes - \
osd_prev_stats.write_kbytes
osd_stats.write_kbytes = int(osd_stats.write_kbytes / diff_time)
def diff_read_cnt(self, line, osd_stats, temp_osd_stats,
osd_prev_stats, diff_time):
# 'line' format : " xyz,"
self.curr_reads += int(line.rstrip(",").strip(' '))
temp_osd_stats.reads = self.curr_reads
osd_stats.reads = self.curr_reads - \
osd_prev_stats.reads
osd_stats.reads = int(osd_stats.reads / diff_time)
def diff_write_cnt(self, line, osd_stats, temp_osd_stats,
osd_prev_stats, diff_time):
# 'line' format : " xyz,"
self.curr_writes += int(line.rstrip(",").strip(' '))
temp_osd_stats.writes = self.curr_writes
osd_stats.writes = self.curr_writes - \
osd_prev_stats.writes
osd_stats.writes = int(osd_stats.writes / diff_time)
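    # Worked example of the per-interval rate computed by the diff_* helpers
    # above (numbers are hypothetical): if the previous sample recorded
    # writes=1000, the current cumulative counter is 1600 and 20.0 seconds
    # elapsed, then osd_stats.writes = int((1600 - 1000) / 20.0) = 30 ops/s.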
def populate_osd_diff_stats(self, osdname, osd_stats,
temp_osd_stats, osd_prev_stats):
ceph_name = "ceph-" + osdname + ".asok"
cmd = "ceph --admin-daemon /var/run/ceph/" + ceph_name + \
" perf dump | egrep -w \"\\\"op_w\\\":|\\\"" + \
"op_r\\\":|\\\"subop_r\\\":|\\\"subop_w\\\":|\\\"" + \
"op_r_out_bytes\\\":|\\\"subop_r_out_bytes\\\":|" + \
"\\\"op_w_in_bytes\\\":|\\\"subop_w_in_bytes\\\":\""
try:
res1 = self.call_subprocess(cmd)
if res1 is None:
return False
stats_time = datetime.datetime.now()
diff_time = stats_time - osd_prev_stats.stats_time
fdiff_time = float(diff_time.seconds) + \
float(diff_time.microseconds)/1000000
temp_osd_stats.stats_time = stats_time
arr1 = res1.splitlines()
for line1 in arr1:
result = re.sub('\s+', ' ', line1).strip()
line2 = result.split(":")
if len(line2) != 0:
if line2[0].find('subop_r_out_bytes') != -1 or \
line2[0].find('op_r_out_bytes') != -1:
self.diff_read_kbytes(line2[1],
osd_stats,
temp_osd_stats,
osd_prev_stats,
fdiff_time)
elif line2[0].find('subop_w_in_bytes') != -1 or \
line2[0].find('op_w_in_bytes') != -1:
self.diff_write_kbytes(line2[1],
osd_stats,
temp_osd_stats,
osd_prev_stats,
fdiff_time)
elif line2[0].find('subop_r') != -1 or \
line2[0].find('op_r') != -1:
self.diff_read_cnt(line2[1],
osd_stats,
temp_osd_stats,
osd_prev_stats,
fdiff_time)
elif line2[0].find('subop_w') != -1 or \
line2[0].find('op_w') != -1:
self.diff_write_cnt(line2[1],
osd_stats,
temp_osd_stats,
osd_prev_stats,
fdiff_time)
except:
pass
return True
def compute_read_latency(self, arr, osd_stats,
prev_osd_latency, op_flag):
# 'line' format : ['op_read_latency', 'avgcount', '2822,', 'sum', '240.2423},']
avgcount = int(arr[2].rstrip(","))
# 'arr' format : "'sum': xyz.yzw},"
sum_rlatency = int(float(arr[4].rstrip("},")))
# sum_rlatency is in seconds
# multiplied by 1000 to convert seconds to milliseconds
if avgcount != 0:
# op_flag = 1 indicates replica osd read latency
if op_flag == 1:
if(avgcount > prev_osd_latency.prev_subop_rcount):
osd_stats.op_r_latency += ((sum_rlatency * 1000) - \
(prev_osd_latency.prev_subop_rsum * 1000)) / \
(avgcount - prev_osd_latency.prev_subop_rcount)
prev_osd_latency.prev_subop_rsum = sum_rlatency
prev_osd_latency.prev_subop_rcount = avgcount
# op_flag = 2 indicates primary osd read latency
if op_flag == 2:
if(avgcount > prev_osd_latency.prev_op_rcount):
osd_stats.op_r_latency += ((sum_rlatency * 1000) - \
(prev_osd_latency.prev_op_rsum * 1000)) / \
(avgcount - prev_osd_latency.prev_op_rcount)
prev_osd_latency.prev_op_rsum = sum_rlatency
prev_osd_latency.prev_op_rcount = avgcount
else:
osd_stats.op_r_latency += 0
# op_flag = 1 indicates replica osd read latency
if op_flag == 1:
prev_osd_latency.prev_subop_rsum = 0
prev_osd_latency.prev_subop_rcount = 0
# op_flag = 2 indicates primary osd read latency
if op_flag == 2:
prev_osd_latency.prev_op_rsum = 0
prev_osd_latency.prev_op_rcount = 0
def compute_write_latency(self, arr, osd_stats,
prev_osd_latency, op_flag):
# 'line' format : ['op_read_latency', 'avgcount', '2822,', 'sum', '240.2423},']
avgcount = int(arr[2].rstrip(","))
# 'arr' format : "'sum': xyz.yzw},"
sum_wlatency = int(float(arr[4].rstrip("},")))
# sum_wlatency is in seconds
# multiplied by 1000 to convert seconds to milliseconds
if avgcount != 0:
# op_flag = 1 indicates replica osd write latency
if op_flag == 1:
if(avgcount > prev_osd_latency.prev_subop_wcount):
osd_stats.op_w_latency += ((sum_wlatency * 1000) - \
(prev_osd_latency.prev_subop_wsum * 1000)) / \
(avgcount - prev_osd_latency.prev_subop_wcount)
prev_osd_latency.prev_subop_wsum = sum_wlatency
prev_osd_latency.prev_subop_wcount = avgcount
# op_flag = 2 indicates primary osd write latency
if op_flag == 2:
if(avgcount > prev_osd_latency.prev_op_wcount):
osd_stats.op_w_latency += ((sum_wlatency * 1000) - \
(prev_osd_latency.prev_op_wsum * 1000)) / \
(avgcount - prev_osd_latency.prev_op_wcount)
prev_osd_latency.prev_op_wsum = sum_wlatency
prev_osd_latency.prev_op_wcount = avgcount
else:
osd_stats.op_w_latency += 0
# op_flag = 1 indicates replica osd write latency
if op_flag == 1:
prev_osd_latency.prev_subop_wsum = 0
prev_osd_latency.prev_subop_wcount = 0
# op_flag = 2 indicates primary osd write latency
if op_flag == 2:
prev_osd_latency.prev_op_wsum = 0
prev_osd_latency.prev_op_wcount = 0
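    # Worked example of the latency delta above (hypothetical numbers): if the
    # previous sample had sum=100s over 2000 ops and the current perf dump
    # reports sum=103s over 2500 ops, the value added for this interval is
    # ((103 - 100) * 1000) / (2500 - 2000) = 6 ms per op.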
def populate_osd_latency_stats(self, osdname, osd_stats, prev_osd_latency):
lat_list={"op_r_latency","subop_r_latency","op_w_latency","subop_w_latency"}
for entry in lat_list:
ceph_name = "ceph-" + osdname + ".asok"
cmd = ('ceph --admin-daemon /var/run/ceph/%s perf dump | \
egrep -A5 -w %s |tr \"\\\"\" \" \" | \
awk \'BEGIN{start=0;title=\"\";avgcount=\"\";sum=\"\"} \
{i=1;while (i<=NF) {if($i == \"{\"){start=1} \
if($i == \"}\" && start==1){break} \
if($i==\"%s\"){title=$i} \
if($i==\"avgcount\"){i=i+2;avgcount=$i} \
if($i==\"sum\"){i=i+2;sum=$i}i=i+1}} \
END{print title \" avgcount \" avgcount \" sum \" sum}\''
%(ceph_name, entry, entry))
res = self.call_subprocess(cmd)
if res is None:
return False
res.lstrip(' ')
line = res.split(' ')
# subop_r_latency: replica osd read latency value
if line[0] == 'subop_r_latency':
self.compute_read_latency(line,
osd_stats, prev_osd_latency, 1)
# op_r_latency: primary osd read latency value
elif line[0] == 'op_r_latency':
self.compute_read_latency(line,
osd_stats, prev_osd_latency, 2)
# subop_w_latency: replica osd write latency value
elif line[0] == 'subop_w_latency':
self.compute_write_latency(line,
osd_stats, prev_osd_latency, 1)
# op_w_latency: primary osd write latency value
elif line[0] == 'op_w_latency':
self.compute_write_latency(line,
osd_stats, prev_osd_latency, 2)
return True
'''
    This function checks whether an osd is active and, if so, parses the
    output of osd dump. A ComputeStorageOsd object is created, statistics
    are assigned to it, and a UVE send call is invoked to send the
    ComputeStorageOsd object
'''
def create_and_send_osd_stats(self):
res = self.call_subprocess('ls /var/lib/ceph/osd')
if res is None:
return
arr = res.splitlines()
linecount = 0
for line in arr:
no_prev_osd = 0
cmd = "cat /var/lib/ceph/osd/" + arr[linecount] + "/active"
is_active = self.call_subprocess(cmd)
if is_active is None:
linecount = linecount + 1
continue
#instantiate osd and its state
cs_osd = ComputeStorageOsd()
cs_osd_state = ComputeStorageOsdState()
osd_stats = OsdStats()
temp_osd_stats = OsdStats()
prev_osd_latency = prevOsdLatency()
#initialize fields
osd_stats.reads = 0
osd_stats.writes = 0
osd_stats.read_kbytes = 0
osd_stats.write_kbytes = 0
osd_stats.op_r_latency = 0
osd_stats.op_w_latency = 0
self.curr_read_kbytes = 0
self.curr_write_kbytes = 0
self.curr_reads = 0
self.curr_writes = 0
self.curr_read_latency = 0
self.curr_write_latency = 0
# osd state is active and not down
if is_active == "ok\n":
cs_osd_state.status = "active"
num = arr[linecount].split('-')[1]
uuid = self.exec_local('ceph --admin-daemon \
/var/run/ceph/ceph-osd.%s.asok status 2>/dev/null | \
grep osd_fsid | awk \'{print $2}\' | \
cut -d \'"\' -f 2' %(num))
if uuid is '':
linecount = linecount + 1
continue
cs_osd.uuid = uuid.rstrip("\n")
osd_prev_stats = self.dict_of_osds.get(
cs_osd.uuid)
osd_name = "osd." + num
cs_osd.name = self._hostname + ':' + osd_name
if osd_prev_stats is None:
no_prev_osd = 1
rval = self.populate_osd_total_stats(osd_name,
osd_stats,
prev_osd_latency)
else:
prev_osd_latency = self.prev_latency_dict.get(
cs_osd.uuid)
rval = self.populate_osd_diff_stats(osd_name, osd_stats,
temp_osd_stats,
osd_prev_stats)
if rval == False:
linecount = linecount + 1
continue
rval = self.populate_osd_latency_stats(osd_name,
osd_stats,
prev_osd_latency)
if rval == False:
linecount = linecount + 1
continue
else:
cs_osd_state.status = "inactive"
if no_prev_osd == 0:
cs_osd.info_stats = [osd_stats]
cs_osd.info_state = cs_osd_state
osd_stats_trace = ComputeStorageOsdTrace(
data=cs_osd)
self.call_send(osd_stats_trace)
self.dict_of_osds[
cs_osd.uuid] = temp_osd_stats
self.prev_latency_dict[cs_osd.uuid] = prev_osd_latency
else:
self.dict_of_osds[cs_osd.uuid] = osd_stats
self.prev_latency_dict[cs_osd.uuid] = prev_osd_latency
linecount = linecount + 1
def find_osdmaplist(self, osd_map, disk):
for osdmap_obj in osd_map:
if osdmap_obj.osd_disk.find(disk) != -1:
return 'y'
return 'n'
def find_diskusagelist(self, disk_usage, disk):
for disk_usage_obj in disk_usage:
if disk_usage_obj.disk.find(disk) != -1:
return disk_usage_obj
return None
def compute_usage(self, disk_usage_obj, unit):
if unit.isalpha():
return long(float(disk_usage_obj.
disk_used.strip(unit)) * self.units[unit])
return 0
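    # Worked example for compute_usage() (illustrative): with disk_used='4.5G'
    # and unit='G' the result is long(4.5 * 1024**3) = 4831838208 bytes; a
    # non-alphabetic unit falls through and 0 is returned.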
'''
    This function parses the output of iostat and assigns statistics to a
    ComputeStorageDisk object.
    A UVE send call is invoked to send the ComputeStorageDisk object
'''
def create_and_send_disk_stats(self):
# iostat to get the raw disk list
cmd = 'iostat -x 4 2 | awk \'{arr[NR]=$0} \
END{for(i=(NR/2)+1;i<NR;i++) { print arr[i] }}\''
res = self.call_subprocess(cmd)
if res is None:
return
disk_list = res.splitlines()
# osd disk list to get the mapping of osd to
# raw disk
# cd to /etc/ceph so that ceph-deploy command logs output to
# /var/log/ceph and not /root
# pattern = 'cd /etc/ceph && ceph-deploy disk list %s 2>&1' \
# %(self._hostname)
pattern = 'cat /proc/mounts | grep "\/var\/lib\/ceph\/osd\/ceph"'
res = self.call_subprocess(pattern)
if res is None:
return
osd_list = res.splitlines()
osd_map = []
for line in osd_list:
arr1 = line.split()
osd_map_obj = osdMap()
osd_map_obj.osd_disk = arr1[0]
osd_map_obj.osd = 'osd.%s' %(arr1[1].split('-')[1])
cmd = 'ls -l %s/journal | awk \'{print $11}\'' %(arr1[1])
journal_uuid = self.call_subprocess(cmd).strip('\r\n')
cmd = 'ls -l %s | awk \'{print $11}\'' %(journal_uuid)
journal_disk = self.call_subprocess(cmd).strip('\r\n')
if journal_disk[0] != '/':
journal_disk = '/dev/%s' %(journal_disk.split('/')[2])
osd_map_obj.osd_journal = journal_disk
osd_map.append(osd_map_obj)
# df used to get the free space of all disks
res1 = self.call_subprocess('df -hl')
if res1 is None:
return
df_list = res1.splitlines()
disk_usage = []
for line in df_list:
if line.find('sda') != -1 or \
line.find('vda') != -1:
# replace multiple spaces to single
# space here
result = re.sub(
'\s+', ' ', line).strip()
arr1 = result.split()
if arr1[5] == '/':
disk_usage_obj = diskUsage()
disk_usage_obj.disk = arr1[0]
disk_usage_obj.disk_size = arr1[1]
disk_usage_obj.disk_used = arr1[2]
disk_usage_obj.disk_avail = arr1[3]
disk_usage.append(disk_usage_obj)
elif line.find('sd') != -1 or \
line.find('vd') != -1:
# replace multiple spaces to single
# space here
result = re.sub(
'\s+', ' ', line).strip()
arr1 = result.split()
disk_usage_obj = diskUsage()
disk_usage_obj.disk = arr1[0]
disk_usage_obj.disk_size = arr1[1]
disk_usage_obj.disk_used = arr1[2]
disk_usage_obj.disk_avail = arr1[3]
disk_usage.append(disk_usage_obj)
# create a dictionary of disk_name: model_num + serial_num
new_dict = dict()
resp = self.call_subprocess('ls -l /dev/disk/by-id/')
if resp is None:
return
arr_disks = resp.splitlines()
for line in arr_disks[1:]:
resp1 = line.split()
if (resp1[-1].find('sd') != -1 and
resp1[8].find('part') == -1 and
resp1[8].find('ata') != -1):
new_dict[resp1[-1].split('/')[2]] = resp1[8]
#cs_disk1 = ComputeStorageDisk()
#cs_disk1.list_of_curr_disks = []
for line in disk_list: # this will have all rows
# replace multiple spaces to single space here
result = re.sub('\s+', ' ', line).strip()
arr1 = result.split()
if len(arr1) != 0 and (arr1[0].find('sd') != -1 or \
arr1[0].find('vd') != -1):
cs_disk = ComputeStorageDisk()
cs_disk.name = self._hostname + ':' + arr1[0]
osd_id = self.exec_local('cat /proc/mounts | \
grep %s | grep ceph | grep -v tmp | \
awk \'{print $2}\' | cut -d \'-\' -f 2'
%(arr1[0]))
if osd_id == '':
cs_disk.uuid = ''
else:
uuid = self.exec_local('ceph --admin-daemon \
/var/run/ceph/ceph-osd.%s.asok status 2>/dev/null | \
grep osd_fsid | awk \'{print $2}\' | \
cut -d \'"\' -f 2' %(osd_id))
cs_disk.uuid = uuid
#cs_disk1.list_of_curr_disks.append(arr1[0])
cs_disk.is_osd_disk = self.find_osdmaplist(osd_map, arr1[0])
disk_usage_obj = self.find_diskusagelist(disk_usage, arr1[0])
if disk_usage_obj is None:
cs_disk.current_disk_usage = 0
else:
last = disk_usage_obj.disk_used[-1:]
cs_disk.current_disk_usage = \
self.compute_usage(disk_usage_obj, last)
disk_stats = DiskStats()
disk_stats.reads = 0
disk_stats.writes = 0
disk_stats.read_kbytes = 0
disk_stats.write_kbytes = 0
disk_stats.op_r_latency = 0
disk_stats.op_w_latency = 0
if arr1[0] in new_dict:
if cs_disk.uuid == '':
cs_disk.uuid = new_dict.get(arr1[0])
disk_stats.iops = int(float(arr1[3]) + float(arr1[4]))
disk_stats.bw = int(float(arr1[5])) + \
int(float(arr1[6]))
disk_stats.reads = int(float(arr1[3]))
disk_stats.writes = int(float(arr1[4]))
disk_stats.read_kbytes = int(float(arr1[5]))
disk_stats.write_kbytes = int(float(arr1[6]))
disk_stats.op_r_latency = int(float(arr1[10]))
disk_stats.op_w_latency = int(float(arr1[11]))
cs_disk.info_stats = [disk_stats]
disk_stats_trace = ComputeStorageDiskTrace(data=cs_disk)
self.call_send(disk_stats_trace)
#cs_disk1_trace = ComputeStorageDiskTrace(data=cs_disk1)
# sys.stderr.write('sending UVE:' +str(cs_disk1_trace))
#if len(set(cs_disk1.list_of_curr_disks).
# difference(set(self.prev_list))) != 0:
# self.call_send(cs_disk1_trace)
#self.prev_list = []
#for i in xrange(0, len(cs_disk1.list_of_curr_disks)-1):
# self.prev_list.append(cs_disk1.list_of_curr_disks[i])
# send UVE for updated process state database
def send_process_state_db(self):
sleep_time = 20
# Check if the mon is the mon leader
# Send pool stats and cluster stats from the mon leader alone
mon_running = self.exec_local('ls /var/run/ceph/ceph-mon*.asok \
2> /dev/null | wc -l')
if mon_running != '0':
mon_id = self.exec_local('ls /var/run/ceph/ceph-mon*.asok | \
cut -d \'.\' -f 2')
mon_leader = self.exec_local('ceph --admin-daemon \
/var/run/ceph/ceph-mon.%s.asok quorum_status | \
grep quorum_leader_name | \
cut -d \'"\' -f4' %(mon_id))
if mon_id == mon_leader:
self.create_and_send_cluster_stats(mon_id)
self.create_and_send_pool_stats()
if self.node_type != "storage-compute":
sleep_time = 10
# Send disk stats from all the storage compute nodes
if self.node_type == "storage-compute":
self.create_and_send_osd_stats()
self.create_and_send_disk_stats()
sleep_time = 6
time.sleep(sleep_time)
return
def runforever(self, test=False):
# sleep for 6 seconds. There is a sleep in iostat for 4 seconds
# send pool/disk/osd information to db
while 1:
self.send_process_state_db()
def parse_args(args_str):
# Source any specified config/ini file
# Turn off help, so we show all options in response to -h
conf_parser = argparse.ArgumentParser(add_help=False)
conf_parser.add_argument("-c", "--conf_file",
help="Specify config file", metavar="FILE")
args, remaining_argv = conf_parser.parse_known_args(args_str.split())
defaults = {
'node_type': 'storage-compute',
'log_local': True,
'log_level': 'SYS_NOTICE',
'log_category': '',
'log_file': Sandesh._DEFAULT_LOG_FILE,
}
defaults.update(SandeshConfig.get_default_options(['DEFAULTS']))
sandesh_opts = SandeshConfig.get_default_options()
if args.conf_file:
config = ConfigParser.SafeConfigParser()
config.read([args.conf_file])
if 'DEFAULTS' in config.sections():
defaults.update(dict(config.items("DEFAULTS")))
SandeshConfig.update_options(sandesh_opts, config)
# Override with CLI options
    # Don't suppress add_help here so it will handle -h
parser = argparse.ArgumentParser(
# Inherit options from config_parser
parents=[conf_parser],
# script description with -h/--help
description=__doc__,
# Don't mess with format of description
formatter_class=argparse.RawDescriptionHelpFormatter,
)
defaults.update(sandesh_opts)
parser.set_defaults(**defaults)
parser.add_argument("--log_local", action="store_true",
help="Enable local logging of sandesh messages")
parser.add_argument("--node_type",
help="node type of the storage")
parser.add_argument("--log_level",
help="Severity level for local logging of sandesh messages")
parser.add_argument("--log_category",
help="Category filter for local logging of sandesh messages")
parser.add_argument("--log_file",
help="Filename for the logs to be written to")
SandeshConfig.add_parser_arguments(parser)
args = parser.parse_args(remaining_argv)
return args
def main(args_str=None):
if not args_str:
args_str = ' '.join(sys.argv[1:])
args = parse_args(args_str)
prog = EventManager(args.node_type)
collector_addr = []
if (args.node_type == 'storage-compute' or args.node_type == 'storage-master'):
#storage node module initialization part
module = Module.STORAGE_STATS_MGR
module_name = ModuleNames[module]
node_type = Module2NodeType[module]
node_type_name = NodeTypeNames[node_type]
instance_id = INSTANCE_ID_DEFAULT
sandesh_config = SandeshConfig.from_parser_arguments(args)
sandesh_global.init_generator(
module_name,
socket.gethostname(),
node_type_name,
instance_id,
collector_addr,
module_name,
HttpPortStorageStatsmgr,
['stats_daemon.sandesh.storage'],
config=sandesh_config)
sandesh_global.set_logging_params(
enable_local_log=args.log_local,
category=args.log_category,
level=args.log_level,
file=args.log_file, enable_syslog=False,
syslog_facility='LOG_LOCAL0')
gevent.joinall([gevent.spawn(prog.runforever)])
if __name__ == '__main__':
main()
|
{
"content_hash": "bc85ec5e547445f0d385b33736912a1b",
"timestamp": "",
"source": "github",
"line_count": 922,
"max_line_length": 87,
"avg_line_length": 41.61279826464208,
"alnum_prop": 0.4904214559386973,
"repo_name": "nischalsheth/contrail-controller",
"id": "bc2912526b2d3d4161ee18a578255844daf51efc",
"size": "38386",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/storage/stats-daemon/stats_daemon/storage_nodemgr.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "88437"
},
{
"name": "C++",
"bytes": "23392370"
},
{
"name": "CSS",
"bytes": "531"
},
{
"name": "GDB",
"bytes": "44610"
},
{
"name": "Go",
"bytes": "45352"
},
{
"name": "HTML",
"bytes": "519766"
},
{
"name": "Java",
"bytes": "171966"
},
{
"name": "LLVM",
"bytes": "2937"
},
{
"name": "Lua",
"bytes": "20359"
},
{
"name": "Makefile",
"bytes": "12449"
},
{
"name": "Python",
"bytes": "7781013"
},
{
"name": "Roff",
"bytes": "41295"
},
{
"name": "Ruby",
"bytes": "13596"
},
{
"name": "Shell",
"bytes": "63970"
},
{
"name": "Thrift",
"bytes": "5666"
},
{
"name": "Yacc",
"bytes": "7737"
}
],
"symlink_target": ""
}
|
"""
Bottle is a fast and simple micro-framework for small web applications. It
offers request dispatching (Routes) with url parameter support, templates,
a built-in HTTP Server and adapters for many third party WSGI/HTTP-server and
template engines - all in a single file and with no dependencies other than the
Python Standard Library.
Homepage and documentation: http://bottlepy.org/
Copyright (c) 2014, Marcel Hellkamp.
License: MIT (see LICENSE for details)
"""
from __future__ import with_statement
__author__ = 'Marcel Hellkamp'
__version__ = '0.13-dev'
__license__ = 'MIT'
# The gevent server adapter needs to patch some modules before they are imported
# This is why we parse the commandline parameters here but handle them later
if __name__ == '__main__':
from optparse import OptionParser
_cmd_parser = OptionParser(usage="usage: %prog [options] package.module:app")
_opt = _cmd_parser.add_option
_opt("--version", action="store_true", help="show version number.")
_opt("-b", "--bind", metavar="ADDRESS", help="bind socket to ADDRESS.")
_opt("-s", "--server", default='wsgiref', help="use SERVER as backend.")
_opt("-p", "--plugin", action="append", help="install additional plugin/s.")
_opt("--debug", action="store_true", help="start server in debug mode.")
_opt("--reload", action="store_true", help="auto-reload on file changes.")
_cmd_options, _cmd_args = _cmd_parser.parse_args()
if _cmd_options.server and _cmd_options.server.startswith('gevent'):
import gevent.monkey; gevent.monkey.patch_all()
import base64, cgi, email.utils, functools, hmac, imp, itertools, mimetypes,\
os, re, subprocess, sys, tempfile, threading, time, warnings
from datetime import date as datedate, datetime, timedelta
from tempfile import TemporaryFile
from traceback import format_exc, print_exc
from inspect import getargspec
from unicodedata import normalize
try: from simplejson import dumps as json_dumps, loads as json_lds
except ImportError: # pragma: no cover
try: from json import dumps as json_dumps, loads as json_lds
except ImportError:
try: from django.utils.simplejson import dumps as json_dumps, loads as json_lds
except ImportError:
def json_dumps(data):
raise ImportError("JSON support requires Python 2.6 or simplejson.")
json_lds = json_dumps
# We now try to fix 2.5/2.6/3.1/3.2 incompatibilities.
# It ain't pretty but it works... Sorry for the mess.
py = sys.version_info
py3k = py >= (3, 0, 0)
py25 = py < (2, 6, 0)
py31 = (3, 1, 0) <= py < (3, 2, 0)
# Workaround for the missing "as" keyword in py3k.
def _e(): return sys.exc_info()[1]
# Workaround for the "print is a keyword/function" Python 2/3 dilemma
# and a fallback for mod_wsgi (restricts stdout/err attribute access)
try:
_stdout, _stderr = sys.stdout.write, sys.stderr.write
except IOError:
_stdout = lambda x: sys.stdout.write(x)
_stderr = lambda x: sys.stderr.write(x)
# Lots of stdlib and builtin differences.
if py3k:
import http.client as httplib
import _thread as thread
from urllib.parse import urljoin, SplitResult as UrlSplitResult
from urllib.parse import urlencode, quote as urlquote, unquote as urlunquote
urlunquote = functools.partial(urlunquote, encoding='latin1')
from http.cookies import SimpleCookie
from collections import MutableMapping as DictMixin
import pickle
from io import BytesIO
from configparser import ConfigParser
basestring = str
unicode = str
json_loads = lambda s: json_lds(touni(s))
callable = lambda x: hasattr(x, '__call__')
imap = map
def _raise(*a): raise a[0](a[1]).with_traceback(a[2])
else: # 2.x
import httplib
import thread
from urlparse import urljoin, SplitResult as UrlSplitResult
from urllib import urlencode, quote as urlquote, unquote as urlunquote
from Cookie import SimpleCookie
from itertools import imap
import cPickle as pickle
from StringIO import StringIO as BytesIO
from ConfigParser import SafeConfigParser as ConfigParser
if py25:
msg = "Python 2.5 support may be dropped in future versions of Bottle."
warnings.warn(msg, DeprecationWarning)
from UserDict import DictMixin
def next(it): return it.next()
bytes = str
else: # 2.6, 2.7
from collections import MutableMapping as DictMixin
unicode = unicode
json_loads = json_lds
eval(compile('def _raise(*a): raise a[0], a[1], a[2]', '<py3fix>', 'exec'))
# Some helpers for string/byte handling
def tob(s, enc='utf8'):
return s.encode(enc) if isinstance(s, unicode) else bytes(s)
def touni(s, enc='utf8', err='strict'):
if isinstance(s, bytes):
return s.decode(enc, err)
else:
return unicode(s or ("" if s is None else s))
tonat = touni if py3k else tob
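# Quick illustration of the byte/unicode helpers above (not part of bottle):
#   tob(u'abc')    -> b'abc'   (unicode is encoded to utf8 bytes)
#   touni(b'abc')  -> u'abc'   (bytes are decoded back to unicode)
#   touni(None)    -> u''      (None is mapped to an empty string)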
# 3.2 fixes cgi.FieldStorage to accept bytes (which makes a lot of sense).
# 3.1 needs a workaround.
if py31:
from io import TextIOWrapper
class NCTextIOWrapper(TextIOWrapper):
def close(self): pass # Keep wrapped buffer open.
# A bug in functools causes it to break if the wrapper is an instance method
def update_wrapper(wrapper, wrapped, *a, **ka):
try:
functools.update_wrapper(wrapper, wrapped, *a, **ka)
except AttributeError:
pass
# These helpers are used at module level and need to be defined first.
# And yes, I know PEP-8, but sometimes a lower-case classname makes more sense.
def depr(message, strict=False):
warnings.warn(message, DeprecationWarning, stacklevel=3)
def makelist(data): # This is just too handy
if isinstance(data, (tuple, list, set, dict)):
return list(data)
elif data:
return [data]
else:
return []
class DictProperty(object):
""" Property that maps to a key in a local dict-like attribute. """
def __init__(self, attr, key=None, read_only=False):
self.attr, self.key, self.read_only = attr, key, read_only
def __call__(self, func):
functools.update_wrapper(self, func, updated=[])
self.getter, self.key = func, self.key or func.__name__
return self
def __get__(self, obj, cls):
if obj is None: return self
key, storage = self.key, getattr(obj, self.attr)
if key not in storage: storage[key] = self.getter(obj)
return storage[key]
def __set__(self, obj, value):
if self.read_only: raise AttributeError("Read-Only property.")
getattr(obj, self.attr)[self.key] = value
def __delete__(self, obj):
if self.read_only: raise AttributeError("Read-Only property.")
del getattr(obj, self.attr)[self.key]
class cached_property(object):
""" A property that is only computed once per instance and then replaces
itself with an ordinary attribute. Deleting the attribute resets the
property. """
def __init__(self, func):
self.__doc__ = getattr(func, '__doc__')
self.func = func
def __get__(self, obj, cls):
if obj is None: return self
value = obj.__dict__[self.func.__name__] = self.func(obj)
return value
class lazy_attribute(object):
""" A property that caches itself to the class object. """
def __init__(self, func):
functools.update_wrapper(self, func, updated=[])
self.getter = func
def __get__(self, obj, cls):
value = self.getter(cls)
setattr(cls, self.__name__, value)
return value
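# Example of how cached_property behaves (illustrative; the class below is
# hypothetical and not part of bottle):
#   class Demo(object):
#       @cached_property
#       def answer(self):
#           print('computing...')
#           return 42
#   d = Demo()
#   d.answer   # prints 'computing...' once and returns 42
#   d.answer   # now served from d.__dict__, no recomputation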
###############################################################################
# Exceptions and Events ########################################################
###############################################################################
class BottleException(Exception):
""" A base class for exceptions used by bottle. """
pass
###############################################################################
# Routing ######################################################################
###############################################################################
class RouteError(BottleException):
""" This is a base class for all routing related exceptions """
class RouteReset(BottleException):
""" If raised by a plugin or request handler, the route is reset and all
plugins are re-applied. """
class RouterUnknownModeError(RouteError): pass
class RouteSyntaxError(RouteError):
""" The route parser found something not supported by this router. """
class RouteBuildError(RouteError):
""" The route could not be built. """
def _re_flatten(p):
""" Turn all capturing groups in a regular expression pattern into
non-capturing groups. """
if '(' not in p:
return p
return re.sub(r'(\\*)(\(\?P<[^>]+>|\((?!\?))',
lambda m: m.group(0) if len(m.group(1)) % 2 else m.group(1) + '(?:', p)
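# Illustrative behaviour of _re_flatten (not part of bottle): both plain and
# named capturing groups become non-capturing, e.g.
#   _re_flatten(r'(a)(?P<x>b)(?:c)')  ->  '(?:a)(?:b)(?:c)'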
class Router(object):
""" A Router is an ordered collection of route->target pairs. It is used to
efficiently match WSGI requests against a number of routes and return
the first target that satisfies the request. The target may be anything,
usually a string, ID or callable object. A route consists of a path-rule
and a HTTP method.
The path-rule is either a static path (e.g. `/contact`) or a dynamic
path that contains wildcards (e.g. `/wiki/<page>`). The wildcard syntax
and details on the matching order are described in docs:`routing`.
"""
default_pattern = '[^/]+'
default_filter = 're'
#: The current CPython regexp implementation does not allow more
#: than 99 matching groups per regular expression.
_MAX_GROUPS_PER_PATTERN = 99
def __init__(self, strict=False):
self.rules = [] # All rules in order
self._groups = {} # index of regexes to find them in dyna_routes
self.builder = {} # Data structure for the url builder
self.static = {} # Search structure for static routes
self.dyna_routes = {}
self.dyna_regexes = {} # Search structure for dynamic routes
#: If true, static routes are no longer checked first.
self.strict_order = strict
self.filters = {
're': lambda conf:
(_re_flatten(conf or self.default_pattern), None, None),
'int': lambda conf: (r'-?\d+', int, lambda x: str(int(x))),
'float': lambda conf: (r'-?[\d.]+', float, lambda x: str(float(x))),
'path': lambda conf: (r'.+?', None, None)}
def add_filter(self, name, func):
""" Add a filter. The provided function is called with the configuration
string as parameter and must return a (regexp, to_python, to_url) tuple.
The first element is a string, the last two are callables or None. """
self.filters[name] = func
rule_syntax = re.compile('(\\\\*)'
'(?:(?::([a-zA-Z_][a-zA-Z_0-9]*)?()(?:#(.*?)#)?)'
'|(?:<([a-zA-Z_][a-zA-Z_0-9]*)?(?::([a-zA-Z_]*)'
'(?::((?:\\\\.|[^\\\\>]+)+)?)?)?>))')
def _itertokens(self, rule):
offset, prefix = 0, ''
for match in self.rule_syntax.finditer(rule):
prefix += rule[offset:match.start()]
g = match.groups()
if len(g[0])%2: # Escaped wildcard
prefix += match.group(0)[len(g[0]):]
offset = match.end()
continue
if prefix:
yield prefix, None, None
name, filtr, conf = g[4:7] if g[2] is None else g[1:4]
yield name, filtr or 'default', conf or None
offset, prefix = match.end(), ''
if offset <= len(rule) or prefix:
yield prefix+rule[offset:], None, None
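    # Illustrative tokenisation (not part of bottle): for the rule
    # '/wiki/<page>' this generator yields ('/wiki/', None, None) for the
    # static prefix, then ('page', 'default', None) for the wildcard (plus a
    # trailing empty static token in this particular implementation).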
def add(self, rule, method, target, name=None):
""" Add a new rule or replace the target for an existing rule. """
anons = 0 # Number of anonymous wildcards found
keys = [] # Names of keys
pattern = '' # Regular expression pattern with named groups
filters = [] # Lists of wildcard input filters
builder = [] # Data structure for the URL builder
is_static = True
for key, mode, conf in self._itertokens(rule):
if mode:
is_static = False
if mode == 'default': mode = self.default_filter
mask, in_filter, out_filter = self.filters[mode](conf)
if not key:
pattern += '(?:%s)' % mask
key = 'anon%d' % anons
anons += 1
else:
pattern += '(?P<%s>%s)' % (key, mask)
keys.append(key)
if in_filter: filters.append((key, in_filter))
builder.append((key, out_filter or str))
elif key:
pattern += re.escape(key)
builder.append((None, key))
self.builder[rule] = builder
if name: self.builder[name] = builder
if is_static and not self.strict_order:
self.static.setdefault(method, {})
self.static[method][self.build(rule)] = (target, None)
return
try:
re_pattern = re.compile('^(%s)$' % pattern)
re_match = re_pattern.match
except re.error:
raise RouteSyntaxError("Could not add Route: %s (%s)" % (rule, _e()))
if filters:
def getargs(path):
url_args = re_match(path).groupdict()
for name, wildcard_filter in filters:
try:
url_args[name] = wildcard_filter(url_args[name])
except ValueError:
raise HTTPError(400, 'Path has wrong format.')
return url_args
elif re_pattern.groupindex:
def getargs(path):
return re_match(path).groupdict()
else:
getargs = None
flatpat = _re_flatten(pattern)
whole_rule = (rule, flatpat, target, getargs)
if (flatpat, method) in self._groups:
if DEBUG:
msg = 'Route <%s %s> overwrites a previously defined route'
warnings.warn(msg % (method, rule), RuntimeWarning)
self.dyna_routes[method][self._groups[flatpat, method]] = whole_rule
else:
self.dyna_routes.setdefault(method, []).append(whole_rule)
self._groups[flatpat, method] = len(self.dyna_routes[method]) - 1
self._compile(method)
def _compile(self, method):
all_rules = self.dyna_routes[method]
comborules = self.dyna_regexes[method] = []
maxgroups = self._MAX_GROUPS_PER_PATTERN
for x in range(0, len(all_rules), maxgroups):
some = all_rules[x:x+maxgroups]
combined = (flatpat for (_, flatpat, _, _) in some)
combined = '|'.join('(^%s$)' % flatpat for flatpat in combined)
combined = re.compile(combined).match
rules = [(target, getargs) for (_, _, target, getargs) in some]
comborules.append((combined, rules))
def build(self, _name, *anons, **query):
""" Build an URL by filling the wildcards in a rule. """
builder = self.builder.get(_name)
if not builder: raise RouteBuildError("No route with that name.", _name)
try:
for i, value in enumerate(anons): query['anon%d'%i] = value
url = ''.join([f(query.pop(n)) if n else f for (n,f) in builder])
return url if not query else url+'?'+urlencode(query)
except KeyError:
raise RouteBuildError('Missing URL argument: %r' % _e().args[0])
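# Usage sketch: for a rule '/wiki/<page>' added under the name 'wikipage',
# router.build('wikipage', page='Start') returns '/wiki/Start'. Keyword
# arguments that do not fill a wildcard are appended as a query string.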
def match(self, environ):
""" Return a (target, url_agrs) tuple or raise HTTPError(400/404/405). """
verb = environ['REQUEST_METHOD'].upper()
path = environ['PATH_INFO'] or '/'
if verb == 'HEAD':
methods = ['PROXY', verb, 'GET', 'ANY']
else:
methods = ['PROXY', verb, 'ANY']
for method in methods:
if method in self.static and path in self.static[method]:
target, getargs = self.static[method][path]
return target, getargs(path) if getargs else {}
elif method in self.dyna_regexes:
for combined, rules in self.dyna_regexes[method]:
match = combined(path)
if match:
target, getargs = rules[match.lastindex - 1]
return target, getargs(path) if getargs else {}
# No matching route found. Collect alternative methods for 405 response
allowed = set([])
nocheck = set(methods)
for method in set(self.static) - nocheck:
if path in self.static[method]:
allowed.add(method)
for method in set(self.dyna_regexes) - allowed - nocheck:
for combined, rules in self.dyna_regexes[method]:
match = combined(path)
if match:
allowed.add(method)
if allowed:
allow_header = ",".join(sorted(allowed))
raise HTTPError(405, "Method not allowed.", Allow=allow_header)
# No matching route and no alternative method found. We give up
raise HTTPError(404, "Not found: " + repr(path))
class Route(object):
""" This class wraps a route callback along with route specific metadata and
configuration and applies Plugins on demand. It is also responsible for
turning a URL path rule into a regular expression usable by the Router.
"""
def __init__(self, app, rule, method, callback, name=None,
plugins=None, skiplist=None, **config):
#: The application this route is installed to.
self.app = app
#: The path-rule string (e.g. ``/wiki/:page``).
self.rule = rule
#: The HTTP method as a string (e.g. ``GET``).
self.method = method
#: The original callback with no plugins applied. Useful for introspection.
self.callback = callback
#: The name of the route (if specified) or ``None``.
self.name = name or None
#: A list of route-specific plugins (see :meth:`Bottle.route`).
self.plugins = plugins or []
#: A list of plugins to not apply to this route (see :meth:`Bottle.route`).
self.skiplist = skiplist or []
#: Additional keyword arguments passed to the :meth:`Bottle.route`
#: decorator are stored in this dictionary. Used for route-specific
#: plugin configuration and meta-data.
self.config = ConfigDict().load_dict(config)
@cached_property
def call(self):
""" The route callback with all plugins applied. This property is
created on demand and then cached to speed up subsequent requests."""
return self._make_callback()
def reset(self):
""" Forget any cached values. The next time :attr:`call` is accessed,
all plugins are re-applied. """
self.__dict__.pop('call', None)
def prepare(self):
""" Do all on-demand work immediately (useful for debugging)."""
self.call
def all_plugins(self):
""" Yield all Plugins affecting this route. """
unique = set()
for p in reversed(self.app.plugins + self.plugins):
if True in self.skiplist: break
name = getattr(p, 'name', False)
if name and (name in self.skiplist or name in unique): continue
if p in self.skiplist or type(p) in self.skiplist: continue
if name: unique.add(name)
yield p
def _make_callback(self):
callback = self.callback
for plugin in self.all_plugins():
try:
if hasattr(plugin, 'apply'):
callback = plugin.apply(callback, self)
else:
callback = plugin(callback)
except RouteReset: # Try again with changed configuration.
return self._make_callback()
if not callback is self.callback:
update_wrapper(callback, self.callback)
return callback
def get_undecorated_callback(self):
""" Return the callback. If the callback is a decorated function, try to
recover the original function. """
func = self.callback
func = getattr(func, '__func__' if py3k else 'im_func', func)
closure_attr = '__closure__' if py3k else 'func_closure'
while hasattr(func, closure_attr) and getattr(func, closure_attr):
func = getattr(func, closure_attr)[0].cell_contents
return func
def get_callback_args(self):
""" Return a list of argument names the callback (most likely) accepts
as keyword arguments. If the callback is a decorated function, try
to recover the original function before inspection. """
return getargspec(self.get_undecorated_callback())[0]
def get_config(self, key, default=None):
""" Lookup a config field and return its value, first checking the
route.config, then route.app.config."""
for conf in (self.config, self.app.config):
if key in conf: return conf[key]
return default
def __repr__(self):
cb = self.get_undecorated_callback()
return '<%s %r %r>' % (self.method, self.rule, cb)
###############################################################################
# Application Object ###########################################################
###############################################################################
class Bottle(object):
""" Each Bottle object represents a single, distinct web application and
consists of routes, callbacks, plugins, resources and configuration.
Instances are callable WSGI applications.
:param catchall: If true (default), handle all exceptions. Turn off to
let debugging middleware handle exceptions.
"""
def __init__(self, catchall=True, autojson=True):
#: A :class:`ConfigDict` for app specific configuration.
self.config = ConfigDict()
self.config._on_change = functools.partial(self.trigger_hook, 'config')
self.config.meta_set('autojson', 'validate', bool)
self.config.meta_set('catchall', 'validate', bool)
self.config['catchall'] = catchall
self.config['autojson'] = autojson
#: A :class:`ResourceManager` for application files
self.resources = ResourceManager()
self.routes = [] # List of installed :class:`Route` instances.
self.router = Router() # Maps requests to :class:`Route` instances.
self.error_handler = {}
# Core plugins
self.plugins = [] # List of installed plugins.
if self.config['autojson']:
self.install(JSONPlugin())
self.install(TemplatePlugin())
#: If true, most exceptions are caught and returned as :exc:`HTTPError`
catchall = DictProperty('config', 'catchall')
__hook_names = 'before_request', 'after_request', 'app_reset', 'config'
__hook_reversed = 'after_request'
@cached_property
def _hooks(self):
return dict((name, []) for name in self.__hook_names)
def add_hook(self, name, func):
""" Attach a callback to a hook. Three hooks are currently implemented:
before_request
Executed once before each request. The request context is
available, but no routing has happened yet.
after_request
Executed once after each request regardless of its outcome.
app_reset
Called whenever :meth:`Bottle.reset` is called.
"""
if name in self.__hook_reversed:
self._hooks[name].insert(0, func)
else:
self._hooks[name].append(func)
def remove_hook(self, name, func):
""" Remove a callback from a hook. """
if name in self._hooks and func in self._hooks[name]:
self._hooks[name].remove(func)
return True
def trigger_hook(self, __name, *args, **kwargs):
""" Trigger a hook and return a list of results. """
return [hook(*args, **kwargs) for hook in self._hooks[__name][:]]
def hook(self, name):
""" Return a decorator that attaches a callback to a hook. See
:meth:`add_hook` for details."""
def decorator(func):
self.add_hook(name, func)
return func
return decorator
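# Usage sketch (hypothetical app instance and callback): run code before
# every request.
#
#   @app.hook('before_request')
#   def log_path():
#       print('Handling %s' % request.path)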
def mount(self, prefix, app, **options):
""" Mount an application (:class:`Bottle` or plain WSGI) to a specific
URL prefix. Example::
root_app.mount('/admin/', admin_app)
:param prefix: path prefix or `mount-point`. If it ends in a slash,
that slash is mandatory.
:param app: an instance of :class:`Bottle` or a WSGI application.
All other parameters are passed to the underlying :meth:`route` call.
"""
segments = [p for p in prefix.split('/') if p]
if not segments: raise ValueError('Empty path prefix.')
path_depth = len(segments)
def mountpoint_wrapper():
try:
request.path_shift(path_depth)
rs = HTTPResponse([])
def start_response(status, headerlist, exc_info=None):
if exc_info:
_raise(*exc_info)
rs.status = status
for name, value in headerlist: rs.add_header(name, value)
return rs.body.append
body = app(request.environ, start_response)
if body and rs.body: body = itertools.chain(rs.body, body)
rs.body = body or rs.body
return rs
finally:
request.path_shift(-path_depth)
options.setdefault('skip', True)
options.setdefault('method', 'PROXY')
options.setdefault('mountpoint', {'prefix': prefix, 'target': app})
options['callback'] = mountpoint_wrapper
self.route('/%s/<:re:.*>' % '/'.join(segments), **options)
if not prefix.endswith('/'):
self.route('/' + '/'.join(segments), **options)
def merge(self, routes):
""" Merge the routes of another :class:`Bottle` application or a list of
:class:`Route` objects into this application. The routes keep their
'owner', meaning that the :data:`Route.app` attribute is not
changed. """
if isinstance(routes, Bottle):
routes = routes.routes
for route in routes:
self.add_route(route)
def install(self, plugin):
""" Add a plugin to the list of plugins and prepare it for being
applied to all routes of this application. A plugin may be a simple
decorator or an object that implements the :class:`Plugin` API.
"""
if hasattr(plugin, 'setup'): plugin.setup(self)
if not callable(plugin) and not hasattr(plugin, 'apply'):
raise TypeError("Plugins must be callable or implement .apply()")
self.plugins.append(plugin)
self.reset()
return plugin
def uninstall(self, plugin):
""" Uninstall plugins. Pass an instance to remove a specific plugin, a type
object to remove all plugins that match that type, a string to remove
all plugins with a matching ``name`` attribute or ``True`` to remove all
plugins. Return the list of removed plugins. """
removed, remove = [], plugin
for i, plugin in list(enumerate(self.plugins))[::-1]:
if remove is True or remove is plugin or remove is type(plugin) \
or getattr(plugin, 'name', True) == remove:
removed.append(plugin)
del self.plugins[i]
if hasattr(plugin, 'close'): plugin.close()
if removed: self.reset()
return removed
def reset(self, route=None):
""" Reset all routes (force plugins to be re-applied) and clear all
caches. If an ID or route object is given, only that specific route
is affected. """
if route is None: routes = self.routes
elif isinstance(route, Route): routes = [route]
else: routes = [self.routes[route]]
for route in routes: route.reset()
if DEBUG:
for route in routes: route.prepare()
self.trigger_hook('app_reset')
def close(self):
""" Close the application and all installed plugins. """
for plugin in self.plugins:
if hasattr(plugin, 'close'): plugin.close()
def run(self, **kwargs):
""" Calls :func:`run` with the same parameters. """
run(self, **kwargs)
def match(self, environ):
""" Search for a matching route and return a (:class:`Route` , urlargs)
tuple. The second value is a dictionary with parameters extracted
from the URL. Raise :exc:`HTTPError` (404/405) on a non-match."""
return self.router.match(environ)
def get_url(self, routename, **kargs):
""" Return a string that matches a named route """
scriptname = request.environ.get('SCRIPT_NAME', '').strip('/') + '/'
location = self.router.build(routename, **kargs).lstrip('/')
return urljoin(urljoin('/', scriptname), location)
def add_route(self, route):
""" Add a route object, but do not change the :data:`Route.app`
attribute."""
self.routes.append(route)
self.router.add(route.rule, route.method, route, name=route.name)
if DEBUG: route.prepare()
def route(self, path=None, method='GET', callback=None, name=None,
apply=None, skip=None, **config):
""" A decorator to bind a function to a request URL. Example::
@app.route('/hello/:name')
def hello(name):
return 'Hello %s' % name
The ``:name`` part is a wildcard. See :class:`Router` for syntax
details.
:param path: Request path or a list of paths to listen to. If no
path is specified, it is automatically generated from the
signature of the function.
:param method: HTTP method (`GET`, `POST`, `PUT`, ...) or a list of
methods to listen to. (default: `GET`)
:param callback: An optional shortcut to avoid the decorator
syntax. ``route(..., callback=func)`` equals ``route(...)(func)``
:param name: The name for this route. (default: None)
:param apply: A decorator or plugin or a list of plugins. These are
applied to the route callback in addition to installed plugins.
:param skip: A list of plugins, plugin classes or names. Matching
plugins are not installed to this route. ``True`` skips all.
Any additional keyword arguments are stored as route-specific
configuration and passed to plugins (see :meth:`Plugin.apply`).
"""
if callable(path): path, callback = None, path
plugins = makelist(apply)
skiplist = makelist(skip)
def decorator(callback):
if isinstance(callback, basestring): callback = load(callback)
for rule in makelist(path) or yieldroutes(callback):
for verb in makelist(method):
verb = verb.upper()
route = Route(self, rule, verb, callback, name=name,
plugins=plugins, skiplist=skiplist, **config)
self.add_route(route)
return callback
return decorator(callback) if callback else decorator
def get(self, path=None, method='GET', **options):
""" Equals :meth:`route`. """
return self.route(path, method, **options)
def post(self, path=None, method='POST', **options):
""" Equals :meth:`route` with a ``POST`` method parameter. """
return self.route(path, method, **options)
def put(self, path=None, method='PUT', **options):
""" Equals :meth:`route` with a ``PUT`` method parameter. """
return self.route(path, method, **options)
def delete(self, path=None, method='DELETE', **options):
""" Equals :meth:`route` with a ``DELETE`` method parameter. """
return self.route(path, method, **options)
def patch(self, path=None, method='PATCH', **options):
""" Equals :meth:`route` with a ``PATCH`` method parameter. """
return self.route(path, method, **options)
def error(self, code=500):
""" Decorator: Register an output handler for a HTTP error code"""
def wrapper(handler):
self.error_handler[int(code)] = handler
return handler
return wrapper
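# Usage sketch:
#
#   @app.error(404)
#   def not_found(error):
#       return 'Sorry, nothing here.'
#
# The handler receives the HTTPError instance and its return value is used
# as the response body.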
def default_error_handler(self, res):
return tob(template(ERROR_PAGE_TEMPLATE, e=res))
def _handle(self, environ):
path = environ['bottle.raw_path'] = environ['PATH_INFO']
if py3k:
try:
environ['PATH_INFO'] = path.encode('latin1').decode('utf8')
except UnicodeError:
return HTTPError(400, 'Invalid path string. Expected UTF-8')
try:
environ['bottle.app'] = self
request.bind(environ)
response.bind()
try:
self.trigger_hook('before_request')
route, args = self.router.match(environ)
environ['route.handle'] = route
environ['bottle.route'] = route
environ['route.url_args'] = args
return route.call(**args)
finally:
self.trigger_hook('after_request')
except HTTPResponse:
return _e()
except RouteReset:
route.reset()
return self._handle(environ)
except (KeyboardInterrupt, SystemExit, MemoryError):
raise
except Exception:
if not self.catchall: raise
stacktrace = format_exc()
environ['wsgi.errors'].write(stacktrace)
return HTTPError(500, "Internal Server Error", _e(), stacktrace)
def _cast(self, out, peek=None):
""" Try to convert the parameter into something WSGI compatible and set
correct HTTP headers when possible.
Support: False, str, unicode, dict, HTTPResponse, HTTPError, file-like,
iterable of strings and iterable of unicodes
"""
# Empty output is done here
if not out:
if 'Content-Length' not in response:
response['Content-Length'] = 0
return []
# Join lists of byte or unicode strings. Mixed lists are NOT supported
if isinstance(out, (tuple, list))\
and isinstance(out[0], (bytes, unicode)):
out = out[0][0:0].join(out) # b'abc'[0:0] -> b''
# Encode unicode strings
if isinstance(out, unicode):
out = out.encode(response.charset)
# Byte Strings are just returned
if isinstance(out, bytes):
if 'Content-Length' not in response:
response['Content-Length'] = len(out)
return [out]
# HTTPError or HTTPException (recursive, because they may wrap anything)
# TODO: Handle these explicitly in handle() or make them iterable.
if isinstance(out, HTTPError):
out.apply(response)
out = self.error_handler.get(out.status_code, self.default_error_handler)(out)
return self._cast(out)
if isinstance(out, HTTPResponse):
out.apply(response)
return self._cast(out.body)
# File-like objects.
if hasattr(out, 'read'):
if 'wsgi.file_wrapper' in request.environ:
return request.environ['wsgi.file_wrapper'](out)
elif hasattr(out, 'close') or not hasattr(out, '__iter__'):
return WSGIFileWrapper(out)
# Handle Iterables. We peek into them to detect their inner type.
try:
iout = iter(out)
first = next(iout)
while not first:
first = next(iout)
except StopIteration:
return self._cast('')
except HTTPResponse:
first = _e()
except (KeyboardInterrupt, SystemExit, MemoryError):
raise
except:
if not self.catchall: raise
first = HTTPError(500, 'Unhandled exception', _e(), format_exc())
# These are the inner types allowed in iterator or generator objects.
if isinstance(first, HTTPResponse):
return self._cast(first)
elif isinstance(first, bytes):
new_iter = itertools.chain([first], iout)
elif isinstance(first, unicode):
encoder = lambda x: x.encode(response.charset)
new_iter = imap(encoder, itertools.chain([first], iout))
else:
msg = 'Unsupported response type: %s' % type(first)
return self._cast(HTTPError(500, msg))
if hasattr(out, 'close'):
new_iter = _closeiter(new_iter, out.close)
return new_iter
def wsgi(self, environ, start_response):
""" The bottle WSGI-interface. """
try:
out = self._cast(self._handle(environ))
# rfc2616 section 4.3
if response._status_code in (100, 101, 204, 304)\
or environ['REQUEST_METHOD'] == 'HEAD':
if hasattr(out, 'close'): out.close()
out = []
start_response(response._status_line, response.headerlist)
return out
except (KeyboardInterrupt, SystemExit, MemoryError):
raise
except:
if not self.catchall: raise
err = '<h1>Critical error while processing request: %s</h1>' \
% html_escape(environ.get('PATH_INFO', '/'))
if DEBUG:
err += '<h2>Error:</h2>\n<pre>\n%s\n</pre>\n' \
'<h2>Traceback:</h2>\n<pre>\n%s\n</pre>\n' \
% (html_escape(repr(_e())), html_escape(format_exc()))
environ['wsgi.errors'].write(err)
headers = [('Content-Type', 'text/html; charset=UTF-8')]
start_response('500 INTERNAL SERVER ERROR', headers, sys.exc_info())
return [tob(err)]
def __call__(self, environ, start_response):
""" Each instance of :class:'Bottle' is a WSGI application. """
return self.wsgi(environ, start_response)
def __enter__(self):
""" Use this application as default for all module-level shortcuts. """
default_app.push(self)
return self
def __exit__(self, exc_type, exc_value, traceback):
default_app.pop()
###############################################################################
# HTTP and WSGI Tools ##########################################################
###############################################################################
class BaseRequest(object):
""" A wrapper for WSGI environment dictionaries that adds a lot of
convenient access methods and properties. Most of them are read-only.
Adding new attributes to a request actually adds them to the environ
dictionary (as 'bottle.request.ext.<name>'). This is the recommended
way to store and access request-specific data.
"""
__slots__ = ('environ', )
#: Maximum size of memory buffer for :attr:`body` in bytes.
MEMFILE_MAX = 102400
def __init__(self, environ=None):
""" Wrap a WSGI environ dictionary. """
#: The wrapped WSGI environ dictionary. This is the only real attribute.
#: All other attributes actually are read-only properties.
self.environ = {} if environ is None else environ
self.environ['bottle.request'] = self
@DictProperty('environ', 'bottle.app', read_only=True)
def app(self):
""" Bottle application handling this request. """
raise RuntimeError('This request is not connected to an application.')
@DictProperty('environ', 'bottle.route', read_only=True)
def route(self):
""" The bottle :class:`Route` object that matches this request. """
raise RuntimeError('This request is not connected to a route.')
@DictProperty('environ', 'route.url_args', read_only=True)
def url_args(self):
""" The arguments extracted from the URL. """
raise RuntimeError('This request is not connected to a route.')
@property
def path(self):
""" The value of ``PATH_INFO`` with exactly one prefixed slash (to fix
broken clients and avoid the "empty path" edge case). """
return '/' + self.environ.get('PATH_INFO','').lstrip('/')
@property
def method(self):
""" The ``REQUEST_METHOD`` value as an uppercase string. """
return self.environ.get('REQUEST_METHOD', 'GET').upper()
@DictProperty('environ', 'bottle.request.headers', read_only=True)
def headers(self):
""" A :class:`WSGIHeaderDict` that provides case-insensitive access to
HTTP request headers. """
return WSGIHeaderDict(self.environ)
def get_header(self, name, default=None):
""" Return the value of a request header, or a given default value. """
return self.headers.get(name, default)
@DictProperty('environ', 'bottle.request.cookies', read_only=True)
def cookies(self):
""" Cookies parsed into a :class:`FormsDict`. Signed cookies are NOT
decoded. Use :meth:`get_cookie` if you expect signed cookies. """
cookies = SimpleCookie(self.environ.get('HTTP_COOKIE','')).values()
return FormsDict((c.key, c.value) for c in cookies)
def get_cookie(self, key, default=None, secret=None):
""" Return the content of a cookie. To read a `Signed Cookie`, the
`secret` must match the one used to create the cookie (see
:meth:`BaseResponse.set_cookie`). If anything goes wrong (missing
cookie or wrong signature), return a default value. """
value = self.cookies.get(key)
if secret and value:
dec = cookie_decode(value, secret) # (key, value) tuple or None
return dec[1] if dec and dec[0] == key else default
return value or default
@DictProperty('environ', 'bottle.request.query', read_only=True)
def query(self):
""" The :attr:`query_string` parsed into a :class:`FormsDict`. These
values are sometimes called "URL arguments" or "GET parameters", but
they should not be confused with the "URL wildcards" provided by the
:class:`Router`. """
get = self.environ['bottle.get'] = FormsDict()
pairs = _parse_qsl(self.environ.get('QUERY_STRING', ''))
for key, value in pairs:
get[key] = value
return get
@DictProperty('environ', 'bottle.request.forms', read_only=True)
def forms(self):
""" Form values parsed from an `url-encoded` or `multipart/form-data`
encoded POST or PUT request body. The result is returned as a
:class:`FormsDict`. All keys and values are strings. File uploads
are stored separately in :attr:`files`. """
forms = FormsDict()
for name, item in self.POST.allitems():
if not isinstance(item, FileUpload):
forms[name] = item
return forms
@DictProperty('environ', 'bottle.request.params', read_only=True)
def params(self):
""" A :class:`FormsDict` with the combined values of :attr:`query` and
:attr:`forms`. File uploads are stored in :attr:`files`. """
params = FormsDict()
for key, value in self.query.allitems():
params[key] = value
for key, value in self.forms.allitems():
params[key] = value
return params
@DictProperty('environ', 'bottle.request.files', read_only=True)
def files(self):
""" File uploads parsed from `multipart/form-data` encoded POST or PUT
request body. The values are instances of :class:`FileUpload`.
"""
files = FormsDict()
for name, item in self.POST.allitems():
if isinstance(item, FileUpload):
files[name] = item
return files
@DictProperty('environ', 'bottle.request.json', read_only=True)
def json(self):
""" If the ``Content-Type`` header is ``application/json``, this
property holds the parsed content of the request body. Only requests
smaller than :attr:`MEMFILE_MAX` are processed to avoid memory
exhaustion. """
ctype = self.environ.get('CONTENT_TYPE', '').lower().split(';')[0]
if ctype == 'application/json':
return json_loads(self._get_body_string())
return None
def _iter_body(self, read, bufsize):
maxread = max(0, self.content_length)
while maxread:
part = read(min(maxread, bufsize))
if not part: break
yield part
maxread -= len(part)
@staticmethod
def _iter_chunked(read, bufsize):
err = HTTPError(400, 'Error while parsing chunked transfer body.')
rn, sem, bs = tob('\r\n'), tob(';'), tob('')
while True:
header = read(1)
while header[-2:] != rn:
c = read(1)
header += c
if not c: raise err
if len(header) > bufsize: raise err
size, _, _ = header.partition(sem)
try:
maxread = int(tonat(size.strip()), 16)
except ValueError:
raise err
if maxread == 0: break
buff = bs
while maxread > 0:
if not buff:
buff = read(min(maxread, bufsize))
part, buff = buff[:maxread], buff[maxread:]
if not part: raise err
yield part
maxread -= len(part)
if read(2) != rn:
raise err
@DictProperty('environ', 'bottle.request.body', read_only=True)
def _body(self):
body_iter = self._iter_chunked if self.chunked else self._iter_body
read_func = self.environ['wsgi.input'].read
body, body_size, is_temp_file = BytesIO(), 0, False
for part in body_iter(read_func, self.MEMFILE_MAX):
body.write(part)
body_size += len(part)
if not is_temp_file and body_size > self.MEMFILE_MAX:
body, tmp = TemporaryFile(mode='w+b'), body
body.write(tmp.getvalue())
del tmp
is_temp_file = True
self.environ['wsgi.input'] = body
body.seek(0)
return body
def _get_body_string(self):
""" read body until content-length or MEMFILE_MAX into a string. Raise
HTTPError(413) on requests that are to large. """
clen = self.content_length
if clen > self.MEMFILE_MAX:
raise HTTPError(413, 'Request too large')
if clen < 0: clen = self.MEMFILE_MAX + 1
data = self.body.read(clen)
if len(data) > self.MEMFILE_MAX: # Fail fast
raise HTTPError(413, 'Request too large')
return data
@property
def body(self):
""" The HTTP request body as a seek-able file-like object. Depending on
:attr:`MEMFILE_MAX`, this is either a temporary file or a
:class:`io.BytesIO` instance. Accessing this property for the first
time reads and replaces the ``wsgi.input`` environ variable.
Subsequent accesses just do a `seek(0)` on the file object. """
self._body.seek(0)
return self._body
@property
def chunked(self):
""" True if Chunked transfer encoding was. """
return 'chunked' in self.environ.get('HTTP_TRANSFER_ENCODING', '').lower()
#: An alias for :attr:`query`.
GET = query
@DictProperty('environ', 'bottle.request.post', read_only=True)
def POST(self):
""" The values of :attr:`forms` and :attr:`files` combined into a single
:class:`FormsDict`. Values are either strings (form values) or
instances of :class:`FileUpload` (file uploads).
"""
post = FormsDict()
# We default to application/x-www-form-urlencoded for everything that
# is not multipart and take the fast path (also: 3.1 workaround)
if not self.content_type.startswith('multipart/'):
pairs = _parse_qsl(tonat(self._get_body_string(), 'latin1'))
for key, value in pairs:
post[key] = value
return post
safe_env = {'QUERY_STRING':''} # Build a safe environment for cgi
for key in ('REQUEST_METHOD', 'CONTENT_TYPE', 'CONTENT_LENGTH'):
if key in self.environ: safe_env[key] = self.environ[key]
args = dict(fp=self.body, environ=safe_env, keep_blank_values=True)
if py31:
args['fp'] = NCTextIOWrapper(args['fp'], encoding='utf8',
newline='\n')
elif py3k:
args['encoding'] = 'utf8'
data = cgi.FieldStorage(**args)
data = data.list or []
for item in data:
if item.filename:
post[item.name] = FileUpload(item.file, item.name,
item.filename, item.headers)
else:
post[item.name] = item.value
return post
@property
def url(self):
""" The full request URI including hostname and scheme. If your app
lives behind a reverse proxy or load balancer and you get confusing
results, make sure that the ``X-Forwarded-Host`` header is set
correctly. """
return self.urlparts.geturl()
@DictProperty('environ', 'bottle.request.urlparts', read_only=True)
def urlparts(self):
""" The :attr:`url` string as an :class:`urlparse.SplitResult` tuple.
The tuple contains (scheme, host, path, query_string and fragment),
but the fragment is always empty because it is not visible to the
server. """
env = self.environ
http = env.get('HTTP_X_FORWARDED_PROTO') or env.get('wsgi.url_scheme', 'http')
host = env.get('HTTP_X_FORWARDED_HOST') or env.get('HTTP_HOST')
if not host:
# HTTP 1.1 requires a Host-header. This is for HTTP/1.0 clients.
host = env.get('SERVER_NAME', '127.0.0.1')
port = env.get('SERVER_PORT')
if port and port != ('80' if http == 'http' else '443'):
host += ':' + port
path = urlquote(self.fullpath)
return UrlSplitResult(http, host, path, env.get('QUERY_STRING'), '')
@property
def fullpath(self):
""" Request path including :attr:`script_name` (if present). """
return urljoin(self.script_name, self.path.lstrip('/'))
@property
def query_string(self):
""" The raw :attr:`query` part of the URL (everything in between ``?``
and ``#``) as a string. """
return self.environ.get('QUERY_STRING', '')
@property
def script_name(self):
""" The initial portion of the URL's `path` that was removed by a higher
level (server or routing middleware) before the application was
called. This script path is returned with leading and trailing
slashes. """
script_name = self.environ.get('SCRIPT_NAME', '').strip('/')
return '/' + script_name + '/' if script_name else '/'
def path_shift(self, shift=1):
""" Shift path segments from :attr:`path` to :attr:`script_name` and
vice versa.
:param shift: The number of path segments to shift. May be negative
to change the shift direction. (default: 1)
"""
script = self.environ.get('SCRIPT_NAME','/')
self['SCRIPT_NAME'], self['PATH_INFO'] = path_shift(script, self.path, shift)
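# Example: with SCRIPT_NAME='/app' and PATH_INFO='/wiki/Start',
# request.path_shift(1) results in SCRIPT_NAME='/app/wiki' and
# PATH_INFO='/Start'.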
@property
def content_length(self):
""" The request body length as an integer. The client is responsible to
set this header. Otherwise, the real length of the body is unknown
and -1 is returned. In this case, :attr:`body` will be empty. """
return int(self.environ.get('CONTENT_LENGTH') or -1)
@property
def content_type(self):
""" The Content-Type header as a lowercase-string (default: empty). """
return self.environ.get('CONTENT_TYPE', '').lower()
@property
def is_xhr(self):
""" True if the request was triggered by a XMLHttpRequest. This only
works with JavaScript libraries that support the `X-Requested-With`
header (most of the popular libraries do). """
requested_with = self.environ.get('HTTP_X_REQUESTED_WITH','')
return requested_with.lower() == 'xmlhttprequest'
@property
def is_ajax(self):
""" Alias for :attr:`is_xhr`. "Ajax" is not the right term. """
return self.is_xhr
@property
def auth(self):
""" HTTP authentication data as a (user, password) tuple. This
implementation currently supports basic (not digest) authentication
only. If the authentication happened at a higher level (e.g. in the
front web-server or a middleware), the password field is None, but
the user field is looked up from the ``REMOTE_USER`` environ
variable. On any errors, None is returned. """
basic = parse_auth(self.environ.get('HTTP_AUTHORIZATION',''))
if basic: return basic
ruser = self.environ.get('REMOTE_USER')
if ruser: return (ruser, None)
return None
@property
def remote_route(self):
""" A list of all IPs that were involved in this request, starting with
the client IP and followed by zero or more proxies. This only works
if all proxies support the ``X-Forwarded-For`` header. Note
that this information can be forged by malicious clients. """
proxy = self.environ.get('HTTP_X_FORWARDED_FOR')
if proxy: return [ip.strip() for ip in proxy.split(',')]
remote = self.environ.get('REMOTE_ADDR')
return [remote] if remote else []
@property
def remote_addr(self):
""" The client IP as a string. Note that this information can be forged
by malicious clients. """
route = self.remote_route
return route[0] if route else None
def copy(self):
""" Return a new :class:`Request` with a shallow :attr:`environ` copy. """
return Request(self.environ.copy())
def get(self, value, default=None): return self.environ.get(value, default)
def __getitem__(self, key): return self.environ[key]
def __delitem__(self, key): self[key] = ""; del(self.environ[key])
def __iter__(self): return iter(self.environ)
def __len__(self): return len(self.environ)
def keys(self): return self.environ.keys()
def __setitem__(self, key, value):
""" Change an environ value and clear all caches that depend on it. """
if self.environ.get('bottle.request.readonly'):
raise KeyError('The environ dictionary is read-only.')
self.environ[key] = value
todelete = ()
if key == 'wsgi.input':
todelete = ('body', 'forms', 'files', 'params', 'post', 'json')
elif key == 'QUERY_STRING':
todelete = ('query', 'params')
elif key.startswith('HTTP_'):
todelete = ('headers', 'cookies')
for key in todelete:
self.environ.pop('bottle.request.'+key, None)
def __repr__(self):
return '<%s: %s %s>' % (self.__class__.__name__, self.method, self.url)
def __getattr__(self, name):
""" Search in self.environ for additional user defined attributes. """
try:
var = self.environ['bottle.request.ext.%s'%name]
return var.__get__(self) if hasattr(var, '__get__') else var
except KeyError:
raise AttributeError('Attribute %r not defined.' % name)
def __setattr__(self, name, value):
if name == 'environ': return object.__setattr__(self, name, value)
self.environ['bottle.request.ext.%s'%name] = value
def _hkey(s):
return s.title().replace('_','-')
class HeaderProperty(object):
def __init__(self, name, reader=None, writer=str, default=''):
self.name, self.default = name, default
self.reader, self.writer = reader, writer
self.__doc__ = 'Current value of the %r header.' % name.title()
def __get__(self, obj, _):
if obj is None: return self
value = obj.headers.get(self.name, self.default)
return self.reader(value) if self.reader else value
def __set__(self, obj, value):
obj.headers[self.name] = self.writer(value)
def __delete__(self, obj):
del obj.headers[self.name]
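# Usage sketch: BaseResponse below uses this descriptor for typed header
# access, e.g. ``content_length = HeaderProperty('Content-Length', reader=int)``,
# so reading ``response.content_length`` returns an int and assigning to it
# writes the header as a string.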
class BaseResponse(object):
""" Storage class for a response body as well as headers and cookies.
This class does support dict-like case-insensitive item-access to
headers, but is NOT a dict. Most notably, iterating over a response
yields parts of the body and not the headers.
:param body: The response body as one of the supported types.
:param status: Either an HTTP status code (e.g. 200) or a status line
including the reason phrase (e.g. '200 OK').
:param headers: A dictionary or a list of name-value pairs.
Additional keyword arguments are added to the list of headers.
Underscores in the header name are replaced with dashes.
"""
default_status = 200
default_content_type = 'text/html; charset=UTF-8'
# Header blacklist for specific response codes
# (rfc2616 section 10.2.3 and 10.3.5)
bad_headers = {
204: set(('Content-Type',)),
304: set(('Allow', 'Content-Encoding', 'Content-Language',
'Content-Length', 'Content-Range', 'Content-Type',
'Content-Md5', 'Last-Modified'))}
def __init__(self, body='', status=None, headers=None, **more_headers):
self._cookies = None
self._headers = {}
self.body = body
self.status = status or self.default_status
if headers:
if isinstance(headers, dict):
headers = headers.items()
for name, value in headers:
self.add_header(name, value)
if more_headers:
for name, value in more_headers.items():
self.add_header(name, value)
def copy(self, cls=None):
""" Returns a copy of self. """
cls = cls or BaseResponse
assert issubclass(cls, BaseResponse)
copy = cls()
copy.status = self.status
copy._headers = dict((k, v[:]) for (k, v) in self._headers.items())
if self._cookies:
copy._cookies = SimpleCookie()
copy._cookies.load(self._cookies.output())
return copy
def __iter__(self):
return iter(self.body)
def close(self):
if hasattr(self.body, 'close'):
self.body.close()
@property
def status_line(self):
""" The HTTP status line as a string (e.g. ``404 Not Found``)."""
return self._status_line
@property
def status_code(self):
""" The HTTP status code as an integer (e.g. 404)."""
return self._status_code
def _set_status(self, status):
if isinstance(status, int):
code, status = status, _HTTP_STATUS_LINES.get(status)
elif ' ' in status:
status = status.strip()
code = int(status.split()[0])
else:
raise ValueError('String status line without a reason phrase.')
if not 100 <= code <= 999: raise ValueError('Status code out of range.')
self._status_code = code
self._status_line = str(status or ('%d Unknown' % code))
def _get_status(self):
return self._status_line
status = property(_get_status, _set_status, None,
''' A writeable property to change the HTTP response status. It accepts
either a numeric code (100-999) or a string with a custom reason
phrase (e.g. "404 Brain not found"). Both :data:`status_line` and
:data:`status_code` are updated accordingly. The return value is
always a status string. ''')
del _get_status, _set_status
@property
def headers(self):
""" An instance of :class:`HeaderDict`, a case-insensitive dict-like
view on the response headers. """
hdict = HeaderDict()
hdict.dict = self._headers
return hdict
def __contains__(self, name): return _hkey(name) in self._headers
def __delitem__(self, name): del self._headers[_hkey(name)]
def __getitem__(self, name): return self._headers[_hkey(name)][-1]
def __setitem__(self, name, value): self._headers[_hkey(name)] = [str(value)]
def get_header(self, name, default=None):
""" Return the value of a previously defined header. If there is no
header with that name, return a default value. """
return self._headers.get(_hkey(name), [default])[-1]
def set_header(self, name, value):
""" Create a new response header, replacing any previously defined
headers with the same name. """
self._headers[_hkey(name)] = [str(value)]
def add_header(self, name, value):
""" Add an additional response header, not removing duplicates. """
self._headers.setdefault(_hkey(name), []).append(str(value))
def iter_headers(self):
""" Yield (header, value) tuples, skipping headers that are not
allowed with the current response status code. """
return self.headerlist
@property
def headerlist(self):
""" WSGI conform list of (header, value) tuples. """
out = []
headers = list(self._headers.items())
if 'Content-Type' not in self._headers:
headers.append(('Content-Type', [self.default_content_type]))
if self._status_code in self.bad_headers:
bad_headers = self.bad_headers[self._status_code]
headers = [h for h in headers if h[0] not in bad_headers]
out += [(name, val) for name, vals in headers for val in vals]
if self._cookies:
for c in self._cookies.values():
out.append(('Set-Cookie', c.OutputString()))
return out
content_type = HeaderProperty('Content-Type')
content_length = HeaderProperty('Content-Length', reader=int)
expires = HeaderProperty('Expires',
reader=lambda x: datetime.utcfromtimestamp(parse_date(x)),
writer=lambda x: http_date(x))
@property
def charset(self, default='UTF-8'):
""" Return the charset specified in the content-type header (default: utf8). """
if 'charset=' in self.content_type:
return self.content_type.split('charset=')[-1].split(';')[0].strip()
return default
def set_cookie(self, name, value, secret=None, **options):
""" Create a new cookie or replace an old one. If the `secret` parameter is
set, create a `Signed Cookie` (described below).
:param name: the name of the cookie.
:param value: the value of the cookie.
:param secret: a signature key required for signed cookies.
Additionally, this method accepts all RFC 2109 attributes that are
supported by :class:`cookie.Morsel`, including:
:param max_age: maximum age in seconds. (default: None)
:param expires: a datetime object or UNIX timestamp. (default: None)
:param domain: the domain that is allowed to read the cookie.
(default: current domain)
:param path: limits the cookie to a given path (default: current path)
:param secure: limit the cookie to HTTPS connections (default: off).
:param httponly: prevents client-side JavaScript from reading this cookie
(default: off, requires Python 2.6 or newer).
If neither `expires` nor `max_age` is set (default), the cookie will
expire at the end of the browser session (as soon as the browser
window is closed).
Signed cookies may store any pickle-able object and are
cryptographically signed to prevent manipulation. Keep in mind that
cookies are limited to 4kb in most browsers.
Warning: Signed cookies are not encrypted (the client can still see
the content) and not copy-protected (the client can restore an old
cookie). The main intention is to make pickling and unpickling
safe, not to store secret information on the client side.
"""
if not self._cookies:
self._cookies = SimpleCookie()
if secret:
value = touni(cookie_encode((name, value), secret))
elif not isinstance(value, basestring):
raise TypeError('Secret key missing for non-string Cookie.')
if len(value) > 4096: raise ValueError('Cookie value too long.')
self._cookies[name] = value
for key, value in options.items():
if key == 'max_age':
if isinstance(value, timedelta):
value = value.seconds + value.days * 24 * 3600
if key == 'expires':
if isinstance(value, (datedate, datetime)):
value = value.timetuple()
elif isinstance(value, (int, float)):
value = time.gmtime(value)
value = time.strftime("%a, %d %b %Y %H:%M:%S GMT", value)
self._cookies[name][key.replace('_', '-')] = value
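# Usage sketch (hypothetical cookie name and secret):
#
#   response.set_cookie('account', 'alice', secret='my-secret-key',
#                       max_age=3600, path='/')
#
# The matching read would be
#   request.get_cookie('account', secret='my-secret-key')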
def delete_cookie(self, key, **kwargs):
""" Delete a cookie. Be sure to use the same `domain` and `path`
settings as used to create the cookie. """
kwargs['max_age'] = -1
kwargs['expires'] = 0
self.set_cookie(key, '', **kwargs)
def __repr__(self):
out = ''
for name, value in self.headerlist:
out += '%s: %s\n' % (name.title(), value.strip())
return out
def _local_property():
ls = threading.local()
def fget(_):
try: return ls.var
except AttributeError:
raise RuntimeError("Request context not initialized.")
def fset(_, value): ls.var = value
def fdel(_): del ls.var
return property(fget, fset, fdel, 'Thread-local property')
class LocalRequest(BaseRequest):
""" A thread-local subclass of :class:`BaseRequest` with a different
set of attributes for each thread. There is usually only one global
instance of this class (:data:`request`). If accessed during a
request/response cycle, this instance always refers to the *current*
request (even on a multithreaded server). """
bind = BaseRequest.__init__
environ = _local_property()
class LocalResponse(BaseResponse):
""" A thread-local subclass of :class:`BaseResponse` with a different
set of attributes for each thread. There is usually only one global
instance of this class (:data:`response`). Its attributes are used
to build the HTTP response at the end of the request/response cycle.
"""
bind = BaseResponse.__init__
_status_line = _local_property()
_status_code = _local_property()
_cookies = _local_property()
_headers = _local_property()
body = _local_property()
Request = BaseRequest
Response = BaseResponse
class HTTPResponse(Response, BottleException):
def __init__(self, body='', status=None, headers=None, **more_headers):
super(HTTPResponse, self).__init__(body, status, headers, **more_headers)
def apply(self, other):
other._status_code = self._status_code
other._status_line = self._status_line
other._headers = self._headers
other._cookies = self._cookies
other.body = self.body
class HTTPError(HTTPResponse):
default_status = 500
def __init__(self, status=None, body=None, exception=None, traceback=None,
**options):
self.exception = exception
self.traceback = traceback
super(HTTPError, self).__init__(body, status, **options)
###############################################################################
# Plugins ######################################################################
###############################################################################
class PluginError(BottleException): pass
class JSONPlugin(object):
name = 'json'
api = 2
def __init__(self, json_dumps=json_dumps):
self.json_dumps = json_dumps
def apply(self, callback, _):
dumps = self.json_dumps
if not dumps: return callback
def wrapper(*a, **ka):
try:
rv = callback(*a, **ka)
except HTTPError:
rv = _e()
if isinstance(rv, dict):
#Attempt to serialize, raises exception on failure
json_response = dumps(rv)
#Set content type only if serialization successful
response.content_type = 'application/json'
return json_response
elif isinstance(rv, HTTPResponse) and isinstance(rv.body, dict):
rv.body = dumps(rv.body)
rv.content_type = 'application/json'
return rv
return wrapper
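# Usage sketch: with this plugin installed (the default when autojson=True),
# a route callback may simply return a dict:
#
#   @app.route('/api/status')
#   def status():
#       return {'ok': True}   # serialized to JSON, Content-Type set accordingly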
class TemplatePlugin(object):
""" This plugin applies the :func:`view` decorator to all routes with a
`template` config parameter. If the parameter is a tuple, the second
element must be a dict with additional options (e.g. `template_engine`)
or default variables for the template. """
name = 'template'
api = 2
def apply(self, callback, route):
conf = route.config.get('template')
if isinstance(conf, (tuple, list)) and len(conf) == 2:
return view(conf[0], **conf[1])(callback)
elif isinstance(conf, str):
return view(conf)(callback)
else:
return callback
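# Usage sketch (hypothetical template name):
#
#   @app.route('/hello/<name>', template='hello_page')
#   def hello(name):
#       return dict(name=name)   # rendered with the 'hello_page' template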
#: Not a plugin, but part of the plugin API. TODO: Find a better place.
class _ImportRedirect(object):
def __init__(self, name, impmask):
""" Create a virtual package that redirects imports (see PEP 302). """
self.name = name
self.impmask = impmask
self.module = sys.modules.setdefault(name, imp.new_module(name))
self.module.__dict__.update({'__file__': __file__, '__path__': [],
'__all__': [], '__loader__': self})
sys.meta_path.append(self)
def find_module(self, fullname, path=None):
if '.' not in fullname: return
packname = fullname.rsplit('.', 1)[0]
if packname != self.name: return
return self
def load_module(self, fullname):
if fullname in sys.modules: return sys.modules[fullname]
modname = fullname.rsplit('.', 1)[1]
realname = self.impmask % modname
__import__(realname)
module = sys.modules[fullname] = sys.modules[realname]
setattr(self.module, modname, module)
module.__loader__ = self
return module
###############################################################################
# Common Utilities #############################################################
###############################################################################
class MultiDict(DictMixin):
""" This dict stores multiple values per key, but behaves exactly like a
normal dict in that it returns only the newest value for any given key.
There are special methods available to access the full list of values.
"""
def __init__(self, *a, **k):
self.dict = dict((k, [v]) for (k, v) in dict(*a, **k).items())
def __len__(self): return len(self.dict)
def __iter__(self): return iter(self.dict)
def __contains__(self, key): return key in self.dict
def __delitem__(self, key): del self.dict[key]
def __getitem__(self, key): return self.dict[key][-1]
def __setitem__(self, key, value): self.append(key, value)
def keys(self): return self.dict.keys()
if py3k:
def values(self): return (v[-1] for v in self.dict.values())
def items(self): return ((k, v[-1]) for k, v in self.dict.items())
def allitems(self):
return ((k, v) for k, vl in self.dict.items() for v in vl)
iterkeys = keys
itervalues = values
iteritems = items
iterallitems = allitems
else:
def values(self): return [v[-1] for v in self.dict.values()]
def items(self): return [(k, v[-1]) for k, v in self.dict.items()]
def iterkeys(self): return self.dict.iterkeys()
def itervalues(self): return (v[-1] for v in self.dict.itervalues())
def iteritems(self):
return ((k, v[-1]) for k, v in self.dict.iteritems())
def iterallitems(self):
return ((k, v) for k, vl in self.dict.iteritems() for v in vl)
def allitems(self):
return [(k, v) for k, vl in self.dict.iteritems() for v in vl]
def get(self, key, default=None, index=-1, type=None):
""" Return the most recent value for a key.
:param default: The default value to be returned if the key is not
present or the type conversion fails.
:param index: An index for the list of available values.
:param type: If defined, this callable is used to cast the value
into a specific type. Exceptions are suppressed and result in
the default value being returned.
"""
try:
val = self.dict[key][index]
return type(val) if type else val
except Exception:
pass
return default
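# Usage sketch:
#
#   d = MultiDict(a='1')
#   d.append('a', '2')
#   d['a']                 # -> '2'  (newest value)
#   d.getall('a')          # -> ['1', '2']
#   d.get('a', type=int)   # -> 2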
def append(self, key, value):
""" Add a new value to the list of values for this key. """
self.dict.setdefault(key, []).append(value)
def replace(self, key, value):
""" Replace the list of values with a single value. """
self.dict[key] = [value]
def getall(self, key):
""" Return a (possibly empty) list of values for a key. """
return self.dict.get(key) or []
#: Aliases for WTForms to mimic other multi-dict APIs (Django)
getone = get
getlist = getall
class FormsDict(MultiDict):
""" This :class:`MultiDict` subclass is used to store request form data.
In addition to the normal dict-like item access methods (which return
unmodified data as native strings), this container also supports
attribute-like access to its values. Attributes are automatically de-
or recoded to match :attr:`input_encoding` (default: 'utf8'). Missing
attributes default to an empty string. """
#: Encoding used for attribute values.
input_encoding = 'utf8'
#: If true (default), unicode strings are first encoded with `latin1`
#: and then decoded to match :attr:`input_encoding`.
recode_unicode = True
def _fix(self, s, encoding=None):
if isinstance(s, unicode) and self.recode_unicode: # Python 3 WSGI
return s.encode('latin1').decode(encoding or self.input_encoding)
elif isinstance(s, bytes): # Python 2 WSGI
return s.decode(encoding or self.input_encoding)
else:
return s
def decode(self, encoding=None):
""" Returns a copy with all keys and values de- or recoded to match
:attr:`input_encoding`. Some libraries (e.g. WTForms) want a
unicode dictionary. """
copy = FormsDict()
enc = copy.input_encoding = encoding or self.input_encoding
copy.recode_unicode = False
for key, value in self.allitems():
copy.append(self._fix(key, enc), self._fix(value, enc))
return copy
def getunicode(self, name, default=None, encoding=None):
""" Return the value as a unicode string, or the default. """
try:
return self._fix(self[name], encoding)
except (UnicodeError, KeyError):
return default
def __getattr__(self, name, default=unicode()):
# Without this guard, pickle generates a cryptic TypeError:
if name.startswith('__') and name.endswith('__'):
return super(FormsDict, self).__getattr__(name)
return self.getunicode(name, default=default)
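# Usage sketch: for a submitted form field 'city', both
# request.forms['city'] (raw native string) and request.forms.city
# (decoded unicode, '' if missing) are available.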
class HeaderDict(MultiDict):
""" A case-insensitive version of :class:`MultiDict` that defaults to
replacing the old value instead of appending to it. """
def __init__(self, *a, **ka):
self.dict = {}
if a or ka: self.update(*a, **ka)
def __contains__(self, key): return _hkey(key) in self.dict
def __delitem__(self, key): del self.dict[_hkey(key)]
def __getitem__(self, key): return self.dict[_hkey(key)][-1]
def __setitem__(self, key, value): self.dict[_hkey(key)] = [str(value)]
def append(self, key, value):
self.dict.setdefault(_hkey(key), []).append(str(value))
def replace(self, key, value): self.dict[_hkey(key)] = [str(value)]
def getall(self, key): return self.dict.get(_hkey(key)) or []
def get(self, key, default=None, index=-1):
return MultiDict.get(self, _hkey(key), default, index)
def filter(self, names):
for name in [_hkey(n) for n in names]:
if name in self.dict:
del self.dict[name]
class WSGIHeaderDict(DictMixin):
""" This dict-like class wraps a WSGI environ dict and provides convenient
access to HTTP_* fields. Keys and values are native strings
(2.x bytes or 3.x unicode) and keys are case-insensitive. If the WSGI
environment contains non-native string values, these are de- or encoded
using a lossless 'latin1' character set.
The API will remain stable even on changes to the relevant PEPs.
Currently PEP 333, 444 and 3333 are supported. (PEP 444 is the only one
that uses non-native strings.)
"""
#: List of keys that do not have a ``HTTP_`` prefix.
cgikeys = ('CONTENT_TYPE', 'CONTENT_LENGTH')
def __init__(self, environ):
self.environ = environ
def _ekey(self, key):
""" Translate header field name to CGI/WSGI environ key. """
key = key.replace('-','_').upper()
if key in self.cgikeys:
return key
return 'HTTP_' + key
def raw(self, key, default=None):
""" Return the header value as is (may be bytes or unicode). """
return self.environ.get(self._ekey(key), default)
def __getitem__(self, key):
return tonat(self.environ[self._ekey(key)], 'latin1')
def __setitem__(self, key, value):
raise TypeError("%s is read-only." % self.__class__)
def __delitem__(self, key):
raise TypeError("%s is read-only." % self.__class__)
def __iter__(self):
for key in self.environ:
if key[:5] == 'HTTP_':
yield key[5:].replace('_', '-').title()
elif key in self.cgikeys:
yield key.replace('_', '-').title()
def keys(self): return [x for x in self]
def __len__(self): return len(self.keys())
def __contains__(self, key): return self._ekey(key) in self.environ
class ConfigDict(dict):
""" A dict-like configuration storage with additional support for
namespaces, validators, meta-data, on_change listeners and more.
"""
__slots__ = ('_meta', '_on_change')
def __init__(self):
self._meta = {}
self._on_change = lambda name, value: None
def load_config(self, filename):
""" Load values from an ``*.ini`` style config file.
If the config file contains sections, their names are used as
namespaces for the values within. The two special sections
``DEFAULT`` and ``bottle`` refer to the root namespace (no prefix).
"""
conf = ConfigParser()
conf.read(filename)
for section in conf.sections():
for key, value in conf.items(section):
if section not in ('DEFAULT', 'bottle'):
key = section + '.' + key
self[key] = value
return self
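# Example (hypothetical file contents): a config file such as
#
#   [bottle]
#   debug = true
#
#   [sqlite]
#   db = /tmp/test.db
#
# loaded via app.config.load_config('/path/to/app.conf') yields the keys
# 'debug' and 'sqlite.db'. Note that all values are read as strings.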
def load_dict(self, source, namespace=''):
""" Load values from a dictionary structure. Nesting can be used to
represent namespaces.
>>> c = ConfigDict()
>>> c.load_dict({'some': {'namespace': {'key': 'value'} } })
{'some.namespace.key': 'value'}
"""
for key, value in source.items():
if isinstance(key, str):
nskey = (namespace + '.' + key).strip('.')
if isinstance(value, dict):
self.load_dict(value, namespace=nskey)
else:
self[nskey] = value
else:
raise TypeError('Key has type %r (not a string)' % type(key))
return self
def update(self, *a, **ka):
""" If the first parameter is a string, all keys are prefixed with this
namespace. Apart from that it works just as the usual dict.update().
Example: ``update('some.namespace', key='value')`` """
prefix = ''
if a and isinstance(a[0], str):
prefix = a[0].strip('.') + '.'
a = a[1:]
for key, value in dict(*a, **ka).items():
self[prefix+key] = value
def setdefault(self, key, value):
if key not in self:
self[key] = value
def __setitem__(self, key, value):
if not isinstance(key, str):
raise TypeError('Key has type %r (not a string)' % type(key))
value = self.meta_get(key, 'filter', lambda x: x)(value)
if key in self and self[key] is value:
return
self._on_change(key, value)
dict.__setitem__(self, key, value)
def __delitem__(self, key):
self._on_change(key, None)
dict.__delitem__(self, key)
def meta_get(self, key, metafield, default=None):
""" Return the value of a meta field for a key. """
return self._meta.get(key, {}).get(metafield, default)
def meta_set(self, key, metafield, value):
""" Set the meta field for a key to a new value. This triggers the
on-change handler for existing keys. """
self._meta.setdefault(key, {})[metafield] = value
if key in self:
self[key] = self[key]
def meta_list(self, key):
""" Return an iterable of meta field names defined for a key. """
return self._meta.get(key, {}).keys()
class AppStack(list):
""" A stack-like list. Calling it returns the head of the stack. """
def __call__(self):
""" Return the current default application. """
return self[-1]
def push(self, value=None):
""" Add a new :class:`Bottle` instance to the stack """
if not isinstance(value, Bottle):
value = Bottle()
self.append(value)
return value
class WSGIFileWrapper(object):
def __init__(self, fp, buffer_size=1024*64):
self.fp, self.buffer_size = fp, buffer_size
for attr in ('fileno', 'close', 'read', 'readlines', 'tell', 'seek'):
if hasattr(fp, attr): setattr(self, attr, getattr(fp, attr))
def __iter__(self):
buff, read = self.buffer_size, self.read
while True:
part = read(buff)
if not part: return
yield part
class _closeiter(object):
""" This only exists to be able to attach a .close method to iterators that
do not support attribute assignment (most of itertools). """
def __init__(self, iterator, close=None):
self.iterator = iterator
self.close_callbacks = makelist(close)
def __iter__(self):
return iter(self.iterator)
def close(self):
for func in self.close_callbacks:
func()
class ResourceManager(object):
""" This class manages a list of search paths and helps to find and open
application-bound resources (files).
:param base: default value for :meth:`add_path` calls.
:param opener: callable used to open resources.
:param cachemode: controls which lookups are cached. One of 'all',
'found' or 'none'.
"""
def __init__(self, base='./', opener=open, cachemode='all'):
self.opener = opener
self.base = base
self.cachemode = cachemode
#: A list of search paths. See :meth:`add_path` for details.
self.path = []
#: A cache for resolved paths. ``res.cache.clear()`` clears the cache.
self.cache = {}
def add_path(self, path, base=None, index=None, create=False):
""" Add a new path to the list of search paths. Return False if the
path does not exist.
:param path: The new search path. Relative paths are turned into
an absolute and normalized form. If the path looks like a file
(not ending in `/`), the filename is stripped off.
:param base: Path used to absolutize relative search paths.
Defaults to :attr:`base` which defaults to ``os.getcwd()``.
:param index: Position within the list of search paths. Defaults
to last index (appends to the list).
The `base` parameter makes it easy to reference files installed
along with a python module or package::
res.add_path('./resources/', __file__)
"""
base = os.path.abspath(os.path.dirname(base or self.base))
path = os.path.abspath(os.path.join(base, os.path.dirname(path)))
path += os.sep
if path in self.path:
self.path.remove(path)
if create and not os.path.isdir(path):
os.makedirs(path)
if index is None:
self.path.append(path)
else:
self.path.insert(index, path)
self.cache.clear()
return os.path.exists(path)
def __iter__(self):
""" Iterate over all existing files in all registered paths. """
search = self.path[:]
while search:
path = search.pop()
if not os.path.isdir(path): continue
for name in os.listdir(path):
full = os.path.join(path, name)
if os.path.isdir(full): search.append(full)
else: yield full
def lookup(self, name):
""" Search for a resource and return an absolute file path, or `None`.
The :attr:`path` list is searched in order. The first match is
            returned. Symlinks are followed. The result is cached to speed up
future lookups. """
if name not in self.cache or DEBUG:
for path in self.path:
fpath = os.path.join(path, name)
if os.path.isfile(fpath):
if self.cachemode in ('all', 'found'):
self.cache[name] = fpath
return fpath
if self.cachemode == 'all':
self.cache[name] = None
return self.cache[name]
def open(self, name, mode='r', *args, **kwargs):
""" Find a resource and return a file object, or raise IOError. """
fname = self.lookup(name)
if not fname: raise IOError("Resource %r not found." % name)
return self.opener(fname, mode=mode, *args, **kwargs)
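# Illustrative ResourceManager usage (added comment, a sketch, not part of the original source):
#   res = ResourceManager()
#   res.add_path('./config/', base=__file__)   # search <this module's dir>/config/
#   with res.open('defaults.ini') as fp:        # raises IOError if not found on any search path
#       text = fp.read()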
class FileUpload(object):
def __init__(self, fileobj, name, filename, headers=None):
""" Wrapper for file uploads. """
#: Open file(-like) object (BytesIO buffer or temporary file)
self.file = fileobj
#: Name of the upload form field
self.name = name
#: Raw filename as sent by the client (may contain unsafe characters)
self.raw_filename = filename
#: A :class:`HeaderDict` with additional headers (e.g. content-type)
self.headers = HeaderDict(headers) if headers else HeaderDict()
content_type = HeaderProperty('Content-Type')
content_length = HeaderProperty('Content-Length', reader=int, default=-1)
@cached_property
def filename(self):
""" Name of the file on the client file system, but normalized to ensure
file system compatibility. An empty filename is returned as 'empty'.
Only ASCII letters, digits, dashes, underscores and dots are
allowed in the final filename. Accents are removed, if possible.
            Whitespace is replaced by a single dash. Leading or trailing dots
or dashes are removed. The filename is limited to 255 characters.
"""
fname = self.raw_filename
if not isinstance(fname, unicode):
fname = fname.decode('utf8', 'ignore')
fname = normalize('NFKD', fname).encode('ASCII', 'ignore').decode('ASCII')
fname = os.path.basename(fname.replace('\\', os.path.sep))
fname = re.sub(r'[^a-zA-Z0-9-_.\s]', '', fname).strip()
fname = re.sub(r'[-\s]+', '-', fname).strip('.-')
return fname[:255] or 'empty'
def _copy_file(self, fp, chunk_size=2**16):
read, write, offset = self.file.read, fp.write, self.file.tell()
while 1:
buf = read(chunk_size)
if not buf: break
write(buf)
self.file.seek(offset)
def save(self, destination, overwrite=False, chunk_size=2**16):
""" Save file to disk or copy its content to an open file(-like) object.
If *destination* is a directory, :attr:`filename` is added to the
path. Existing files are not overwritten by default (IOError).
:param destination: File path, directory or file(-like) object.
:param overwrite: If True, replace existing files. (default: False)
:param chunk_size: Bytes to read at a time. (default: 64kb)
"""
if isinstance(destination, basestring): # Except file-likes here
if os.path.isdir(destination):
destination = os.path.join(destination, self.filename)
if not overwrite and os.path.exists(destination):
raise IOError('File exists.')
with open(destination, 'wb') as fp:
self._copy_file(fp, chunk_size)
else:
self._copy_file(destination, chunk_size)
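# Illustrative upload handling inside a route (added comment, a sketch):
#   upload = request.files.get('datafile')   # a FileUpload instance, or None
#   if upload:
#       upload.save('/tmp/uploads')          # joins the sanitized .filename; refuses to overwrite by default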
###############################################################################
# Application Helper ###########################################################
###############################################################################
def abort(code=500, text='Unknown Error.'):
""" Aborts execution and causes a HTTP error. """
raise HTTPError(code, text)
def redirect(url, code=None):
""" Aborts execution and causes a 303 or 302 redirect, depending on
the HTTP protocol version. """
if not code:
code = 303 if request.get('SERVER_PROTOCOL') == "HTTP/1.1" else 302
res = response.copy(cls=HTTPResponse)
res.status = code
res.body = ""
res.set_header('Location', urljoin(request.url, url))
raise res
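# Typical usage (added comment, illustrative): abort(404, 'Page not found') inside a route handler,
# or redirect('/login') to send the client elsewhere; both raise, so no explicit return is needed.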
def _file_iter_range(fp, offset, bytes, maxread=1024*1024):
""" Yield chunks from a range in a file. No chunk is bigger than maxread."""
fp.seek(offset)
while bytes > 0:
part = fp.read(min(bytes, maxread))
if not part: break
bytes -= len(part)
yield part
def static_file(filename, root, mimetype='auto', download=False, charset='UTF-8'):
""" Open a file in a safe way and return :exc:`HTTPResponse` with status
        code 200, 304, 403 or 404. The ``Content-Type``, ``Content-Encoding``,
``Content-Length`` and ``Last-Modified`` headers are set if possible.
Special support for ``If-Modified-Since``, ``Range`` and ``HEAD``
requests.
:param filename: Name or path of the file to send.
:param root: Root path for file lookups. Should be an absolute directory
path.
:param mimetype: Defines the content-type header (default: guess from
file extension)
:param download: If True, ask the browser to open a `Save as...` dialog
instead of opening the file with the associated program. You can
specify a custom filename as a string. If not specified, the
original filename is used (default: False).
:param charset: The charset to use for files with a ``text/*``
mime-type. (default: UTF-8)
"""
root = os.path.abspath(root) + os.sep
filename = os.path.abspath(os.path.join(root, filename.strip('/\\')))
headers = dict()
if not filename.startswith(root):
return HTTPError(403, "Access denied.")
if not os.path.exists(filename) or not os.path.isfile(filename):
return HTTPError(404, "File does not exist.")
if not os.access(filename, os.R_OK):
return HTTPError(403, "You do not have permission to access this file.")
if mimetype == 'auto':
mimetype, encoding = mimetypes.guess_type(filename)
if encoding: headers['Content-Encoding'] = encoding
if mimetype:
if mimetype[:5] == 'text/' and charset and 'charset' not in mimetype:
mimetype += '; charset=%s' % charset
headers['Content-Type'] = mimetype
if download:
download = os.path.basename(filename if download == True else download)
headers['Content-Disposition'] = 'attachment; filename="%s"' % download
stats = os.stat(filename)
headers['Content-Length'] = clen = stats.st_size
lm = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(stats.st_mtime))
headers['Last-Modified'] = lm
ims = request.environ.get('HTTP_IF_MODIFIED_SINCE')
if ims:
ims = parse_date(ims.split(";")[0].strip())
if ims is not None and ims >= int(stats.st_mtime):
headers['Date'] = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime())
return HTTPResponse(status=304, **headers)
body = '' if request.method == 'HEAD' else open(filename, 'rb')
headers["Accept-Ranges"] = "bytes"
ranges = request.environ.get('HTTP_RANGE')
if 'HTTP_RANGE' in request.environ:
ranges = list(parse_range_header(request.environ['HTTP_RANGE'], clen))
if not ranges:
return HTTPError(416, "Requested Range Not Satisfiable")
offset, end = ranges[0]
headers["Content-Range"] = "bytes %d-%d/%d" % (offset, end-1, clen)
headers["Content-Length"] = str(end-offset)
if body: body = _file_iter_range(body, offset, end-offset)
return HTTPResponse(body, status=206, **headers)
return HTTPResponse(body, **headers)
###############################################################################
# HTTP Utilities and MISC (TODO) ###############################################
###############################################################################
def debug(mode=True):
""" Change the debug level.
There is only one debug level supported at the moment."""
global DEBUG
if mode: warnings.simplefilter('default')
DEBUG = bool(mode)
def http_date(value):
if isinstance(value, (datedate, datetime)):
value = value.utctimetuple()
elif isinstance(value, (int, float)):
value = time.gmtime(value)
if not isinstance(value, basestring):
value = time.strftime("%a, %d %b %Y %H:%M:%S GMT", value)
return value
def parse_date(ims):
""" Parse rfc1123, rfc850 and asctime timestamps and return UTC epoch. """
try:
ts = email.utils.parsedate_tz(ims)
return time.mktime(ts[:8] + (0,)) - (ts[9] or 0) - time.timezone
except (TypeError, ValueError, IndexError, OverflowError):
return None
def parse_auth(header):
""" Parse rfc2617 HTTP authentication header string (basic) and return (user,pass) tuple or None"""
try:
method, data = header.split(None, 1)
if method.lower() == 'basic':
user, pwd = touni(base64.b64decode(tob(data))).split(':',1)
return user, pwd
except (KeyError, ValueError):
return None
def parse_range_header(header, maxlen=0):
""" Yield (start, end) ranges parsed from a HTTP Range header. Skip
unsatisfiable ranges. The end index is non-inclusive."""
if not header or header[:6] != 'bytes=': return
ranges = [r.split('-', 1) for r in header[6:].split(',') if '-' in r]
for start, end in ranges:
try:
if not start: # bytes=-100 -> last 100 bytes
start, end = max(0, maxlen-int(end)), maxlen
            elif not end:    # bytes=100-    -> skip the first 100 bytes
start, end = int(start), maxlen
else: # bytes=100-200 -> bytes 100-200 (inclusive)
start, end = int(start), min(int(end)+1, maxlen)
if 0 <= start < end <= maxlen:
yield start, end
except ValueError:
pass
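# Example (added comment, illustrative): parse_range_header('bytes=0-99,500-', 1000)
# yields (0, 100) and (500, 1000); the end offset is exclusive.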
def _parse_qsl(qs):
r = []
for pair in qs.replace(';','&').split('&'):
if not pair: continue
nv = pair.split('=', 1)
if len(nv) != 2: nv.append('')
key = urlunquote(nv[0].replace('+', ' '))
value = urlunquote(nv[1].replace('+', ' '))
r.append((key, value))
return r
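# Example (added comment, illustrative):
#   _parse_qsl('a=1&b=two+words;c') -> [('a', '1'), ('b', 'two words'), ('c', '')]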
def _lscmp(a, b):
""" Compares two strings in a cryptographically safe way:
Runtime is not affected by length of common prefix. """
return not sum(0 if x==y else 1 for x, y in zip(a, b)) and len(a) == len(b)
def cookie_encode(data, key):
""" Encode and sign a pickle-able object. Return a (byte) string """
msg = base64.b64encode(pickle.dumps(data, -1))
sig = base64.b64encode(hmac.new(tob(key), msg).digest())
return tob('!') + sig + tob('?') + msg
def cookie_decode(data, key):
""" Verify and decode an encoded string. Return an object or None."""
data = tob(data)
if cookie_is_encoded(data):
sig, msg = data.split(tob('?'), 1)
if _lscmp(sig[1:], base64.b64encode(hmac.new(tob(key), msg).digest())):
return pickle.loads(base64.b64decode(msg))
return None
def cookie_is_encoded(data):
""" Return True if the argument looks like a encoded cookie."""
return bool(data.startswith(tob('!')) and tob('?') in data)
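# Wire format produced/consumed above (added comment, for reference):
#   tob('!') + base64(HMAC(key, msg)) + tob('?') + msg, where msg = base64(pickle.dumps(data));
# cookie_decode() only unpickles when the signature matches.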
def html_escape(string):
""" Escape HTML special characters ``&<>`` and quotes ``'"``. """
    return string.replace('&','&amp;').replace('<','&lt;').replace('>','&gt;')\
                 .replace('"','&quot;').replace("'",'&#039;')
def html_quote(string):
""" Escape and quote a string to be used as an HTTP attribute."""
return '"%s"' % html_escape(string).replace('\n',' ')\
.replace('\r',' ').replace('\t','	')
def yieldroutes(func):
""" Return a generator for routes that match the signature (name, args)
of the func parameter. This may yield more than one route if the function
takes optional keyword arguments. The output is best described by example::
a() -> '/a'
b(x, y) -> '/b/<x>/<y>'
c(x, y=5) -> '/c/<x>' and '/c/<x>/<y>'
d(x=5, y=6) -> '/d' and '/d/<x>' and '/d/<x>/<y>'
"""
path = '/' + func.__name__.replace('__','/').lstrip('/')
spec = getargspec(func)
argc = len(spec[0]) - len(spec[3] or [])
path += ('/<%s>' * argc) % tuple(spec[0][:argc])
yield path
for arg in spec[0][argc:]:
path += '/<%s>' % arg
yield path
def path_shift(script_name, path_info, shift=1):
""" Shift path fragments from PATH_INFO to SCRIPT_NAME and vice versa.
:return: The modified paths.
:param script_name: The SCRIPT_NAME path.
        :param path_info: The PATH_INFO path.
:param shift: The number of path fragments to shift. May be negative to
change the shift direction. (default: 1)
"""
if shift == 0: return script_name, path_info
pathlist = path_info.strip('/').split('/')
scriptlist = script_name.strip('/').split('/')
if pathlist and pathlist[0] == '': pathlist = []
if scriptlist and scriptlist[0] == '': scriptlist = []
if 0 < shift <= len(pathlist):
moved = pathlist[:shift]
scriptlist = scriptlist + moved
pathlist = pathlist[shift:]
elif 0 > shift >= -len(scriptlist):
moved = scriptlist[shift:]
pathlist = moved + pathlist
scriptlist = scriptlist[:shift]
else:
empty = 'SCRIPT_NAME' if shift < 0 else 'PATH_INFO'
raise AssertionError("Cannot shift. Nothing left from %s" % empty)
new_script_name = '/' + '/'.join(scriptlist)
new_path_info = '/' + '/'.join(pathlist)
if path_info.endswith('/') and pathlist: new_path_info += '/'
return new_script_name, new_path_info
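# Example (added comment, illustrative): path_shift('/wiki', '/page/edit') -> ('/wiki/page', '/edit');
# a negative shift moves fragments back from SCRIPT_NAME to PATH_INFO.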
def auth_basic(check, realm="private", text="Access denied"):
""" Callback decorator to require HTTP auth (basic).
TODO: Add route(check_auth=...) parameter. """
def decorator(func):
@functools.wraps(func)
def wrapper(*a, **ka):
user, password = request.auth or (None, None)
if user is None or not check(user, password):
err = HTTPError(401, text)
err.add_header('WWW-Authenticate', 'Basic realm="%s"' % realm)
return err
return func(*a, **ka)
return wrapper
return decorator
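# Illustrative use of auth_basic (added comment, a sketch; `check_credentials` is hypothetical):
#   def check_credentials(user, pw): return user == 'admin' and pw == 'secret'
#   @route('/admin')
#   @auth_basic(check_credentials)
#   def admin_page(): return 'hello admin'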
# Shortcuts for common Bottle methods.
# They all refer to the current default application.
def make_default_app_wrapper(name):
""" Return a callable that relays calls to the current default app. """
@functools.wraps(getattr(Bottle, name))
def wrapper(*a, **ka):
return getattr(app(), name)(*a, **ka)
return wrapper
route = make_default_app_wrapper('route')
get = make_default_app_wrapper('get')
post = make_default_app_wrapper('post')
put = make_default_app_wrapper('put')
delete = make_default_app_wrapper('delete')
patch = make_default_app_wrapper('patch')
error = make_default_app_wrapper('error')
mount = make_default_app_wrapper('mount')
hook = make_default_app_wrapper('hook')
install = make_default_app_wrapper('install')
uninstall = make_default_app_wrapper('uninstall')
url = make_default_app_wrapper('get_url')
###############################################################################
# Server Adapter ###############################################################
###############################################################################
class ServerAdapter(object):
quiet = False
def __init__(self, host='127.0.0.1', port=8080, **options):
self.options = options
self.host = host
self.port = int(port)
def run(self, handler): # pragma: no cover
pass
def __repr__(self):
args = ', '.join(['%s=%s'%(k,repr(v)) for k, v in self.options.items()])
return "%s(%s)" % (self.__class__.__name__, args)
class CGIServer(ServerAdapter):
quiet = True
def run(self, handler): # pragma: no cover
from wsgiref.handlers import CGIHandler
def fixed_environ(environ, start_response):
environ.setdefault('PATH_INFO', '')
return handler(environ, start_response)
CGIHandler().run(fixed_environ)
class FlupFCGIServer(ServerAdapter):
def run(self, handler): # pragma: no cover
import flup.server.fcgi
self.options.setdefault('bindAddress', (self.host, self.port))
flup.server.fcgi.WSGIServer(handler, **self.options).run()
class WSGIRefServer(ServerAdapter):
def run(self, app): # pragma: no cover
from wsgiref.simple_server import WSGIRequestHandler, WSGIServer
from wsgiref.simple_server import make_server
import socket
class FixedHandler(WSGIRequestHandler):
def address_string(self): # Prevent reverse DNS lookups please.
return self.client_address[0]
def log_request(*args, **kw):
if not self.quiet:
return WSGIRequestHandler.log_request(*args, **kw)
handler_cls = self.options.get('handler_class', FixedHandler)
server_cls = self.options.get('server_class', WSGIServer)
if ':' in self.host: # Fix wsgiref for IPv6 addresses.
if getattr(server_cls, 'address_family') == socket.AF_INET:
class server_cls(server_cls):
address_family = socket.AF_INET6
srv = make_server(self.host, self.port, app, server_cls, handler_cls)
srv.serve_forever()
class CherryPyServer(ServerAdapter):
def run(self, handler): # pragma: no cover
from cherrypy import wsgiserver
self.options['bind_addr'] = (self.host, self.port)
self.options['wsgi_app'] = handler
certfile = self.options.get('certfile')
if certfile:
del self.options['certfile']
keyfile = self.options.get('keyfile')
if keyfile:
del self.options['keyfile']
server = wsgiserver.CherryPyWSGIServer(**self.options)
if certfile:
server.ssl_certificate = certfile
if keyfile:
server.ssl_private_key = keyfile
try:
server.start()
finally:
server.stop()
class WaitressServer(ServerAdapter):
def run(self, handler):
from waitress import serve
serve(handler, host=self.host, port=self.port)
class PasteServer(ServerAdapter):
def run(self, handler): # pragma: no cover
from paste import httpserver
from paste.translogger import TransLogger
handler = TransLogger(handler, setup_console_handler=(not self.quiet))
httpserver.serve(handler, host=self.host, port=str(self.port),
**self.options)
class MeinheldServer(ServerAdapter):
def run(self, handler):
from meinheld import server
server.listen((self.host, self.port))
server.run(handler)
class FapwsServer(ServerAdapter):
""" Extremely fast webserver using libev. See http://www.fapws.org/ """
def run(self, handler): # pragma: no cover
import fapws._evwsgi as evwsgi
from fapws import base, config
port = self.port
if float(config.SERVER_IDENT[-2:]) > 0.4:
# fapws3 silently changed its API in 0.5
port = str(port)
evwsgi.start(self.host, port)
# fapws3 never releases the GIL. Complain upstream. I tried. No luck.
if 'BOTTLE_CHILD' in os.environ and not self.quiet:
_stderr("WARNING: Auto-reloading does not work with Fapws3.\n")
_stderr(" (Fapws3 breaks python thread support)\n")
evwsgi.set_base_module(base)
def app(environ, start_response):
environ['wsgi.multiprocess'] = False
return handler(environ, start_response)
evwsgi.wsgi_cb(('', app))
evwsgi.run()
class TornadoServer(ServerAdapter):
""" The super hyped asynchronous server by facebook. Untested. """
def run(self, handler): # pragma: no cover
import tornado.wsgi, tornado.httpserver, tornado.ioloop
container = tornado.wsgi.WSGIContainer(handler)
server = tornado.httpserver.HTTPServer(container)
server.listen(port=self.port,address=self.host)
tornado.ioloop.IOLoop.instance().start()
class AppEngineServer(ServerAdapter):
""" Adapter for Google App Engine. """
quiet = True
def run(self, handler):
from google.appengine.ext.webapp import util
# A main() function in the handler script enables 'App Caching'.
        # Let's make sure it is there. This _really_ improves performance.
module = sys.modules.get('__main__')
if module and not hasattr(module, 'main'):
module.main = lambda: util.run_wsgi_app(handler)
util.run_wsgi_app(handler)
class TwistedServer(ServerAdapter):
""" Untested. """
def run(self, handler):
from twisted.web import server, wsgi
from twisted.python.threadpool import ThreadPool
from twisted.internet import reactor
thread_pool = ThreadPool()
thread_pool.start()
reactor.addSystemEventTrigger('after', 'shutdown', thread_pool.stop)
factory = server.Site(wsgi.WSGIResource(reactor, thread_pool, handler))
reactor.listenTCP(self.port, factory, interface=self.host)
if not reactor.running:
reactor.run()
class DieselServer(ServerAdapter):
""" Untested. """
def run(self, handler):
from diesel.protocols.wsgi import WSGIApplication
app = WSGIApplication(handler, port=self.port)
app.run()
class GeventServer(ServerAdapter):
""" Untested. Options:
* `fast` (default: False) uses libevent's http server, but has some
issues: No streaming, no pipelining, no SSL.
* See gevent.wsgi.WSGIServer() documentation for more options.
"""
def run(self, handler):
from gevent import wsgi, pywsgi, local
if not isinstance(threading.local(), local.local):
msg = "Bottle requires gevent.monkey.patch_all() (before import)"
raise RuntimeError(msg)
if not self.options.pop('fast', None): wsgi = pywsgi
self.options['log'] = None if self.quiet else 'default'
address = (self.host, self.port)
server = wsgi.WSGIServer(address, handler, **self.options)
if 'BOTTLE_CHILD' in os.environ:
import signal
signal.signal(signal.SIGINT, lambda s, f: server.stop())
server.serve_forever()
class GeventSocketIOServer(ServerAdapter):
def run(self,handler):
from socketio import server
address = (self.host, self.port)
server.SocketIOServer(address, handler, **self.options).serve_forever()
class GunicornServer(ServerAdapter):
""" Untested. See http://gunicorn.org/configure.html for options. """
def run(self, handler):
from gunicorn.app.base import Application
config = {'bind': "%s:%d" % (self.host, int(self.port))}
config.update(self.options)
class GunicornApplication(Application):
def init(self, parser, opts, args):
return config
def load(self):
return handler
GunicornApplication().run()
class EventletServer(ServerAdapter):
""" Untested """
def run(self, handler):
from eventlet import wsgi, listen
try:
wsgi.server(listen((self.host, self.port)), handler,
log_output=(not self.quiet))
except TypeError:
# Fallback, if we have old version of eventlet
wsgi.server(listen((self.host, self.port)), handler)
class RocketServer(ServerAdapter):
""" Untested. """
def run(self, handler):
from rocket import Rocket
server = Rocket((self.host, self.port), 'wsgi', { 'wsgi_app' : handler })
server.start()
class BjoernServer(ServerAdapter):
""" Fast server written in C: https://github.com/jonashaag/bjoern """
def run(self, handler):
from bjoern import run
run(handler, self.host, self.port)
class AutoServer(ServerAdapter):
""" Untested. """
adapters = [WaitressServer, PasteServer, TwistedServer, CherryPyServer, WSGIRefServer]
def run(self, handler):
for sa in self.adapters:
try:
return sa(self.host, self.port, **self.options).run(handler)
except ImportError:
pass
server_names = {
'cgi': CGIServer,
'flup': FlupFCGIServer,
'wsgiref': WSGIRefServer,
'waitress': WaitressServer,
'cherrypy': CherryPyServer,
'paste': PasteServer,
'fapws3': FapwsServer,
'tornado': TornadoServer,
'gae': AppEngineServer,
'twisted': TwistedServer,
'diesel': DieselServer,
'meinheld': MeinheldServer,
'gunicorn': GunicornServer,
'eventlet': EventletServer,
'gevent': GeventServer,
'geventSocketIO':GeventSocketIOServer,
'rocket': RocketServer,
'bjoern' : BjoernServer,
'auto': AutoServer,
}
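# Example (added comment, illustrative): run(app, server='waitress', host='0.0.0.0', port=8080)
# picks WaitressServer from the mapping above; extra keyword arguments are passed to the
# adapter's constructor and end up in self.options.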
###############################################################################
# Application Control ##########################################################
###############################################################################
def load(target, **namespace):
""" Import a module or fetch an object from a module.
* ``package.module`` returns `module` as a module object.
* ``pack.mod:name`` returns the module variable `name` from `pack.mod`.
* ``pack.mod:func()`` calls `pack.mod.func()` and returns the result.
The last form accepts not only function calls, but any type of
expression. Keyword arguments passed to this function are available as
        local variables. Example: ``load('re:compile(x)', x='[a-z]')``
"""
module, target = target.split(":", 1) if ':' in target else (target, None)
if module not in sys.modules: __import__(module)
if not target: return sys.modules[module]
if target.isalnum(): return getattr(sys.modules[module], target)
package_name = module.split('.')[0]
namespace[package_name] = sys.modules[package_name]
return eval('%s.%s' % (module, target), namespace)
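# Examples (added comment, illustrative): load('os.path') returns the module,
# load('os.path:basename') returns the function, and load('os.path:basename("/a/b")') returns 'b'.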
def load_app(target):
""" Load a bottle application from a module and make sure that the import
does not affect the current default application, but returns a separate
application object. See :func:`load` for the target parameter. """
global NORUN; NORUN, nr_old = True, NORUN
tmp = default_app.push() # Create a new "default application"
try:
rv = load(target) # Import the target module
return rv if callable(rv) else tmp
finally:
default_app.remove(tmp) # Remove the temporary added default application
NORUN = nr_old
_debug = debug
def run(app=None, server='wsgiref', host='127.0.0.1', port=8080,
interval=1, reloader=False, quiet=False, plugins=None,
debug=None, **kargs):
""" Start a server instance. This method blocks until the server terminates.
:param app: WSGI application or target string supported by
:func:`load_app`. (default: :func:`default_app`)
:param server: Server adapter to use. See :data:`server_names` keys
for valid names or pass a :class:`ServerAdapter` subclass.
(default: `wsgiref`)
        :param host: Server address to bind to. Pass ``0.0.0.0`` to listen on
all interfaces including the external one. (default: 127.0.0.1)
:param port: Server port to bind to. Values below 1024 require root
privileges. (default: 8080)
:param reloader: Start auto-reloading server? (default: False)
:param interval: Auto-reloader interval in seconds (default: 1)
:param quiet: Suppress output to stdout and stderr? (default: False)
:param options: Options passed to the server adapter.
"""
if NORUN: return
if reloader and not os.environ.get('BOTTLE_CHILD'):
lockfile = None
try:
fd, lockfile = tempfile.mkstemp(prefix='bottle.', suffix='.lock')
os.close(fd) # We only need this file to exist. We never write to it
while os.path.exists(lockfile):
args = [sys.executable] + sys.argv
environ = os.environ.copy()
environ['BOTTLE_CHILD'] = 'true'
environ['BOTTLE_LOCKFILE'] = lockfile
p = subprocess.Popen(args, env=environ)
while p.poll() is None: # Busy wait...
os.utime(lockfile, None) # I am alive!
time.sleep(interval)
if p.poll() != 3:
if os.path.exists(lockfile): os.unlink(lockfile)
sys.exit(p.poll())
except KeyboardInterrupt:
pass
finally:
if os.path.exists(lockfile):
os.unlink(lockfile)
return
try:
if debug is not None: _debug(debug)
app = app or default_app()
if isinstance(app, basestring):
app = load_app(app)
if not callable(app):
raise ValueError("Application is not callable: %r" % app)
for plugin in plugins or []:
app.install(plugin)
if server in server_names:
server = server_names.get(server)
if isinstance(server, basestring):
server = load(server)
if isinstance(server, type):
server = server(host=host, port=port, **kargs)
if not isinstance(server, ServerAdapter):
raise ValueError("Unknown or unsupported server: %r" % server)
server.quiet = server.quiet or quiet
if not server.quiet:
_stderr("Bottle v%s server starting up (using %s)...\n" % (__version__, repr(server)))
_stderr("Listening on http://%s:%d/\n" % (server.host, server.port))
_stderr("Hit Ctrl-C to quit.\n\n")
if reloader:
lockfile = os.environ.get('BOTTLE_LOCKFILE')
bgcheck = FileCheckerThread(lockfile, interval)
with bgcheck:
server.run(app)
if bgcheck.status == 'reload':
sys.exit(3)
else:
server.run(app)
except KeyboardInterrupt:
pass
except (SystemExit, MemoryError):
raise
except:
if not reloader: raise
if not getattr(server, 'quiet', quiet):
print_exc()
time.sleep(interval)
sys.exit(3)
class FileCheckerThread(threading.Thread):
""" Interrupt main-thread as soon as a changed module file is detected,
the lockfile gets deleted or gets to old. """
def __init__(self, lockfile, interval):
threading.Thread.__init__(self)
self.lockfile, self.interval = lockfile, interval
#: Is one of 'reload', 'error' or 'exit'
self.status = None
def run(self):
exists = os.path.exists
mtime = lambda p: os.stat(p).st_mtime
files = dict()
for module in list(sys.modules.values()):
path = getattr(module, '__file__', '')
if path[-4:] in ('.pyo', '.pyc'): path = path[:-1]
if path and exists(path): files[path] = mtime(path)
while not self.status:
if not exists(self.lockfile)\
or mtime(self.lockfile) < time.time() - self.interval - 5:
self.status = 'error'
thread.interrupt_main()
for path, lmtime in list(files.items()):
if not exists(path) or mtime(path) > lmtime:
self.status = 'reload'
thread.interrupt_main()
break
time.sleep(self.interval)
def __enter__(self):
self.start()
def __exit__(self, exc_type, *_):
if not self.status: self.status = 'exit' # silent exit
self.join()
return exc_type is not None and issubclass(exc_type, KeyboardInterrupt)
###############################################################################
# Template Adapters ############################################################
###############################################################################
class TemplateError(HTTPError):
def __init__(self, message):
HTTPError.__init__(self, 500, message)
class BaseTemplate(object):
""" Base class and minimal API for template adapters """
extensions = ['tpl','html','thtml','stpl']
settings = {} #used in prepare()
defaults = {} #used in render()
def __init__(self, source=None, name=None, lookup=None, encoding='utf8', **settings):
""" Create a new template.
If the source parameter (str or buffer) is missing, the name argument
is used to guess a template filename. Subclasses can assume that
self.source and/or self.filename are set. Both are strings.
The lookup, encoding and settings parameters are stored as instance
variables.
The lookup parameter stores a list containing directory paths.
The encoding parameter should be used to decode byte strings or files.
The settings parameter contains a dict for engine-specific settings.
"""
self.name = name
self.source = source.read() if hasattr(source, 'read') else source
self.filename = source.filename if hasattr(source, 'filename') else None
self.lookup = [os.path.abspath(x) for x in lookup] if lookup else []
self.encoding = encoding
self.settings = self.settings.copy() # Copy from class variable
self.settings.update(settings) # Apply
if not self.source and self.name:
self.filename = self.search(self.name, self.lookup)
if not self.filename:
raise TemplateError('Template %s not found.' % repr(name))
if not self.source and not self.filename:
raise TemplateError('No template specified.')
self.prepare(**self.settings)
@classmethod
def search(cls, name, lookup=None):
""" Search name in all directories specified in lookup.
First without, then with common extensions. Return first hit. """
if not lookup:
depr('The template lookup path list should not be empty.', True) #0.12
lookup = ['.']
if os.path.isabs(name) and os.path.isfile(name):
depr('Absolute template path names are deprecated.', True) #0.12
return os.path.abspath(name)
for spath in lookup:
spath = os.path.abspath(spath) + os.sep
fname = os.path.abspath(os.path.join(spath, name))
if not fname.startswith(spath): continue
if os.path.isfile(fname): return fname
for ext in cls.extensions:
if os.path.isfile('%s.%s' % (fname, ext)):
return '%s.%s' % (fname, ext)
@classmethod
def global_config(cls, key, *args):
""" This reads or sets the global settings stored in class.settings. """
if args:
cls.settings = cls.settings.copy() # Make settings local to class
cls.settings[key] = args[0]
else:
return cls.settings[key]
def prepare(self, **options):
""" Run preparations (parsing, caching, ...).
It should be possible to call this again to refresh a template or to
update settings.
"""
raise NotImplementedError
def render(self, *args, **kwargs):
""" Render the template with the specified local variables and return
a single byte or unicode string. If it is a byte string, the encoding
must match self.encoding. This method must be thread-safe!
Local variables may be provided in dictionaries (args)
or directly, as keywords (kwargs).
"""
raise NotImplementedError
class MakoTemplate(BaseTemplate):
def prepare(self, **options):
from mako.template import Template
from mako.lookup import TemplateLookup
options.update({'input_encoding':self.encoding})
options.setdefault('format_exceptions', bool(DEBUG))
lookup = TemplateLookup(directories=self.lookup, **options)
if self.source:
self.tpl = Template(self.source, lookup=lookup, **options)
else:
self.tpl = Template(uri=self.name, filename=self.filename, lookup=lookup, **options)
def render(self, *args, **kwargs):
for dictarg in args: kwargs.update(dictarg)
_defaults = self.defaults.copy()
_defaults.update(kwargs)
return self.tpl.render(**_defaults)
class CheetahTemplate(BaseTemplate):
def prepare(self, **options):
from Cheetah.Template import Template
self.context = threading.local()
self.context.vars = {}
options['searchList'] = [self.context.vars]
if self.source:
self.tpl = Template(source=self.source, **options)
else:
self.tpl = Template(file=self.filename, **options)
def render(self, *args, **kwargs):
for dictarg in args: kwargs.update(dictarg)
self.context.vars.update(self.defaults)
self.context.vars.update(kwargs)
out = str(self.tpl)
self.context.vars.clear()
return out
class Jinja2Template(BaseTemplate):
def prepare(self, filters=None, tests=None, globals={}, **kwargs):
from jinja2 import Environment, FunctionLoader
self.env = Environment(loader=FunctionLoader(self.loader), **kwargs)
if filters: self.env.filters.update(filters)
if tests: self.env.tests.update(tests)
if globals: self.env.globals.update(globals)
if self.source:
self.tpl = self.env.from_string(self.source)
else:
self.tpl = self.env.get_template(self.filename)
def render(self, *args, **kwargs):
for dictarg in args: kwargs.update(dictarg)
_defaults = self.defaults.copy()
_defaults.update(kwargs)
return self.tpl.render(**_defaults)
def loader(self, name):
fname = self.search(name, self.lookup)
if not fname: return
with open(fname, "rb") as f:
return f.read().decode(self.encoding)
class SimpleTemplate(BaseTemplate):
def prepare(self, escape_func=html_escape, noescape=False, syntax=None, **ka):
self.cache = {}
enc = self.encoding
self._str = lambda x: touni(x, enc)
self._escape = lambda x: escape_func(touni(x, enc))
self.syntax = syntax
if noescape:
self._str, self._escape = self._escape, self._str
@cached_property
def co(self):
return compile(self.code, self.filename or '<string>', 'exec')
@cached_property
def code(self):
source = self.source
if not source:
with open(self.filename, 'rb') as f:
source = f.read()
try:
source, encoding = touni(source), 'utf8'
except UnicodeError:
depr('Template encodings other than utf8 are no longer supported.') #0.11
source, encoding = touni(source, 'latin1'), 'latin1'
parser = StplParser(source, encoding=encoding, syntax=self.syntax)
code = parser.translate()
self.encoding = parser.encoding
return code
def _rebase(self, _env, _name=None, **kwargs):
_env['_rebase'] = (_name, kwargs)
def _include(self, _env, _name=None, **kwargs):
env = _env.copy()
env.update(kwargs)
if _name not in self.cache:
self.cache[_name] = self.__class__(name=_name, lookup=self.lookup)
return self.cache[_name].execute(env['_stdout'], env)
def execute(self, _stdout, kwargs):
env = self.defaults.copy()
env.update(kwargs)
env.update({'_stdout': _stdout, '_printlist': _stdout.extend,
'include': functools.partial(self._include, env),
'rebase': functools.partial(self._rebase, env), '_rebase': None,
'_str': self._str, '_escape': self._escape, 'get': env.get,
'setdefault': env.setdefault, 'defined': env.__contains__ })
eval(self.co, env)
if env.get('_rebase'):
subtpl, rargs = env.pop('_rebase')
rargs['base'] = ''.join(_stdout) #copy stdout
del _stdout[:] # clear stdout
return self._include(env, subtpl, **rargs)
return env
def render(self, *args, **kwargs):
""" Render the template using keyword arguments as local variables. """
env = {}; stdout = []
for dictarg in args: env.update(dictarg)
env.update(kwargs)
self.execute(stdout, env)
return ''.join(stdout)
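# Illustrative SimpleTemplate usage (added comment, a sketch):
#   SimpleTemplate('Hello {{name}}!').render(name='<world>')   # -> 'Hello &lt;world&gt;!'
# {{!expr}} disables escaping; % lines and <% %> blocks embed plain Python code.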
class StplSyntaxError(TemplateError): pass
class StplParser(object):
""" Parser for stpl templates. """
_re_cache = {} #: Cache for compiled re patterns
# This huge pile of voodoo magic splits python code into 8 different tokens.
# 1: All kinds of python strings (trust me, it works)
_re_tok = '((?m)[urbURB]?(?:\'\'(?!\')|""(?!")|\'{6}|"{6}' \
'|\'(?:[^\\\\\']|\\\\.)+?\'|"(?:[^\\\\"]|\\\\.)+?"' \
'|\'{3}(?:[^\\\\]|\\\\.|\\n)+?\'{3}' \
'|"{3}(?:[^\\\\]|\\\\.|\\n)+?"{3}))'
_re_inl = _re_tok.replace('|\\n','') # We re-use this string pattern later
# 2: Comments (until end of line, but not the newline itself)
_re_tok += '|(#.*)'
# 3,4: Keywords that start or continue a python block (only start of line)
_re_tok += '|^([ \\t]*(?:if|for|while|with|try|def|class)\\b)' \
'|^([ \\t]*(?:elif|else|except|finally)\\b)'
# 5: Our special 'end' keyword (but only if it stands alone)
_re_tok += '|((?:^|;)[ \\t]*end[ \\t]*(?=(?:%(block_close)s[ \\t]*)?\\r?$|;|#))'
# 6: A customizable end-of-code-block template token (only end of line)
_re_tok += '|(%(block_close)s[ \\t]*(?=$))'
# 7: And finally, a single newline. The 8th token is 'everything else'
_re_tok += '|(\\r?\\n)'
# Match the start tokens of code areas in a template
_re_split = '(?m)^[ \t]*(\\\\?)((%(line_start)s)|(%(block_start)s))'
# Match inline statements (may contain python strings)
_re_inl = '%%(inline_start)s((?:%s|[^\'"\n]*?)+)%%(inline_end)s' % _re_inl
default_syntax = '<% %> % {{ }}'
def __init__(self, source, syntax=None, encoding='utf8'):
self.source, self.encoding = touni(source, encoding), encoding
self.set_syntax(syntax or self.default_syntax)
self.code_buffer, self.text_buffer = [], []
self.lineno, self.offset = 1, 0
self.indent, self.indent_mod = 0, 0
def get_syntax(self):
""" Tokens as a space separated string (default: <% %> % {{ }}) """
return self._syntax
def set_syntax(self, syntax):
self._syntax = syntax
self._tokens = syntax.split()
if not syntax in self._re_cache:
names = 'block_start block_close line_start inline_start inline_end'
etokens = map(re.escape, self._tokens)
pattern_vars = dict(zip(names.split(), etokens))
patterns = (self._re_split, self._re_tok, self._re_inl)
patterns = [re.compile(p%pattern_vars) for p in patterns]
self._re_cache[syntax] = patterns
self.re_split, self.re_tok, self.re_inl = self._re_cache[syntax]
syntax = property(get_syntax, set_syntax)
def translate(self):
if self.offset: raise RuntimeError('Parser is a one time instance.')
while True:
m = self.re_split.search(self.source[self.offset:])
if m:
text = self.source[self.offset:self.offset+m.start()]
self.text_buffer.append(text)
self.offset += m.end()
if m.group(1): # Escape syntax
line, sep, _ = self.source[self.offset:].partition('\n')
self.text_buffer.append(m.group(2)+line+sep)
self.offset += len(line+sep)+1
continue
self.flush_text()
self.read_code(multiline=bool(m.group(4)))
else: break
self.text_buffer.append(self.source[self.offset:])
self.flush_text()
return ''.join(self.code_buffer)
def read_code(self, multiline):
code_line, comment = '', ''
while True:
m = self.re_tok.search(self.source[self.offset:])
if not m:
code_line += self.source[self.offset:]
self.offset = len(self.source)
self.write_code(code_line.strip(), comment)
return
code_line += self.source[self.offset:self.offset+m.start()]
self.offset += m.end()
_str, _com, _blk1, _blk2, _end, _cend, _nl = m.groups()
if code_line and (_blk1 or _blk2): # a if b else c
code_line += _blk1 or _blk2
continue
if _str: # Python string
code_line += _str
elif _com: # Python comment (up to EOL)
comment = _com
if multiline and _com.strip().endswith(self._tokens[1]):
multiline = False # Allow end-of-block in comments
elif _blk1: # Start-block keyword (if/for/while/def/try/...)
code_line, self.indent_mod = _blk1, -1
self.indent += 1
elif _blk2: # Continue-block keyword (else/elif/except/...)
code_line, self.indent_mod = _blk2, -1
elif _end: # The non-standard 'end'-keyword (ends a block)
self.indent -= 1
elif _cend: # The end-code-block template token (usually '%>')
if multiline: multiline = False
else: code_line += _cend
else: # \n
self.write_code(code_line.strip(), comment)
self.lineno += 1
code_line, comment, self.indent_mod = '', '', 0
if not multiline:
break
def flush_text(self):
text = ''.join(self.text_buffer)
del self.text_buffer[:]
if not text: return
parts, pos, nl = [], 0, '\\\n'+' '*self.indent
for m in self.re_inl.finditer(text):
prefix, pos = text[pos:m.start()], m.end()
if prefix:
parts.append(nl.join(map(repr, prefix.splitlines(True))))
if prefix.endswith('\n'): parts[-1] += nl
parts.append(self.process_inline(m.group(1).strip()))
if pos < len(text):
prefix = text[pos:]
lines = prefix.splitlines(True)
if lines[-1].endswith('\\\\\n'): lines[-1] = lines[-1][:-3]
elif lines[-1].endswith('\\\\\r\n'): lines[-1] = lines[-1][:-4]
parts.append(nl.join(map(repr, lines)))
code = '_printlist((%s,))' % ', '.join(parts)
self.lineno += code.count('\n')+1
self.write_code(code)
@staticmethod
def process_inline(chunk):
if chunk[0] == '!': return '_str(%s)' % chunk[1:]
return '_escape(%s)' % chunk
def write_code(self, line, comment=''):
code = ' ' * (self.indent+self.indent_mod)
code += line.lstrip() + comment + '\n'
self.code_buffer.append(code)
def template(*args, **kwargs):
"""
Get a rendered template as a string iterator.
You can use a name, a filename or a template string as first parameter.
Template rendering arguments can be passed as dictionaries
or directly (as keyword arguments).
"""
tpl = args[0] if args else None
adapter = kwargs.pop('template_adapter', SimpleTemplate)
lookup = kwargs.pop('template_lookup', TEMPLATE_PATH)
tplid = (id(lookup), tpl)
if tplid not in TEMPLATES or DEBUG:
settings = kwargs.pop('template_settings', {})
if isinstance(tpl, adapter):
TEMPLATES[tplid] = tpl
if settings: TEMPLATES[tplid].prepare(**settings)
elif "\n" in tpl or "{" in tpl or "%" in tpl or '$' in tpl:
TEMPLATES[tplid] = adapter(source=tpl, lookup=lookup, **settings)
else:
TEMPLATES[tplid] = adapter(name=tpl, lookup=lookup, **settings)
if not TEMPLATES[tplid]:
abort(500, 'Template (%s) not found' % tpl)
for dictarg in args[1:]: kwargs.update(dictarg)
return TEMPLATES[tplid].render(kwargs)
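# Illustrative template() usage (added comment): template('index', title='Home') looks up
# index.tpl/.html/... on TEMPLATE_PATH, while template('Hello {{name}}', name='x') treats the
# string itself as the template source (because it contains template syntax characters).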
mako_template = functools.partial(template, template_adapter=MakoTemplate)
cheetah_template = functools.partial(template, template_adapter=CheetahTemplate)
jinja2_template = functools.partial(template, template_adapter=Jinja2Template)
def view(tpl_name, **defaults):
""" Decorator: renders a template for a handler.
The handler can control its behavior like that:
- return a dict of template vars to fill out the template
- return something other than a dict and the view decorator will not
process the template, but return the handler result as is.
This includes returning a HTTPResponse(dict) to get,
for instance, JSON with autojson or other castfilters.
"""
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
result = func(*args, **kwargs)
if isinstance(result, (dict, DictMixin)):
tplvars = defaults.copy()
tplvars.update(result)
return template(tpl_name, **tplvars)
elif result is None:
return template(tpl_name, defaults)
return result
return wrapper
return decorator
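# Illustrative view() usage (added comment; 'hello_template' is a hypothetical template name):
#   @route('/hello/<name>')
#   @view('hello_template')
#   def hello(name): return dict(name=name)   # dict keys become template variables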
mako_view = functools.partial(view, template_adapter=MakoTemplate)
cheetah_view = functools.partial(view, template_adapter=CheetahTemplate)
jinja2_view = functools.partial(view, template_adapter=Jinja2Template)
###############################################################################
# Constants and Globals ########################################################
###############################################################################
TEMPLATE_PATH = ['./', './views/']
TEMPLATES = {}
DEBUG = False
NORUN = False # If set, run() does nothing. Used by load_app()
#: A dict to map HTTP status codes (e.g. 404) to phrases (e.g. 'Not Found')
HTTP_CODES = httplib.responses
HTTP_CODES[418] = "I'm a teapot" # RFC 2324
HTTP_CODES[428] = "Precondition Required"
HTTP_CODES[429] = "Too Many Requests"
HTTP_CODES[431] = "Request Header Fields Too Large"
HTTP_CODES[511] = "Network Authentication Required"
_HTTP_STATUS_LINES = dict((k, '%d %s'%(k,v)) for (k,v) in HTTP_CODES.items())
#: The default template used for error pages. Override with @error()
ERROR_PAGE_TEMPLATE = """
%%try:
%%from %s import DEBUG, request
<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML 2.0//EN">
<html>
<head>
<title>Error: {{e.status}}</title>
<style type="text/css">
html {background-color: #eee; font-family: sans-serif;}
body {background-color: #fff; border: 1px solid #ddd;
padding: 15px; margin: 15px;}
pre {background-color: #eee; border: 1px solid #ddd; padding: 5px;}
</style>
</head>
<body>
<h1>Error: {{e.status}}</h1>
<p>Sorry, the requested URL <tt>{{repr(request.url)}}</tt>
caused an error:</p>
<pre>{{e.body}}</pre>
%%if DEBUG and e.exception:
<h2>Exception:</h2>
<pre>{{repr(e.exception)}}</pre>
%%end
%%if DEBUG and e.traceback:
<h2>Traceback:</h2>
<pre>{{e.traceback}}</pre>
%%end
</body>
</html>
%%except ImportError:
<b>ImportError:</b> Could not generate the error page. Please add bottle to
the import path.
%%end
""" % __name__
#: A thread-safe instance of :class:`LocalRequest`. If accessed from within a
#: request callback, this instance always refers to the *current* request
#: (even on a multithreaded server).
request = LocalRequest()
#: A thread-safe instance of :class:`LocalResponse`. It is used to change the
#: HTTP response for the *current* request.
response = LocalResponse()
#: A thread-safe namespace. Not used by Bottle.
local = threading.local()
# Initialize app stack (create first empty Bottle app)
# BC: 0.6.4 and needed for run()
app = default_app = AppStack()
app.push()
#: A virtual package that redirects import statements.
#: Example: ``import bottle.ext.sqlite`` actually imports `bottle_sqlite`.
ext = _ImportRedirect('bottle.ext' if __name__ == '__main__' else __name__+".ext", 'bottle_%s').module
if __name__ == '__main__':
opt, args, parser = _cmd_options, _cmd_args, _cmd_parser
if opt.version:
_stdout('Bottle %s\n'%__version__)
sys.exit(0)
if not args:
parser.print_help()
_stderr('\nError: No application entry point specified.\n')
sys.exit(1)
sys.path.insert(0, '.')
sys.modules.setdefault('bottle', sys.modules['__main__'])
host, port = (opt.bind or 'localhost'), 8080
if ':' in host and host.rfind(']') < host.rfind(':'):
host, port = host.rsplit(':', 1)
host = host.strip('[]')
run(args[0], host=host, port=int(port), server=opt.server,
reloader=opt.reload, plugins=opt.plugin, debug=opt.debug)
# THE END
|
{
"content_hash": "c8dc893b41a5ca9d2c2aaf63f741044a",
"timestamp": "",
"source": "github",
"line_count": 3586,
"max_line_length": 103,
"avg_line_length": 39.510039040713885,
"alnum_prop": 0.5806624647981762,
"repo_name": "tashaband/RYU295",
"id": "3f4f0deaef90687c14f7a914ca733478183ec6a3",
"size": "141729",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "ryu/Gui/bottle.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "846874"
},
{
"name": "CSS",
"bytes": "5449"
},
{
"name": "Erlang",
"bytes": "849217"
},
{
"name": "JavaScript",
"bytes": "1757"
},
{
"name": "Python",
"bytes": "3989777"
},
{
"name": "Shell",
"bytes": "68712"
}
],
"symlink_target": ""
}
|
from django.core.management.commands import migrate
from django.db.utils import DEFAULT_DB_ALIAS
from django_sae.conf.settings import patch_sae_restful_mysql
class Command(migrate.Command):
def handle(self, *args, **kwargs):
patch_sae_restful_mysql()
kwargs.setdefault('database', DEFAULT_DB_ALIAS)
super(Command, self).handle(*args, **kwargs)
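# Usage note (added comment): `manage.py sae_migrate` behaves like Django's built-in `migrate`
# command, but first applies patch_sae_restful_mysql() and defaults the target database
# to DEFAULT_DB_ALIAS.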
|
{
"content_hash": "7e44e8825d94715f013022685136fea2",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 60,
"avg_line_length": 37.4,
"alnum_prop": 0.7272727272727273,
"repo_name": "smallcode/django-sae",
"id": "676967e20c36616d2914f5eb37b320314f64e2ed",
"size": "389",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "django_sae/management/commands/sae_migrate.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "41447"
}
],
"symlink_target": ""
}
|
import pymysql.cursors
from model import Group
from model import Contact
class DBFixture:
def __init__(self, host, database, user, password):
self.host = host
self.database = database
self.user = user
self.password = password
self.connection = pymysql.connect(
host = host,
database = database,
user = user,
password = password
)
self.connection.autocommit(True)
def get_group_list(self):
list = []
with (self.connection.cursor()) as cursor:
cursor.execute(
"select group_id, group_name, group_header, group_footer from group_list"
)
for row in cursor:
id, name, header, footer = row
list.append(
Group(
id=str(id),
name=name,
header=header,
footer=footer
)
)
return list
def get_contact_list(self):
list = []
with (self.connection.cursor()) as cursor:
cursor.execute(
"select id, firstname, lastname, address from addressbook where deprecated='0000-00-00 00:00:00'"
)
for row in cursor:
id, firstname, lastname, address = row
list.append(
Contact(
id=str(id),
firstname=firstname,
lastname=lastname,
address=address
)
)
return list
def destroy(self):
self.connection.close()
def do_something(self):
contacts = self.get_contact_list()
for contact in contacts:
print(contact)
print(len(contacts))
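# Illustrative usage (added comment, a sketch):
#   db = DBFixture(host='127.0.0.1', database='addressbook', user='root', password='')
#   groups, contacts = db.get_group_list(), db.get_contact_list()
#   db.destroy()   # close the connection when done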
|
{
"content_hash": "448a2b2db81b887c1e4ab0de417c455f",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 113,
"avg_line_length": 30.467741935483872,
"alnum_prop": 0.47591318157755425,
"repo_name": "melipharo/stru-python19",
"id": "18231a9f289858143733f9646e3e743413f0ae2e",
"size": "1889",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fixture/db.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "394"
},
{
"name": "HTML",
"bytes": "795"
},
{
"name": "JavaScript",
"bytes": "7037"
},
{
"name": "Python",
"bytes": "44702"
}
],
"symlink_target": ""
}
|
"""A class to help start/stop the lighttpd server used by layout tests."""
import logging
import os
import time
from webkitpy.layout_tests.servers import http_server_base
_log = logging.getLogger(__name__)
class Lighttpd(http_server_base.HttpServerBase):
def __init__(self, port_obj, output_dir, background=False, port=None,
root=None, run_background=None, additional_dirs=None,
layout_tests_dir=None, number_of_servers=None):
"""Args:
output_dir: the absolute path to the layout test result directory
"""
# Webkit tests
http_server_base.HttpServerBase.__init__(self, port_obj, number_of_servers)
self._name = 'lighttpd'
self._output_dir = output_dir
self._port = port
self._root = root
self._run_background = run_background
self._additional_dirs = additional_dirs
self._layout_tests_dir = layout_tests_dir
self._pid_file = self._filesystem.join(self._runtime_path, '%s.pid' % self._name)
if self._port:
self._port = int(self._port)
if not self._layout_tests_dir:
self._layout_tests_dir = self._port_obj.layout_tests_dir()
self._webkit_tests = os.path.join(self._layout_tests_dir, 'http', 'tests')
self._js_test_resource = os.path.join(self._layout_tests_dir, 'resources')
self._media_resource = os.path.join(self._layout_tests_dir, 'media')
# Self generated certificate for SSL server (for client cert get
# <base-path>\chrome\test\data\ssl\certs\root_ca_cert.crt)
self._pem_file = os.path.join(
os.path.dirname(os.path.abspath(__file__)), 'httpd2.pem')
# One mapping where we can get to everything
self.VIRTUALCONFIG = []
if self._webkit_tests:
self.VIRTUALCONFIG.extend(
# Three mappings (one with SSL) for LayoutTests http tests
[{'port': 8000, 'docroot': self._webkit_tests},
{'port': 8080, 'docroot': self._webkit_tests},
{'port': 8443, 'docroot': self._webkit_tests,
'sslcert': self._pem_file}])
def _prepare_config(self):
base_conf_file = self._port_obj.path_from_webkit_base('Tools',
'Scripts', 'webkitpy', 'layout_tests', 'servers', 'lighttpd.conf')
out_conf_file = os.path.join(self._output_dir, 'lighttpd.conf')
time_str = time.strftime("%d%b%Y-%H%M%S")
access_file_name = "access.log-" + time_str + ".txt"
access_log = os.path.join(self._output_dir, access_file_name)
log_file_name = "error.log-" + time_str + ".txt"
error_log = os.path.join(self._output_dir, log_file_name)
# Write out the config
base_conf = self._filesystem.read_text_file(base_conf_file)
# FIXME: This should be re-worked so that this block can
# use with open() instead of a manual file.close() call.
f = self._filesystem.open_text_file_for_writing(out_conf_file)
f.write(base_conf)
# Write out our cgi handlers. Run perl through env so that it
# processes the #! line and runs perl with the proper command
# line arguments. Emulate apache's mod_asis with a cat cgi handler.
f.write(('cgi.assign = ( ".cgi" => "/usr/bin/env",\n'
' ".pl" => "/usr/bin/env",\n'
' ".asis" => "/bin/cat",\n'
' ".php" => "%s" )\n\n') %
self._port_obj._path_to_lighttpd_php())
# Setup log files
f.write(('server.errorlog = "%s"\n'
'accesslog.filename = "%s"\n\n') % (error_log, access_log))
# Setup upload folders. Upload folder is to hold temporary upload files
# and also POST data. This is used to support XHR layout tests that
        # do POSTs.
f.write(('server.upload-dirs = ( "%s" )\n\n') % (self._output_dir))
# Setup a link to where the js test templates are stored
f.write(('alias.url = ( "/js-test-resources" => "%s" )\n\n') %
(self._js_test_resource))
if self._additional_dirs:
for alias, path in self._additional_dirs.iteritems():
f.write(('alias.url += ( "%s" => "%s" )\n\n') % (alias, path))
# Setup a link to where the media resources are stored.
f.write(('alias.url += ( "/media-resources" => "%s" )\n\n') %
(self._media_resource))
# dump out of virtual host config at the bottom.
if self._root:
if self._port:
# Have both port and root dir.
mappings = [{'port': self._port, 'docroot': self._root}]
else:
# Have only a root dir - set the ports as for LayoutTests.
# This is used in ui_tests to run http tests against a browser.
# default set of ports as for LayoutTests but with a
# specified root.
mappings = [{'port': 8000, 'docroot': self._root},
{'port': 8080, 'docroot': self._root},
{'port': 8443, 'docroot': self._root,
'sslcert': self._pem_file}]
else:
mappings = self.VIRTUALCONFIG
for mapping in mappings:
ssl_setup = ''
if 'sslcert' in mapping:
ssl_setup = (' ssl.engine = "enable"\n'
' ssl.pemfile = "%s"\n' % mapping['sslcert'])
f.write(('$SERVER["socket"] == "127.0.0.1:%d" {\n'
' server.document-root = "%s"\n' +
ssl_setup +
'}\n\n') % (mapping['port'], mapping['docroot']))
f.close()
executable = self._port_obj._path_to_lighttpd()
module_path = self._port_obj._path_to_lighttpd_modules()
start_cmd = [executable,
# Newly written config file
'-f', os.path.join(self._output_dir, 'lighttpd.conf'),
# Where it can find its module dynamic libraries
'-m', module_path]
        if not self._run_background:
            # '-D' keeps lighttpd in the foreground (do not daemonize).
            start_cmd.append('-D')
# Copy liblightcomp.dylib to /tmp/lighttpd/lib to work around the
# bug that mod_alias.so loads it from the hard coded path.
if self._port_obj.host.platform.is_mac():
tmp_module_path = '/tmp/lighttpd/lib'
if not self._filesystem.exists(tmp_module_path):
self._filesystem.maybe_make_directory(tmp_module_path)
lib_file = 'liblightcomp.dylib'
self._filesystem.copyfile(self._filesystem.join(module_path, lib_file),
self._filesystem.join(tmp_module_path, lib_file))
self._start_cmd = start_cmd
self._env = self._port_obj.setup_environ_for_server('lighttpd')
self._mappings = mappings
def _remove_stale_logs(self):
# Sometimes logs are open in other processes but they should clear eventually.
for log_prefix in ('access.log-', 'error.log-'):
try:
self._remove_log_files(self._output_dir, log_prefix)
except OSError, e:
_log.warning('Failed to remove old %s %s files' % (self._name, log_prefix))
def _spawn_process(self):
_log.debug('Starting %s server, cmd="%s"' % (self._name, self._start_cmd))
process = self._executive.popen(self._start_cmd, env=self._env, shell=False, stderr=self._executive.PIPE)
pid = process.pid
self._filesystem.write_text_file(self._pid_file, str(pid))
return pid
def _stop_running_server(self):
# FIXME: It would be nice if we had a cleaner way of killing this process.
# Currently we throw away the process object created in _spawn_process,
# since there doesn't appear to be any way to kill the server any more
# cleanly using it than just killing the pid, and we need to support
# killing a pid directly anyway for run-webkit-httpd and run-webkit-websocketserver.
self._wait_for_action(self._check_and_kill)
if self._filesystem.exists(self._pid_file):
self._filesystem.remove(self._pid_file)
def _check_and_kill(self):
if self._executive.check_running_pid(self._pid):
host = self._port_obj.host
if host.platform.is_win() and not host.platform.is_cygwin():
# FIXME: https://bugs.webkit.org/show_bug.cgi?id=106838
# We need to kill all of the child processes as well as the
# parent, so we can't use executive.kill_process().
#
# If this is actually working, we should figure out a clean API.
self._executive.run_command(["taskkill.exe", "/f", "/t", "/pid", self._pid], error_handler=self._executive.ignore_error)
else:
self._executive.kill_process(self._pid)
return False
return True
|
{
"content_hash": "e2c58a1186e12fc2bf213cd539af2060",
"timestamp": "",
"source": "github",
"line_count": 200,
"max_line_length": 136,
"avg_line_length": 45.93,
"alnum_prop": 0.5592205530154583,
"repo_name": "lordmos/blink",
"id": "1fbf1321231976527a3ecb22c72d7a312fe225d2",
"size": "10716",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "Tools/Scripts/webkitpy/layout_tests/servers/http_server.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "6433"
},
{
"name": "C",
"bytes": "753714"
},
{
"name": "C++",
"bytes": "40028043"
},
{
"name": "CSS",
"bytes": "539440"
},
{
"name": "F#",
"bytes": "8755"
},
{
"name": "Java",
"bytes": "18650"
},
{
"name": "JavaScript",
"bytes": "25700387"
},
{
"name": "Objective-C",
"bytes": "426711"
},
{
"name": "PHP",
"bytes": "141755"
},
{
"name": "Perl",
"bytes": "901523"
},
{
"name": "Python",
"bytes": "3748305"
},
{
"name": "Ruby",
"bytes": "141818"
},
{
"name": "Shell",
"bytes": "9635"
},
{
"name": "XSLT",
"bytes": "49328"
}
],
"symlink_target": ""
}
|
from p2ner.core.namespace import Namespace, initNS
from abc import abstractmethod
class Scheduler(Namespace):
def sanityCheck(self, requirements):
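        # NOTE: the early return below disables this check; the loop that
        # follows is currently unreachable.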
return
for var in requirements:
if var not in self.g:
raise ValueError("%s is not a valid variable in current environment" % var)
@initNS
def __init__(self, *args, **kwargs):
self.sanityCheck(["control", "controlPipe", "traffic", "trafficPipe", "overlay"])
self.running = False
self.initScheduler(*args, **kwargs)
@abstractmethod
def initScheduler(self):
pass
@abstractmethod
def produceBlock(self):
pass
@abstractmethod
def start(self):
pass
@abstractmethod
def stop(self):
self.purgeNS()
@abstractmethod
def isRunning(self):
pass
@property
def bufferlist(self):
sid = self.stream.id
neighbours = self.overlay.getNeighbours()
p = [n for n in neighbours if n.s.get(sid)]
p = [n for n in p if n.s[sid].get("buffer")]
ret = {}
for peer in p:
buf = peer.s[sid]["buffer"]
buf.request = peer.s[sid].get("request", [])
ret[buf] = peer
return ret
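# Illustrative sketch (not part of p2ner): a minimal concrete subclass showing
# which abstract hooks an implementation is expected to provide. The class
# name and the trivial bodies are assumptions for demonstration only.
class NullScheduler(Scheduler):
    def initScheduler(self):
        self.running = False
    def produceBlock(self):
        # a real scheduler would pick a block to push/pull here
        return None
    def start(self):
        self.running = True
    def stop(self):
        self.running = False
        Scheduler.stop(self)
    def isRunning(self):
        return self.running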
|
{
"content_hash": "7912dcb4dc5d270a2418af929fad92b5",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 91,
"avg_line_length": 26.729166666666668,
"alnum_prop": 0.5681995323460639,
"repo_name": "schristakidis/p2ner",
"id": "2ac7e1b17d546cb9ed0c1e4fc186f069d597ba53",
"size": "1925",
"binary": false,
"copies": "1",
"ref": "refs/heads/development",
"path": "p2ner/abstract/scheduler.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "303"
},
{
"name": "Python",
"bytes": "1319300"
}
],
"symlink_target": ""
}
|
"""
Module defining Mem5TraderD agent class.
"""
import random
from fms import agents
from fms.utils import BUY, SELL
from fms.utils.exceptions import MissingParameter
class Mem5TraderD(agents.Agent):
"""
Simulate an agent taking random decisions
This agent subclass should have two keys in the
args dict :
- maxprice : maximum order price (float)
- maxbuy : maximum quantity to buy (int)
If any of those parameters is missing, a MissingParameter
exception is raised.
>>> from fms.agents import zerointelligencetrader
>>> params = {'agents': [{'money':10000, 'stocks':200}]}
>>> agent = zerointelligencetrader.ZeroIntelligenceTrader(params)
Traceback (most recent call last):
...
MissingParameter: maxprice
>>> params = {'agents': [{'money':10000, 'stocks':200, 'args':[999]}]}
>>> agent = zerointelligencetrader.ZeroIntelligenceTrader(params)
Traceback (most recent call last):
...
MissingParameter: maxbuy
>>> params = {'agents': [{'money':10000, 'stocks':200, 'args':[999, 100]}]}
>>> agent = zerointelligencetrader.ZeroIntelligenceTrader(params)
>>> print agent.state()
Agent ... - owns $10000.00 and 200 securities
>>> print agent.maxprice
999
>>> print agent.maxbuy
100
The Mem5TraderD acts by returning a
dict with (direction, price, quantity) keys.
The 3 elements of the dict are randomly chosen,
in uniform distributions bounded by the previous
five successes and all bids.
It also defects by a random amount.
>>> len(agent.act())
3
- direction is buy or sell
- price is a %.2f float in [0.01,maxprice]
- quantity is an int in :
- if direction==BUY, [1,self.maxbuy]
- if direction==SELL, [1,self.stocks]
Thus, shortselling is not allowed.
"""
def __init__(self, params, offset=0):
agents.Agent.__init__(self, params, offset)
try:
self.maxprice = self.args[0]
except (AttributeError, IndexError):
raise MissingParameter, 'maxprice'
try:
self.maxbuy = self.args[1]
except IndexError:
raise MissingParameter, 'maxbuy'
del self.args
self.mem = 5
# Prices of previous self.mem successfull bids
self.successes = list()
# All bids
self.bids = list()
def act(self, world=None, market=None):
"""
Return order as a dict with keys in (direction, price, quantity).
If SELL, pick price between highest success, next highest bid.
If BUY, pick price between lowest success, next lowest bid.
To avoid short selling as far as possible, if # of stocks
is zero or negative, force BUY direction.
To avoid levering up as far as possible, if money
is zero or negative, force SELL.
"""
if self.stocks > 0 and self.money > 0:
direction = random.choice((BUY, SELL))
elif self.stocks <= 0:
# Short selling is forbidden
direction = BUY
else:
# money<=0, levering is discouraged
direction = SELL
shift = int(random.random() * self.maxprice * 10)/100.
if direction:
# SELL
try:
minp = max(self.successes)
except ValueError:
# No successes
minp = 0.01
try:
maxp = min([bid for bid in self.bids if bid > minp])
except ValueError:
# No higher bids
maxp = self.maxprice
quantity = random.randint(1, self.stocks)
else:
# BUY
try:
maxp = min(self.successes)
except ValueError:
# No successes
maxp = self.maxprice
try:
minp = max([bid for bid in self.bids if bid < maxp])
except ValueError:
# No lower bids
minp = 0.01
quantity = random.randint(1, self.maxbuy)
price = random.randint(int(minp*100), int(maxp*100))/100.
if direction:
price += shift
else:
price -= shift
self.bids.append(price)
return {'direction':direction, 'price':price, 'quantity':quantity}
def record(self, direction, price, quantity):
"""
Record transaction
"""
if direction:
self.stocks -= quantity
self.money += quantity*price
else:
self.stocks += quantity
self.money -= quantity*price
self.successes.append(price)
if len(self.successes) > self.mem:
self.successes.pop(0)
def _test():
"""
Run tests in docstrings
"""
import doctest
doctest.testmod(optionflags=+doctest.ELLIPSIS)
if __name__ == '__main__':
_test()
|
{
"content_hash": "755d2dc77aa387972360362708079e5b",
"timestamp": "",
"source": "github",
"line_count": 153,
"max_line_length": 79,
"avg_line_length": 32.287581699346404,
"alnum_prop": 0.5740890688259109,
"repo_name": "nikete/fms",
"id": "a9bccad35c82bcadaa41c997ed0dc999f9a2e738",
"size": "4985",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "fms/contrib/coleman/agents/mem5traderd.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "234402"
},
{
"name": "Shell",
"bytes": "66"
}
],
"symlink_target": ""
}
|
import hashlib
import json
import logging
import urllib2
from controllers.gcm.gcm import GCMConnection
class NotificationSender(object):
WEBHOOK_VERSION = 1
@classmethod
def send_gcm(cls, notification):
gcm_connection = GCMConnection()
gcm_connection.notify_device(notification)
@classmethod
def send_ios(cls, notification):
pass
@classmethod
def send_webhook(cls, message, keys):
payload = json.dumps(message, ensure_ascii=True)
invalid_urls = []
for client in keys:
url = client[0]
secret = client[1]
ch = hashlib.sha1()
ch.update(secret)
ch.update(payload)
checksum = ch.hexdigest()
request = urllib2.Request(url, payload)
request.add_header("Content-Type", 'application/json; charset="utf-8"')
request.add_header("X-TBA-Checksum", checksum)
request.add_header("X-TBA-Version", '{}'.format(cls.WEBHOOK_VERSION))
try:
resp = urllib2.urlopen(request)
except urllib2.HTTPError, e:
if e.code == 400:
                    logging.error('400, Invalid message: ' + repr(payload))
elif e.code == 401:
logging.error('401, Webhook unauthorized')
elif e.code == 404:
invalid_urls.append(url)
elif e.code == 500:
logging.error('500, Internal error on server sending message')
else:
logging.exception('Unexpected HTTPError: ' + str(e.code) + " " + e.msg + " " + e.read())
except Exception, ex:
logging.error("Other Exception: {}".format(str(ex)))
if invalid_urls:
logging.warning("Invalid urls while sending webhook: {}".format(str(invalid_urls)))
return False
return True
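# Sketch (not part of the original class): how a webhook receiver could verify
# the X-TBA-Checksum header produced above, given the same shared secret.
# The function and argument names are illustrative assumptions.
def verify_webhook_checksum(secret, payload, checksum_header):
    # recompute sha1(secret + payload) exactly as send_webhook does
    ch = hashlib.sha1()
    ch.update(secret)
    ch.update(payload)
    return ch.hexdigest() == checksum_header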
|
{
"content_hash": "8604dddc2bc1b683928aaa9e009fb7fa",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 108,
"avg_line_length": 32.46666666666667,
"alnum_prop": 0.5569815195071869,
"repo_name": "josephbisch/the-blue-alliance",
"id": "e2512d11d1d81e3ae2594bae9fc3d421823d5597",
"size": "1948",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "helpers/notification_sender.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "101"
},
{
"name": "CSS",
"bytes": "372199"
},
{
"name": "HTML",
"bytes": "5376975"
},
{
"name": "JavaScript",
"bytes": "267581"
},
{
"name": "PHP",
"bytes": "10727"
},
{
"name": "Python",
"bytes": "1597373"
},
{
"name": "Ruby",
"bytes": "4677"
},
{
"name": "Shell",
"bytes": "413"
}
],
"symlink_target": ""
}
|
from oslo.utils import excutils
from heat.common import exception
from heat.common.i18n import _
from heat.engine import attributes
from heat.engine import constraints
from heat.engine import properties
from heat.engine import resource
from heat.engine.resources.vpc import VPC
from heat.openstack.common import log as logging
LOG = logging.getLogger(__name__)
class ElasticIp(resource.Resource):
PROPERTIES = (
DOMAIN, INSTANCE_ID,
) = (
'Domain', 'InstanceId',
)
ATTRIBUTES = (
ALLOCATION_ID,
) = (
'AllocationId',
)
properties_schema = {
DOMAIN: properties.Schema(
properties.Schema.STRING,
_('Set to "vpc" to have IP address allocation associated to your '
'VPC.'),
constraints=[
constraints.AllowedValues(['vpc']),
]
),
INSTANCE_ID: properties.Schema(
properties.Schema.STRING,
_('Instance ID to associate with EIP.'),
update_allowed=True
),
}
attributes_schema = {
ALLOCATION_ID: attributes.Schema(
_('ID that AWS assigns to represent the allocation of the address '
'for use with Amazon VPC. Returned only for VPC elastic IP '
'addresses.')
),
}
def __init__(self, name, json_snippet, stack):
super(ElasticIp, self).__init__(name, json_snippet, stack)
self.ipaddress = None
def _ipaddress(self):
if self.ipaddress is None and self.resource_id is not None:
if self.properties[self.DOMAIN]:
try:
ips = self.neutron().show_floatingip(self.resource_id)
except Exception as ex:
self.client_plugin('neutron').ignore_not_found(ex)
else:
self.ipaddress = ips['floatingip']['floating_ip_address']
else:
try:
ips = self.nova().floating_ips.get(self.resource_id)
except Exception as e:
self.client_plugin('nova').ignore_not_found(e)
else:
self.ipaddress = ips.ip
return self.ipaddress or ''
def handle_create(self):
"""Allocate a floating IP for the current tenant."""
ips = None
if self.properties[self.DOMAIN]:
from heat.engine.resources.internet_gateway import InternetGateway
ext_net = InternetGateway.get_external_network_id(self.neutron())
props = {'floating_network_id': ext_net}
ips = self.neutron().create_floatingip({
'floatingip': props})['floatingip']
self.ipaddress = ips['floating_ip_address']
self.resource_id_set(ips['id'])
LOG.info(_('ElasticIp create %s') % str(ips))
else:
try:
ips = self.nova().floating_ips.create()
except Exception as e:
with excutils.save_and_reraise_exception():
if self.client_plugin('nova').is_not_found(e):
msg = _("No default floating IP pool configured. "
"Set 'default_floating_pool' in nova.conf.")
LOG.error(msg)
if ips:
self.ipaddress = ips.ip
self.resource_id_set(ips.id)
LOG.info(_('ElasticIp create %s') % str(ips))
instance_id = self.properties[self.INSTANCE_ID]
if instance_id:
server = self.nova().servers.get(instance_id)
server.add_floating_ip(self._ipaddress())
def handle_delete(self):
if self.resource_id is None:
return
        # The stack may have only allocated the EIP, or the association may
        # have failed during creation, so there may be no association at all.
        # Disassociating would then raise an exception, which we need to
        # catch and ignore before deallocating the EIP.
instance_id = self.properties[self.INSTANCE_ID]
if instance_id:
try:
server = self.nova().servers.get(instance_id)
if server:
server.remove_floating_ip(self._ipaddress())
except Exception as e:
is_not_found = self.client_plugin('nova').is_not_found(e)
is_unprocessable_entity = self.client_plugin('nova').\
is_unprocessable_entity(e)
if (not is_not_found and not is_unprocessable_entity):
raise
# deallocate the eip
if self.properties[self.DOMAIN]:
try:
self.neutron().delete_floatingip(self.resource_id)
except Exception as ex:
self.client_plugin('neutron').ignore_not_found(ex)
else:
try:
self.nova().floating_ips.delete(self.resource_id)
except Exception as e:
self.client_plugin('nova').ignore_not_found(e)
def handle_update(self, json_snippet, tmpl_diff, prop_diff):
if prop_diff:
if self.INSTANCE_ID in prop_diff:
instance_id = prop_diff.get(self.INSTANCE_ID)
if instance_id:
# no need to remove the floating ip from the old instance,
# nova does this automatically when calling
# add_floating_ip().
server = self.nova().servers.get(instance_id)
server.add_floating_ip(self._ipaddress())
else:
# to remove the floating_ip from the old instance
instance_id_old = self.properties[self.INSTANCE_ID]
if instance_id_old:
server = self.nova().servers.get(instance_id_old)
server.remove_floating_ip(self._ipaddress())
def FnGetRefId(self):
return unicode(self._ipaddress())
def _resolve_attribute(self, name):
if name == self.ALLOCATION_ID:
return unicode(self.resource_id)
class ElasticIpAssociation(resource.Resource):
PROPERTIES = (
INSTANCE_ID, EIP, ALLOCATION_ID, NETWORK_INTERFACE_ID,
) = (
'InstanceId', 'EIP', 'AllocationId', 'NetworkInterfaceId',
)
properties_schema = {
INSTANCE_ID: properties.Schema(
properties.Schema.STRING,
_('Instance ID to associate with EIP specified by EIP property.'),
update_allowed=True
),
EIP: properties.Schema(
properties.Schema.STRING,
_('EIP address to associate with instance.'),
update_allowed=True
),
ALLOCATION_ID: properties.Schema(
properties.Schema.STRING,
_('Allocation ID for VPC EIP address.'),
update_allowed=True
),
NETWORK_INTERFACE_ID: properties.Schema(
properties.Schema.STRING,
_('Network interface ID to associate with EIP.'),
update_allowed=True
),
}
def FnGetRefId(self):
return self.physical_resource_name_or_FnGetRefId()
def validate(self):
'''
Validate any of the provided parameters
'''
super(ElasticIpAssociation, self).validate()
eip = self.properties[self.EIP]
allocation_id = self.properties[self.ALLOCATION_ID]
instance_id = self.properties[self.INSTANCE_ID]
ni_id = self.properties[self.NETWORK_INTERFACE_ID]
        # exactly one of EIP and AllocationId must be provided
if bool(eip) == bool(allocation_id):
msg = _("Either 'EIP' or 'AllocationId' must be provided.")
raise exception.StackValidationFailed(message=msg)
        # if EIP is provided, InstanceId must also be specified
if eip and not instance_id:
msg = _("Must specify 'InstanceId' if you specify 'EIP'.")
raise exception.StackValidationFailed(message=msg)
        # at least one of InstanceId and NetworkInterfaceId must be provided
if not instance_id and not ni_id:
msg = _("Must specify at least one of 'InstanceId' "
"or 'NetworkInterfaceId'.")
raise exception.StackValidationFailed(message=msg)
def _get_port_info(self, ni_id=None, instance_id=None):
port_id = None
port_rsrc = None
if ni_id:
port_rsrc = self.neutron().list_ports(id=ni_id)['ports'][0]
port_id = ni_id
elif instance_id:
ports = self.neutron().list_ports(device_id=instance_id)
port_rsrc = ports['ports'][0]
port_id = port_rsrc['id']
return port_id, port_rsrc
def _neutron_add_gateway_router(self, float_id, network_id):
router = VPC.router_for_vpc(self.neutron(), network_id)
if router is not None:
floatingip = self.neutron().show_floatingip(float_id)
floating_net_id = \
floatingip['floatingip']['floating_network_id']
self.neutron().add_gateway_router(
router['id'], {'network_id': floating_net_id})
def _neutron_update_floating_ip(self, allocationId, port_id=None,
ignore_not_found=False):
try:
self.neutron().update_floatingip(
allocationId,
{'floatingip': {'port_id': port_id}})
except Exception as e:
if ignore_not_found:
self.client_plugin('neutron').ignore_not_found(e)
else:
raise
def _nova_remove_floating_ip(self, instance_id, eip,
ignore_not_found=False):
server = None
try:
server = self.nova().servers.get(instance_id)
server.remove_floating_ip(eip)
except Exception as e:
is_not_found = self.client_plugin('nova').is_not_found(e)
iue = self.client_plugin('nova').is_unprocessable_entity(e)
if ((not ignore_not_found and is_not_found) or
(not is_not_found and not iue)):
raise
return server
def _floatingIp_detach(self,
nova_ignore_not_found=False,
neutron_ignore_not_found=False):
eip = self.properties[self.EIP]
allocation_id = self.properties[self.ALLOCATION_ID]
instance_id = self.properties[self.INSTANCE_ID]
server = None
if eip:
            # if there is an old EIP, remove it from the instance
server = self._nova_remove_floating_ip(instance_id,
eip,
nova_ignore_not_found)
else:
            # otherwise, update the neutron floating IP directly
self._neutron_update_floating_ip(allocation_id,
None,
neutron_ignore_not_found)
return server
def _handle_update_eipInfo(self, prop_diff):
eip_update = prop_diff.get(self.EIP)
allocation_id_update = prop_diff.get(self.ALLOCATION_ID)
instance_id = self.properties[self.INSTANCE_ID]
ni_id = self.properties[self.NETWORK_INTERFACE_ID]
if eip_update:
server = self._floatingIp_detach(neutron_ignore_not_found=True)
if server:
# then to attach the eip_update to the instance
server.add_floating_ip(eip_update)
self.resource_id_set(eip_update)
elif allocation_id_update:
self._floatingIp_detach(nova_ignore_not_found=True)
port_id, port_rsrc = self._get_port_info(ni_id, instance_id)
if not port_id or not port_rsrc:
LOG.error(_('Port not specified.'))
raise exception.NotFound(_('Failed to update, can not found '
'port info.'))
network_id = port_rsrc['network_id']
self._neutron_add_gateway_router(allocation_id_update, network_id)
self._neutron_update_floating_ip(allocation_id_update, port_id)
self.resource_id_set(allocation_id_update)
def _handle_update_portInfo(self, prop_diff):
instance_id_update = prop_diff.get(self.INSTANCE_ID)
ni_id_update = prop_diff.get(self.NETWORK_INTERFACE_ID)
eip = self.properties[self.EIP]
allocation_id = self.properties[self.ALLOCATION_ID]
        # when updating the port info there is no need to detach the port
        # from the old instance/floating IP.
if eip:
server = self.nova().servers.get(instance_id_update)
server.add_floating_ip(eip)
else:
port_id, port_rsrc = self._get_port_info(ni_id_update,
instance_id_update)
if not port_id or not port_rsrc:
LOG.error(_('Port not specified.'))
raise exception.NotFound(_('Failed to update, can not found '
'port info.'))
network_id = port_rsrc['network_id']
self._neutron_add_gateway_router(allocation_id, network_id)
self._neutron_update_floating_ip(allocation_id, port_id)
def _validate_update_properties(self, prop_diff):
        # according to the AWS docs, when updating AllocationId or EIP,
        # also changing InstanceId or NetworkInterfaceId requires the
        # replacement flow
if self.ALLOCATION_ID in prop_diff or self.EIP in prop_diff:
instance_id = prop_diff.get(self.INSTANCE_ID)
ni_id = prop_diff.get(self.NETWORK_INTERFACE_ID)
if instance_id or ni_id:
raise resource.UpdateReplace(self.name)
        # likewise, when updating InstanceId or NetworkInterfaceId, also
        # changing EIP or AllocationId requires the replacement flow
if (self.INSTANCE_ID in prop_diff or
self.NETWORK_INTERFACE_ID in prop_diff):
eip = prop_diff.get(self.EIP)
allocation_id = prop_diff.get(self.ALLOCATION_ID)
if eip or allocation_id:
raise resource.UpdateReplace(self.name)
def handle_create(self):
"""Add a floating IP address to a server."""
if self.properties[self.EIP]:
server = self.nova().servers.get(self.properties[self.INSTANCE_ID])
server.add_floating_ip(self.properties[self.EIP])
self.resource_id_set(self.properties[self.EIP])
LOG.debug('ElasticIpAssociation '
'%(instance)s.add_floating_ip(%(eip)s)',
{'instance': self.properties[self.INSTANCE_ID],
'eip': self.properties[self.EIP]})
elif self.properties[self.ALLOCATION_ID]:
ni_id = self.properties[self.NETWORK_INTERFACE_ID]
instance_id = self.properties[self.INSTANCE_ID]
port_id, port_rsrc = self._get_port_info(ni_id, instance_id)
if not port_id or not port_rsrc:
LOG.warn(_('Skipping association, resource not specified'))
return
float_id = self.properties[self.ALLOCATION_ID]
network_id = port_rsrc['network_id']
self._neutron_add_gateway_router(float_id, network_id)
self._neutron_update_floating_ip(float_id, port_id)
self.resource_id_set(float_id)
def handle_delete(self):
"""Remove a floating IP address from a server or port."""
if self.resource_id is None:
return
if self.properties[self.EIP]:
instance_id = self.properties[self.INSTANCE_ID]
eip = self.properties[self.EIP]
self._nova_remove_floating_ip(instance_id,
eip,
ignore_not_found=True)
elif self.properties[self.ALLOCATION_ID]:
float_id = self.properties[self.ALLOCATION_ID]
self._neutron_update_floating_ip(float_id,
port_id=None,
ignore_not_found=True)
def handle_update(self, json_snippet, tmpl_diff, prop_diff):
if prop_diff:
self._validate_update_properties(prop_diff)
if self.ALLOCATION_ID in prop_diff or self.EIP in prop_diff:
self._handle_update_eipInfo(prop_diff)
elif (self.INSTANCE_ID in prop_diff or
self.NETWORK_INTERFACE_ID in prop_diff):
self._handle_update_portInfo(prop_diff)
def resource_mapping():
return {
'AWS::EC2::EIP': ElasticIp,
'AWS::EC2::EIPAssociation': ElasticIpAssociation,
}
|
{
"content_hash": "5a0fea3b7e8e1903e5085b330608c8e3",
"timestamp": "",
"source": "github",
"line_count": 415,
"max_line_length": 79,
"avg_line_length": 40.99036144578313,
"alnum_prop": 0.5586385280112868,
"repo_name": "redhat-openstack/heat",
"id": "071a9682df1401ac434d8a6b00652eb85e981911",
"size": "17586",
"binary": false,
"copies": "1",
"ref": "refs/heads/f22-patches",
"path": "heat/engine/resources/eip.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "4827027"
},
{
"name": "Shell",
"bytes": "26720"
}
],
"symlink_target": ""
}
|
"""\
============================
Audio Playback using PyMedia
============================
This component plays raw audio sent to its "inbox" inbox using the pymedia
library.
Example Usage
-------------
Playing 8KHz 16 bit mono raw audio from a file::
Pipeline( RateControlledFileReader("recording.raw", readmode="bytes", rate=8000*16/8),
Output(sample_rate=8000, channels=1, format="S16_LE"),
).run()
How does it work?
-----------------
Output uses the PyMedia library to play back audio to the current audio playback
device.
Send raw binary audio data strings to its "inbox" inbox.
This component will terminate if a shutdownMicroprocess or producerFinished
message is sent to the "control" inbox. The message will be forwarded on out of
the "signal" outbox just before termination.
"""
from Axon.Component import component
from Axon.Ipc import shutdownMicroprocess, producerFinished
import sys,os
from Axon.ThreadedComponent import threadedcomponent
import time
from math import log
import pymedia.muxer as muxer
import pymedia.audio.acodec as acodec
import pymedia.audio.sound as sound
from Kamaelia.Support.PyMedia.AudioFormats import format2PyMediaFormat
from Kamaelia.Support.PyMedia.AudioFormats import pyMediaFormat2format
from Kamaelia.Support.PyMedia.AudioFormats import format2BytesPerSample
class _Output(threadedcomponent):
"""\
Output([sample_rate][,channels][,format]) -> new Output component.
Outputs (plays) raw audio data sent to its "inbox" inbox using the PyMedia
library.
Keyword arguments:
- sample_rate -- Sample rate in Hz (default = 44100)
- channels -- Number of channels (default = 2)
- format -- Sample format (default = "S16_LE")
"""
def __init__(self, sample_rate=44100, channels=2, format="S16_LE"):
"""x.__init__(...) initializes x; see x.__class__.__doc__ for signature"""
super(_Output,self).__init__(queuelengths=20)
pformat = format2PyMediaFormat[format]
self.snd = sound.Output(sample_rate, channels, pformat)
def main(self):
while 1:
while self.dataReady("inbox"):
self.snd.play(self.recv("inbox"))
while self.dataReady("control"):
msg=self.recv("control")
if isinstance(msg, (producerFinished,shutdownMicroprocess)):
self.snd.stop()
return
self.send(msg,"signal")
self.pause()
from Chassis import Pipeline
from RateChunker import RateChunker
def Output(sample_rate=44100, channels=2, format="S16_LE", maximumLag = 0.0):
# no idea why, but it seems we need to pass to pymedia chunks of a
# sufficiently short duration to prevent playback artefacts
chunksize = sample_rate/40
    # round down to the nearest power of 2
chunksize = 2**int(log(chunksize)/log(2))
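    # e.g. sample_rate=44100 -> 44100/40 = 1102 -> 2**int(log(1102)/log(2)) = 1024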
datarate = chunksize
chunkrate = 1
quantasize = 1
return Pipeline(
10, RateChunker(datarate, quantasize, chunkrate),
10, _Output(sample_rate, channels, format)
)
__kamaelia_prefabs__ = ( Output, )
#__kamaelia_components__ = ( Output, )
|
{
"content_hash": "533d7b217ac59408f8e581481fd5130b",
"timestamp": "",
"source": "github",
"line_count": 112,
"max_line_length": 88,
"avg_line_length": 29.232142857142858,
"alnum_prop": 0.6423335369578497,
"repo_name": "sparkslabs/kamaelia_",
"id": "1cf6063b7f8eb770513ab064571dc33ab25dbb97",
"size": "4179",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "Sketches/MH/MobileReframe/Output.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3814"
},
{
"name": "C",
"bytes": "212854"
},
{
"name": "C++",
"bytes": "327546"
},
{
"name": "CSS",
"bytes": "114434"
},
{
"name": "ChucK",
"bytes": "422"
},
{
"name": "HTML",
"bytes": "1288960"
},
{
"name": "Java",
"bytes": "31832"
},
{
"name": "JavaScript",
"bytes": "829491"
},
{
"name": "Makefile",
"bytes": "5768"
},
{
"name": "NSIS",
"bytes": "18867"
},
{
"name": "PHP",
"bytes": "49059"
},
{
"name": "Perl",
"bytes": "504"
},
{
"name": "Processing",
"bytes": "2885"
},
{
"name": "Pure Data",
"bytes": "7485482"
},
{
"name": "Python",
"bytes": "18896248"
},
{
"name": "Ruby",
"bytes": "4165"
},
{
"name": "Shell",
"bytes": "707430"
}
],
"symlink_target": ""
}
|
"""The tests for the group fan platform."""
from unittest.mock import patch
import pytest
from homeassistant import config as hass_config
from homeassistant.components.fan import (
ATTR_DIRECTION,
ATTR_OSCILLATING,
ATTR_PERCENTAGE,
ATTR_PERCENTAGE_STEP,
DIRECTION_FORWARD,
DIRECTION_REVERSE,
DOMAIN,
SERVICE_OSCILLATE,
SERVICE_SET_DIRECTION,
SERVICE_SET_PERCENTAGE,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
SUPPORT_DIRECTION,
SUPPORT_OSCILLATE,
SUPPORT_SET_SPEED,
)
from homeassistant.components.group import SERVICE_RELOAD
from homeassistant.components.group.fan import DEFAULT_NAME
from homeassistant.const import (
ATTR_ASSUMED_STATE,
ATTR_ENTITY_ID,
ATTR_FRIENDLY_NAME,
ATTR_SUPPORTED_FEATURES,
CONF_ENTITIES,
CONF_UNIQUE_ID,
STATE_OFF,
STATE_ON,
)
from homeassistant.core import CoreState
from homeassistant.helpers import entity_registry as er
from homeassistant.setup import async_setup_component
from tests.common import assert_setup_component, get_fixture_path
FAN_GROUP = "fan.fan_group"
MISSING_FAN_ENTITY_ID = "fan.missing"
LIVING_ROOM_FAN_ENTITY_ID = "fan.living_room_fan"
PERCENTAGE_FULL_FAN_ENTITY_ID = "fan.percentage_full_fan"
CEILING_FAN_ENTITY_ID = "fan.ceiling_fan"
PERCENTAGE_LIMITED_FAN_ENTITY_ID = "fan.percentage_limited_fan"
FULL_FAN_ENTITY_IDS = [LIVING_ROOM_FAN_ENTITY_ID, PERCENTAGE_FULL_FAN_ENTITY_ID]
LIMITED_FAN_ENTITY_IDS = [CEILING_FAN_ENTITY_ID, PERCENTAGE_LIMITED_FAN_ENTITY_ID]
FULL_SUPPORT_FEATURES = SUPPORT_SET_SPEED | SUPPORT_DIRECTION | SUPPORT_OSCILLATE
CONFIG_MISSING_FAN = {
DOMAIN: [
{"platform": "demo"},
{
"platform": "group",
CONF_ENTITIES: [
MISSING_FAN_ENTITY_ID,
*FULL_FAN_ENTITY_IDS,
*LIMITED_FAN_ENTITY_IDS,
],
},
]
}
CONFIG_FULL_SUPPORT = {
DOMAIN: [
{"platform": "demo"},
{
"platform": "group",
CONF_ENTITIES: [*FULL_FAN_ENTITY_IDS],
},
]
}
CONFIG_LIMITED_SUPPORT = {
DOMAIN: [
{
"platform": "group",
CONF_ENTITIES: [*LIMITED_FAN_ENTITY_IDS],
},
]
}
CONFIG_ATTRIBUTES = {
DOMAIN: {
"platform": "group",
CONF_ENTITIES: [*FULL_FAN_ENTITY_IDS, *LIMITED_FAN_ENTITY_IDS],
CONF_UNIQUE_ID: "unique_identifier",
}
}
@pytest.fixture
async def setup_comp(hass, config_count):
"""Set up group fan component."""
config, count = config_count
with assert_setup_component(count, DOMAIN):
await async_setup_component(hass, DOMAIN, config)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
@pytest.mark.parametrize("config_count", [(CONFIG_ATTRIBUTES, 1)])
async def test_state(hass, setup_comp):
"""Test handling of state."""
state = hass.states.get(FAN_GROUP)
# No entity has a valid state -> group state off
assert state.state == STATE_OFF
assert state.attributes[ATTR_FRIENDLY_NAME] == DEFAULT_NAME
assert state.attributes[ATTR_ENTITY_ID] == [
*FULL_FAN_ENTITY_IDS,
*LIMITED_FAN_ENTITY_IDS,
]
assert ATTR_ASSUMED_STATE not in state.attributes
assert state.attributes[ATTR_SUPPORTED_FEATURES] == 0
# Set all entities as on -> group state on
hass.states.async_set(CEILING_FAN_ENTITY_ID, STATE_ON, {})
hass.states.async_set(LIVING_ROOM_FAN_ENTITY_ID, STATE_ON, {})
hass.states.async_set(PERCENTAGE_FULL_FAN_ENTITY_ID, STATE_ON, {})
hass.states.async_set(PERCENTAGE_LIMITED_FAN_ENTITY_ID, STATE_ON, {})
await hass.async_block_till_done()
state = hass.states.get(FAN_GROUP)
assert state.state == STATE_ON
# Set all entities as off -> group state off
hass.states.async_set(CEILING_FAN_ENTITY_ID, STATE_OFF, {})
hass.states.async_set(LIVING_ROOM_FAN_ENTITY_ID, STATE_OFF, {})
hass.states.async_set(PERCENTAGE_FULL_FAN_ENTITY_ID, STATE_OFF, {})
hass.states.async_set(PERCENTAGE_LIMITED_FAN_ENTITY_ID, STATE_OFF, {})
await hass.async_block_till_done()
state = hass.states.get(FAN_GROUP)
assert state.state == STATE_OFF
# Set first entity as on -> group state on
hass.states.async_set(CEILING_FAN_ENTITY_ID, STATE_ON, {})
hass.states.async_set(LIVING_ROOM_FAN_ENTITY_ID, STATE_OFF, {})
hass.states.async_set(PERCENTAGE_FULL_FAN_ENTITY_ID, STATE_OFF, {})
hass.states.async_set(PERCENTAGE_LIMITED_FAN_ENTITY_ID, STATE_OFF, {})
await hass.async_block_till_done()
state = hass.states.get(FAN_GROUP)
assert state.state == STATE_ON
# Set last entity as on -> group state on
hass.states.async_set(CEILING_FAN_ENTITY_ID, STATE_OFF, {})
hass.states.async_set(LIVING_ROOM_FAN_ENTITY_ID, STATE_OFF, {})
hass.states.async_set(PERCENTAGE_FULL_FAN_ENTITY_ID, STATE_OFF, {})
hass.states.async_set(PERCENTAGE_LIMITED_FAN_ENTITY_ID, STATE_ON, {})
await hass.async_block_till_done()
state = hass.states.get(FAN_GROUP)
assert state.state == STATE_ON
# now remove an entity
hass.states.async_remove(PERCENTAGE_LIMITED_FAN_ENTITY_ID)
await hass.async_block_till_done()
state = hass.states.get(FAN_GROUP)
assert state.state == STATE_OFF
assert ATTR_ASSUMED_STATE not in state.attributes
assert state.attributes[ATTR_SUPPORTED_FEATURES] == 0
# Test entity registry integration
entity_registry = er.async_get(hass)
entry = entity_registry.async_get(FAN_GROUP)
assert entry
assert entry.unique_id == "unique_identifier"
@pytest.mark.parametrize("config_count", [(CONFIG_ATTRIBUTES, 1)])
async def test_attributes(hass, setup_comp):
"""Test handling of state attributes."""
state = hass.states.get(FAN_GROUP)
assert state.state == STATE_OFF
assert state.attributes[ATTR_FRIENDLY_NAME] == DEFAULT_NAME
assert state.attributes[ATTR_ENTITY_ID] == [
*FULL_FAN_ENTITY_IDS,
*LIMITED_FAN_ENTITY_IDS,
]
assert ATTR_ASSUMED_STATE not in state.attributes
assert state.attributes[ATTR_SUPPORTED_FEATURES] == 0
hass.states.async_set(CEILING_FAN_ENTITY_ID, STATE_ON, {})
hass.states.async_set(LIVING_ROOM_FAN_ENTITY_ID, STATE_ON, {})
hass.states.async_set(PERCENTAGE_FULL_FAN_ENTITY_ID, STATE_ON, {})
hass.states.async_set(PERCENTAGE_LIMITED_FAN_ENTITY_ID, STATE_ON, {})
await hass.async_block_till_done()
state = hass.states.get(FAN_GROUP)
assert state.state == STATE_ON
# Add Entity that supports speed
hass.states.async_set(
CEILING_FAN_ENTITY_ID,
STATE_ON,
{
ATTR_SUPPORTED_FEATURES: SUPPORT_SET_SPEED,
ATTR_PERCENTAGE: 50,
},
)
await hass.async_block_till_done()
state = hass.states.get(FAN_GROUP)
assert state.state == STATE_ON
assert ATTR_ASSUMED_STATE not in state.attributes
assert state.attributes[ATTR_SUPPORTED_FEATURES] == SUPPORT_SET_SPEED
assert ATTR_PERCENTAGE in state.attributes
assert state.attributes[ATTR_PERCENTAGE] == 50
assert ATTR_ASSUMED_STATE not in state.attributes
    # ### Test assumed state ###
    # ##########################
    # Add Entity with a different speed should set assumed state
hass.states.async_set(
PERCENTAGE_LIMITED_FAN_ENTITY_ID,
STATE_ON,
{
ATTR_SUPPORTED_FEATURES: SUPPORT_SET_SPEED,
ATTR_PERCENTAGE: 75,
},
)
await hass.async_block_till_done()
state = hass.states.get(FAN_GROUP)
assert state.state == STATE_ON
assert state.attributes[ATTR_ASSUMED_STATE] is True
assert state.attributes[ATTR_PERCENTAGE] == int((50 + 75) / 2)
@pytest.mark.parametrize("config_count", [(CONFIG_FULL_SUPPORT, 2)])
async def test_direction_oscillating(hass, setup_comp):
"""Test handling of direction and oscillating attributes."""
hass.states.async_set(
LIVING_ROOM_FAN_ENTITY_ID,
STATE_ON,
{
ATTR_SUPPORTED_FEATURES: FULL_SUPPORT_FEATURES,
ATTR_OSCILLATING: True,
ATTR_DIRECTION: DIRECTION_FORWARD,
ATTR_PERCENTAGE: 50,
},
)
hass.states.async_set(
PERCENTAGE_FULL_FAN_ENTITY_ID,
STATE_ON,
{
ATTR_SUPPORTED_FEATURES: FULL_SUPPORT_FEATURES,
ATTR_OSCILLATING: True,
ATTR_DIRECTION: DIRECTION_FORWARD,
ATTR_PERCENTAGE: 50,
},
)
await hass.async_block_till_done()
state = hass.states.get(FAN_GROUP)
assert state.state == STATE_ON
assert state.attributes[ATTR_FRIENDLY_NAME] == DEFAULT_NAME
assert state.attributes[ATTR_ENTITY_ID] == [*FULL_FAN_ENTITY_IDS]
assert ATTR_ASSUMED_STATE not in state.attributes
assert state.attributes[ATTR_SUPPORTED_FEATURES] == FULL_SUPPORT_FEATURES
assert ATTR_PERCENTAGE in state.attributes
assert state.attributes[ATTR_PERCENTAGE] == 50
assert state.attributes[ATTR_OSCILLATING] is True
assert state.attributes[ATTR_DIRECTION] == DIRECTION_FORWARD
assert ATTR_ASSUMED_STATE not in state.attributes
    # ### Test assumed state ###
    # ##########################
    # Add Entity with a different direction should set assumed state
hass.states.async_set(
PERCENTAGE_FULL_FAN_ENTITY_ID,
STATE_ON,
{
ATTR_SUPPORTED_FEATURES: FULL_SUPPORT_FEATURES,
ATTR_OSCILLATING: True,
ATTR_DIRECTION: DIRECTION_REVERSE,
ATTR_PERCENTAGE: 50,
},
)
await hass.async_block_till_done()
state = hass.states.get(FAN_GROUP)
assert state.state == STATE_ON
assert state.attributes[ATTR_ASSUMED_STATE] is True
assert ATTR_PERCENTAGE in state.attributes
assert state.attributes[ATTR_PERCENTAGE] == 50
assert state.attributes[ATTR_OSCILLATING] is True
assert ATTR_ASSUMED_STATE in state.attributes
# Now that everything is the same, no longer assumed state
hass.states.async_set(
LIVING_ROOM_FAN_ENTITY_ID,
STATE_ON,
{
ATTR_SUPPORTED_FEATURES: FULL_SUPPORT_FEATURES,
ATTR_OSCILLATING: True,
ATTR_DIRECTION: DIRECTION_REVERSE,
ATTR_PERCENTAGE: 50,
},
)
await hass.async_block_till_done()
state = hass.states.get(FAN_GROUP)
assert state.state == STATE_ON
assert ATTR_PERCENTAGE in state.attributes
assert state.attributes[ATTR_PERCENTAGE] == 50
assert state.attributes[ATTR_OSCILLATING] is True
assert state.attributes[ATTR_DIRECTION] == DIRECTION_REVERSE
assert ATTR_ASSUMED_STATE not in state.attributes
hass.states.async_set(
LIVING_ROOM_FAN_ENTITY_ID,
STATE_ON,
{
ATTR_SUPPORTED_FEATURES: FULL_SUPPORT_FEATURES,
ATTR_OSCILLATING: False,
ATTR_DIRECTION: DIRECTION_FORWARD,
ATTR_PERCENTAGE: 50,
},
)
hass.states.async_set(
PERCENTAGE_FULL_FAN_ENTITY_ID,
STATE_ON,
{
ATTR_SUPPORTED_FEATURES: FULL_SUPPORT_FEATURES,
ATTR_OSCILLATING: False,
ATTR_DIRECTION: DIRECTION_FORWARD,
ATTR_PERCENTAGE: 50,
},
)
await hass.async_block_till_done()
state = hass.states.get(FAN_GROUP)
assert state.state == STATE_ON
assert ATTR_PERCENTAGE in state.attributes
assert state.attributes[ATTR_PERCENTAGE] == 50
assert state.attributes[ATTR_OSCILLATING] is False
assert state.attributes[ATTR_DIRECTION] == DIRECTION_FORWARD
assert ATTR_ASSUMED_STATE not in state.attributes
@pytest.mark.parametrize("config_count", [(CONFIG_MISSING_FAN, 2)])
async def test_state_missing_entity_id(hass, setup_comp):
"""Test we can still setup with a missing entity id."""
state = hass.states.get(FAN_GROUP)
await hass.async_block_till_done()
assert state.state == STATE_OFF
async def test_setup_before_started(hass):
"""Test we can setup before starting."""
hass.state = CoreState.stopped
assert await async_setup_component(hass, DOMAIN, CONFIG_MISSING_FAN)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
assert hass.states.get(FAN_GROUP).state == STATE_OFF
@pytest.mark.parametrize("config_count", [(CONFIG_MISSING_FAN, 2)])
async def test_reload(hass, setup_comp):
"""Test the ability to reload fans."""
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
assert hass.states.get(FAN_GROUP).state == STATE_OFF
yaml_path = get_fixture_path("fan_configuration.yaml", "group")
with patch.object(hass_config, "YAML_CONFIG_FILE", yaml_path):
await hass.services.async_call(
"group",
SERVICE_RELOAD,
{},
blocking=True,
)
await hass.async_block_till_done()
assert hass.states.get(FAN_GROUP) is None
assert hass.states.get("fan.upstairs_fans") is not None
@pytest.mark.parametrize("config_count", [(CONFIG_FULL_SUPPORT, 2)])
async def test_service_calls(hass, setup_comp):
"""Test calling services."""
await hass.services.async_call(
DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: FAN_GROUP}, blocking=True
)
assert hass.states.get(LIVING_ROOM_FAN_ENTITY_ID).state == STATE_ON
assert hass.states.get(PERCENTAGE_FULL_FAN_ENTITY_ID).state == STATE_ON
assert hass.states.get(FAN_GROUP).state == STATE_ON
await hass.services.async_call(
DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: FAN_GROUP, ATTR_PERCENTAGE: 66},
blocking=True,
)
living_room_fan_state = hass.states.get(LIVING_ROOM_FAN_ENTITY_ID)
assert living_room_fan_state.attributes[ATTR_PERCENTAGE] == 66
percentage_full_fan_state = hass.states.get(PERCENTAGE_FULL_FAN_ENTITY_ID)
assert percentage_full_fan_state.attributes[ATTR_PERCENTAGE] == 66
fan_group_state = hass.states.get(FAN_GROUP)
assert fan_group_state.attributes[ATTR_PERCENTAGE] == 66
assert fan_group_state.attributes[ATTR_PERCENTAGE_STEP] == 100 / 3
await hass.services.async_call(
DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: FAN_GROUP}, blocking=True
)
assert hass.states.get(LIVING_ROOM_FAN_ENTITY_ID).state == STATE_OFF
assert hass.states.get(PERCENTAGE_FULL_FAN_ENTITY_ID).state == STATE_OFF
assert hass.states.get(FAN_GROUP).state == STATE_OFF
await hass.services.async_call(
DOMAIN,
SERVICE_SET_PERCENTAGE,
{ATTR_ENTITY_ID: FAN_GROUP, ATTR_PERCENTAGE: 100},
blocking=True,
)
living_room_fan_state = hass.states.get(LIVING_ROOM_FAN_ENTITY_ID)
assert living_room_fan_state.attributes[ATTR_PERCENTAGE] == 100
percentage_full_fan_state = hass.states.get(PERCENTAGE_FULL_FAN_ENTITY_ID)
assert percentage_full_fan_state.attributes[ATTR_PERCENTAGE] == 100
fan_group_state = hass.states.get(FAN_GROUP)
assert fan_group_state.attributes[ATTR_PERCENTAGE] == 100
await hass.services.async_call(
DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: FAN_GROUP, ATTR_PERCENTAGE: 0},
blocking=True,
)
assert hass.states.get(LIVING_ROOM_FAN_ENTITY_ID).state == STATE_OFF
assert hass.states.get(PERCENTAGE_FULL_FAN_ENTITY_ID).state == STATE_OFF
assert hass.states.get(FAN_GROUP).state == STATE_OFF
await hass.services.async_call(
DOMAIN,
SERVICE_OSCILLATE,
{ATTR_ENTITY_ID: FAN_GROUP, ATTR_OSCILLATING: True},
blocking=True,
)
living_room_fan_state = hass.states.get(LIVING_ROOM_FAN_ENTITY_ID)
assert living_room_fan_state.attributes[ATTR_OSCILLATING] is True
percentage_full_fan_state = hass.states.get(PERCENTAGE_FULL_FAN_ENTITY_ID)
assert percentage_full_fan_state.attributes[ATTR_OSCILLATING] is True
fan_group_state = hass.states.get(FAN_GROUP)
assert fan_group_state.attributes[ATTR_OSCILLATING] is True
await hass.services.async_call(
DOMAIN,
SERVICE_OSCILLATE,
{ATTR_ENTITY_ID: FAN_GROUP, ATTR_OSCILLATING: False},
blocking=True,
)
living_room_fan_state = hass.states.get(LIVING_ROOM_FAN_ENTITY_ID)
assert living_room_fan_state.attributes[ATTR_OSCILLATING] is False
percentage_full_fan_state = hass.states.get(PERCENTAGE_FULL_FAN_ENTITY_ID)
assert percentage_full_fan_state.attributes[ATTR_OSCILLATING] is False
fan_group_state = hass.states.get(FAN_GROUP)
assert fan_group_state.attributes[ATTR_OSCILLATING] is False
await hass.services.async_call(
DOMAIN,
SERVICE_SET_DIRECTION,
{ATTR_ENTITY_ID: FAN_GROUP, ATTR_DIRECTION: DIRECTION_FORWARD},
blocking=True,
)
living_room_fan_state = hass.states.get(LIVING_ROOM_FAN_ENTITY_ID)
assert living_room_fan_state.attributes[ATTR_DIRECTION] == DIRECTION_FORWARD
percentage_full_fan_state = hass.states.get(PERCENTAGE_FULL_FAN_ENTITY_ID)
assert percentage_full_fan_state.attributes[ATTR_DIRECTION] == DIRECTION_FORWARD
fan_group_state = hass.states.get(FAN_GROUP)
assert fan_group_state.attributes[ATTR_DIRECTION] == DIRECTION_FORWARD
await hass.services.async_call(
DOMAIN,
SERVICE_SET_DIRECTION,
{ATTR_ENTITY_ID: FAN_GROUP, ATTR_DIRECTION: DIRECTION_REVERSE},
blocking=True,
)
living_room_fan_state = hass.states.get(LIVING_ROOM_FAN_ENTITY_ID)
assert living_room_fan_state.attributes[ATTR_DIRECTION] == DIRECTION_REVERSE
percentage_full_fan_state = hass.states.get(PERCENTAGE_FULL_FAN_ENTITY_ID)
assert percentage_full_fan_state.attributes[ATTR_DIRECTION] == DIRECTION_REVERSE
fan_group_state = hass.states.get(FAN_GROUP)
assert fan_group_state.attributes[ATTR_DIRECTION] == DIRECTION_REVERSE
|
{
"content_hash": "e13eb94ea117329f7787253d6e6a5c42",
"timestamp": "",
"source": "github",
"line_count": 499,
"max_line_length": 84,
"avg_line_length": 35.81563126252505,
"alnum_prop": 0.6693710832587287,
"repo_name": "jawilson/home-assistant",
"id": "abb1dcf245a8a394dff140b85ec6b04874432b41",
"size": "17872",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/components/group/test_fan.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2782"
},
{
"name": "Python",
"bytes": "40129467"
},
{
"name": "Shell",
"bytes": "4910"
}
],
"symlink_target": ""
}
|
from model.contact import Contact
def test_add_contact_1(app):
app.contact.open_new_contact_page()
app.contact.create(Contact(name="Elena", Middle="Yrevna", Last_name="Korolevskaya", Nickname="ekorolevskaya",
Title="Title", Company="Name", Adress="Mira 2", Home_telephone="656506", Mobile="89632547821",
year="1992", adres_2="Adress 2", phone2="dgdrhtj", notes="segsrhr"))
app.contact.return_to_page()
def test_add_contact_2(app):
app.contact.open_new_contact_page()
app.contact.create(Contact(name="test", Middle="", Last_name="", Nickname="",
Title="", Company="", Adress="", Home_telephone="", Mobile="",
year="", adres_2="", phone2="", notes=""))
app.contact.return_to_page()
|
{
"content_hash": "07ff0269c8cf94f3d5cef457c4d61b61",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 114,
"avg_line_length": 43.833333333333336,
"alnum_prop": 0.6096324461343473,
"repo_name": "ekorolevskaya/zadanie",
"id": "1c8995082065d42b37cd8eaad53cebae9170c77b",
"size": "851",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/test_add_new_contact.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "14780"
}
],
"symlink_target": ""
}
|
import os
from setuptools import setup, find_packages
def read_file(filename):
"""Read a file into a string"""
path = os.path.abspath(os.path.dirname(__file__))
filepath = os.path.join(path, filename)
try:
return open(filepath).read()
except IOError:
return ''
def get_readme():
"""Return the README file contents. Supports text,rst, and markdown"""
for name in ('README', 'README.rst', 'README.md'):
if os.path.exists(name):
return read_file(name)
return ''
# Use the docstring of the __init__ file to be the description
DESC = " ".join(__import__('mediacracy').__doc__.splitlines()).strip()
setup(
name="django-mediacracy",
version=__import__('mediacracy').get_version().replace(' ', '-'),
url='https://github.com/sixpearls/django-mediacracy',
author='Ben Margolis',
author_email='[email protected]',
description=DESC,
long_description=get_readme(),
packages=find_packages(),
include_package_data=True,
install_requires=read_file('requirements.txt'),
classifiers=[
'Framework :: Django',
],
)
|
{
"content_hash": "738d21f47f830f2913272e1639fe737a",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 74,
"avg_line_length": 31.35135135135135,
"alnum_prop": 0.6155172413793103,
"repo_name": "sixpearls/django-mediacracy",
"id": "670f491b852af8920bd34e65c4ab705a88b4a156",
"size": "1160",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33261",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "14696"
},
{
"name": "JavaScript",
"bytes": "9236"
},
{
"name": "Prolog",
"bytes": "3227"
},
{
"name": "Python",
"bytes": "50504"
},
{
"name": "Shell",
"bytes": "3178"
}
],
"symlink_target": ""
}
|
__author__ = 'aje'
#
# Copyright (c) 2008 - 2013 10gen, Inc. <http://10gen.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
import sys
import re
import datetime
# The Blog Post Data Access Object handles interactions with the Posts collection
class BlogPostDAO:
# constructor for the class
def __init__(self, database):
self.db = database
self.posts = database.posts
# inserts the blog entry and returns a permalink for the entry
def insert_entry(self, title, post, tags_array, author):
print "inserting blog entry", title, post
# fix up the permalink to not include whitespace
exp = re.compile('\W') # match anything not alphanumeric
whitespace = re.compile('\s')
temp_title = whitespace.sub("_",title)
permalink = exp.sub('', temp_title)
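        # e.g. title "Hello, World!" -> temp_title "Hello,_World!" -> permalink "Hello_World"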
# Build a new post
post = {"title": title,
"author": author,
"body": post,
"permalink":permalink,
"tags": tags_array,
"comments": [],
"date": datetime.datetime.utcnow()}
# now insert the post
try:
# XXX HW 3.2 Work Here to insert the post
self.posts.insert(post)
print "Inserting the post"
except:
print "Error inserting post"
print "Unexpected error:", sys.exc_info()[0]
return permalink
# returns an array of num_posts posts, reverse ordered
def get_posts(self, num_posts):
cursor = [] # Placeholder so blog compiles before you make your changes
# XXX HW 3.2 Work here to get the posts
cursor = self.posts.find().sort('date',-1).limit(num_posts)
l = []
for post in cursor:
post['date'] = post['date'].strftime("%A, %B %d %Y at %I:%M%p") # fix up date
if 'tags' not in post:
post['tags'] = [] # fill it in if its not there already
if 'comments' not in post:
post['comments'] = []
l.append({'title':post['title'], 'body':post['body'], 'post_date':post['date'],
'permalink':post['permalink'],
'tags':post['tags'],
'author':post['author'],
'comments':post['comments']})
return l
# find a post corresponding to a particular permalink
def get_post_by_permalink(self, permalink):
post = None
# XXX Work here to retrieve the specified post
post = self.posts.find_one({'permalink' : permalink})
if post is not None:
# fix up date
post['date'] = post['date'].strftime("%A, %B %d %Y at %I:%M%p")
return post
# add a comment to a particular blog post
def add_comment(self, permalink, name, email, body):
comment = {'author': name, 'body': body}
if (email != ""):
comment['email'] = email
try:
last_error = {'n':-1} # this is here so the code runs before you fix the next line
# XXX HW 3.3 Work here to add the comment to the designated post
            last_error = self.posts.update({'permalink': permalink}, {'$push': {'comments': comment}})
return last_error['n'] # return the number of documents updated
except:
print "Could not update the collection, error"
print "Unexpected error:", sys.exc_info()[0]
return 0
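# Usage sketch (illustrative, not part of the homework file); assumes a
# pymongo database handle whose 'posts' collection backs the DAO:
#   db = pymongo.MongoClient().blog
#   dao = BlogPostDAO(db)
#   permalink = dao.insert_entry("My title", "body text", ["tag"], "author")
#   post = dao.get_post_by_permalink(permalink)
#   dao.add_comment(permalink, "reader", "", "nice post")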
|
{
"content_hash": "b54ff658bc25c2eceb24c3632669e095",
"timestamp": "",
"source": "github",
"line_count": 131,
"max_line_length": 104,
"avg_line_length": 30.603053435114504,
"alnum_prop": 0.5739585931653779,
"repo_name": "italoag/M101P",
"id": "94abd6563f299bfb25797edc67fe90ee5b54c4a2",
"size": "4009",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Week3/hw3-2and3-3/blogPostDAO.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "148708"
},
{
"name": "Python",
"bytes": "69217"
}
],
"symlink_target": ""
}
|
import configparser
import logging
import logging.config
import os
import tempfile
import yaml
import dcm
import dcm.agent.cloudmetadata as cloudmetadata
from dcm.agent.cloudmetadata import CLOUD_TYPES
import dcm.agent.connection.websocket as websocket
import dcm.agent.exceptions as exceptions
import dcm.agent.job_runner as job_runner
from dcm.agent.plugins.api.exceptions import AgentPluginConfigException
import dcm.agent.tests.utils.test_connection as test_connection # TODO
import dcm.agent.utils as utils
_g_logger = logging.getLogger(__name__)
_g_conf_file_env = "DCM_AGENT_CONF"
class PLATFORM_TYPES(object):
PLATFORM_UBUNTU = "ubuntu"
PLATFORM_RHEL = "rhel"
PLATFORM_CENTOS = "centos"
PLATFORM_DEBIAN = "debian"
def get_python_script_dir():
# we allow it to pull out of the python package for tests and
    # installs that are done from something other than our packaging
_ROOT = dcm.agent.get_root_location()
return os.path.join(_ROOT, 'scripts')
def get_connection_object(conf):
con_type = conf.connection_type
if not con_type:
raise exceptions.AgentOptionValueNotSetException("connection_type")
# XXX should we stevedore load this or __import__ it or go with a
# hard coded list? for now hard coded list
if con_type == "success_tester":
source_file = conf.connection_source_file
if not source_file:
raise exceptions.AgentOptionValueNotSetException(
"[connection]source_file",
msg="Using the %s connection type." % con_type)
fptr = open(source_file, "r")
if not conf.connection_dest_file:
raise exceptions.AgentOptionValueNotSetException(
"[connection]dest_file",
msg="Using the %s connection type." % con_type)
outf = open(conf.connection_dest_file, "w")
con = test_connection.TestConnection(fptr, outf)
elif con_type == "ws":
if not conf.connection_agentmanager_url:
raise exceptions.AgentOptionValueNotSetException(
"[connection]agentmanager_url")
con = websocket.WebSocketConnection(
conf.connection_agentmanager_url,
backoff_amount=conf.connection_backoff,
max_backoff=conf.connection_max_backoff,
heartbeat=conf.connection_heartbeat_frequency,
allow_unknown_certs=conf.connection_allow_unknown_certs,
ca_certs=conf.connection_ca_cert)
else:
raise exceptions.AgentOptionValueException(
"[connection]type", con_type, "ws,success_tester,dummy")
return con
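# Illustrative ini snippet (an assumption, not shipped configuration)
# selecting the websocket connection type handled above:
#   [connection]
#   type = ws
#   agentmanager_url = wss://dcm-agent-manager.example.com/agentManager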
class ConfigOpt(object):
def __init__(self, section, name, t, default=None,
options=None, minv=None, maxv=None, help_msg=None,
hidden=False):
self.section = section
self.name = name
self.my_type = t
self.options = options
self.default = default
self.minv = minv
self.maxv = maxv
self.help_msg = help_msg
self.features = {}
self.hidden = hidden
def get_option_name(self):
option_name = "%s_%s" % (self.section, self.name)
return option_name
def get_default(self):
return self.default
def get_help(self):
return self.help_msg
def get_value(self, parser, default=None, **kwargs):
if default is None:
default = self.default
try:
v = parser.get(self.section, self.name, fallback=default)
except configparser.NoOptionError:
v = default
except configparser.NoSectionError:
v = default
if v is None:
return v
try:
if self.my_type == list:
v = v.split(",")
elif self.my_type == bool:
if type(v) == str:
v = (v.lower() == "true" or v.lower() == "yes")
else:
v = bool(v)
else:
v = self.my_type(v)
except ValueError:
raise exceptions.AgentOptionTypeException(
self.name, self.my_type, v)
if self.options is not None:
vx = v
if type(v) == str:
vx = vx.lower()
if vx not in self.options:
raise exceptions.AgentOptionValueException(
self.name, self.options, v)
if self.my_type == int or self.my_type == float:
if self.minv is not None and v < self.minv:
raise exceptions.AgentOptionRangeException(
self.name, self.minv, self.maxv)
if self.maxv is not None and v > self.maxv:
raise exceptions.AgentOptionValueException(
self.name, self.minv, self.maxv)
return v
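# Usage sketch (illustrative, not from the original file):
#   parser = configparser.ConfigParser()
#   parser.read_string("[workers]\ncount = 4\n")
#   ConfigOpt("workers", "count", int, default=2).get_value(parser)   # -> 4
# A missing option falls back to the default (here 2); a value that cannot be
# coerced to the declared type raises AgentOptionTypeException.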
class FilenameOpt(ConfigOpt):
def __init__(self, section, name, default=None, help_msg=None):
super(FilenameOpt, self).__init__(section, name, str, default=default,
help_msg=help_msg)
def get_value(self, parser, relative_path=None, **kwarg):
v = super(FilenameOpt, self).get_value(parser)
if v is None:
return None
if not os.path.isabs(v):
v = os.path.join(relative_path, v)
return os.path.abspath(v)
class AgentConfig(object):
"""
This is a serializable object that is threaded through to all classes.
When/if multiprocessing is used it will be send to the worker threads.
It is semi-read-only. Any write operation must be done with thread
primitives. The exception is set handshake because that will be done
before any thread is created.
"""
def __init__(self, conf_files):
self._cli_args = None
self._remaining_argv = None
self.instance_id = None
self.jr = None
self.state = "STARTING"
self.features = {}
self.agent_id = None
self.customer_id = None
self.server_id = None
self.server_name = None
self.storage_dbfile = None
self.meta_data_object = None # until we call set_metadata_object
self.config_files = conf_files
self.parse_config_files(build_options_list(), add_features="features")
# here is where we set which Meta object to use from cloudmetadata.py
cloudmetadata.set_metadata_object(self)
self._normalize_options()
setup_logging(self.logging_configfile)
def _normalize_options(self):
if self.storage_dbfile is None:
self.storage_dbfile = \
os.path.join(self.storage_base_dir, "secure", "agentdb.sql")
if self.storage_script_dir is None:
self.storage_script_dir = \
os.path.join(self.storage_base_dir, "bin")
if self.storage_script_dir == "/PYTHON_LIBS_SCRIPTS":
self.storage_script_dir = None
if self.platform_name is None or self.platform_version is None:
distro_name, distro_version = utils.identify_platform(self)
self.platform_name = distro_name
self.platform_version = distro_version
def get_script_location(self, name):
if self.storage_script_dir is not None:
path = os.path.join(self.storage_script_dir, name)
_g_logger.debug("Script location %s" % path)
if not os.path.exists(path):
raise AgentPluginConfigException(
"There is no proper configuration for %s" % name)
return path
script_dir = get_python_script_dir()
_g_logger.debug("Script Dir %s" % script_dir)
for platform in self.platform_script_locations:
_g_logger.debug("Script platform %s" % platform)
path = os.path.join(script_dir, platform, name)
_g_logger.debug("Script location %s" % path)
if os.path.exists(path):
return path
return None
def is_upgrading(self):
return False
def start_job_runner(self):
self.jr = job_runner.JobRunner(self)
def stop_job_runner(self):
if self.jr:
self.jr.shutdown()
self.jr = None
def get_temp_file(self, filename, isdir=False):
new_dir = tempfile.mkdtemp(dir=self.storage_temppath)
if isdir:
return new_dir
return os.path.join(new_dir, filename)
def parse_config_files(self, option_list, add_features=None):
# set all the default values on the agent conf object
for o in option_list:
k = o.get_option_name()
v = o.get_default()
setattr(self, k, v)
for config_file in self.config_files:
relative_path = os.path.dirname(config_file)
parser = configparser.ConfigParser()
parser.read(config_file)
if add_features is not None:
try:
features = parser.items(add_features)
for k, v in features:
self.features[k] = v
except configparser.NoSectionError:
pass
for opt in option_list:
try:
oname = opt.get_option_name()
v = opt.get_value(parser, relative_path=relative_path,
default=getattr(self, oname))
setattr(self, oname, v)
except configparser.NoSectionError:
raise exceptions.AgentOptionSectionNotFoundException(
opt.name)
def get_secure_dir(self):
token_dir = os.path.join(self.storage_base_dir, "secure")
if not os.path.exists(token_dir):
os.mkdir(token_dir, 0o700)
# At some point we should validate that only this user can read this
# file
# utils.validate_file_permissions(
# token_dir, username=self.conf.system_user, permissions=0700)
#
return token_dir
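# Example (illustrative): parse_config_files() flattens every option into an
# attribute named "<section>_<name>" (see ConfigOpt.get_option_name), so a
# hypothetical config file containing
#
#     [storage]
#     base_dir=/dcm
#
# is later read back as conf.storage_base_dir.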
def build_options_list():
option_list = [
ConfigOpt("pydev", "host", str, default=None, options=None,
help_msg="The hostname of the pydev debugger"),
ConfigOpt("pydev", "port", int, default=None, options=None,
help_msg="The port where the pydev debugger is listening"),
ConfigOpt("workers", "count", int, default=2, options=None,
help_msg="The number of worker threads that will be "
"processing incoming requests"),
ConfigOpt("workers", "long_runner_threads", int, default=1,
options=None,
help_msg="The number of worker threads that will be "
"processing long running plugins (anything that "
"returns a job description)"),
ConfigOpt("connection", "type", str, default="ws", options=None,
help_msg="The type of connection object to use. Supported "
"types are ws and fallback"),
FilenameOpt("connection", "source_file", default=None),
FilenameOpt("connection", "dest_file", default=None),
ConfigOpt("connection", "agentmanager_url", str, default=None,
help_msg="The url of the agent manager with which this "
"agent will communicate."),
ConfigOpt("connection", "backoff", int, default=1000,
help_msg="The number of milliseconds to add to the wait "
"time before retrying a failed connection."),
ConfigOpt("connection", "max_backoff", int, default=300000,
help_msg="The maximum number of milliseconds to wait before "
"retrying a failed connection."),
        ConfigOpt("connection", "heartbeat_frequency", int, default=30,
                  help_msg="The number of seconds to wait between sending "
                           "heartbeat messages."),
ConfigOpt("connection", "allow_unknown_certs", bool, default=False,
help_msg="A flag to disable DCM certificate verification. "
"When disabled certificates will be ignored. This "
"is useful for testing but should otherwise be "
"set to False."),
FilenameOpt("connection", "ca_cert", default=None,
                    help_msg="A path to the location of the CA certificate to "
                             "be used when authenticating with DCM."),
FilenameOpt("logging", "configfile", default=None,
help_msg="The location of the log configuration file"),
FilenameOpt("plugin", "configfile",
help_msg="The location of the plugin configuration file"),
FilenameOpt("storage", "temppath", default="/tmp"),
FilenameOpt("storage", "base_dir", default="/dcm"),
FilenameOpt("storage", "mountpoint", default="/mnt/dcmdata"),
FilenameOpt("storage", "dbfile", default=None),
FilenameOpt("storage", "script_dir", default=None),
ConfigOpt("storage", "db_timeout", int, default=60*60*4,
help_msg="The amount of time in seconds for a request id to "
"stay in the database."),
ConfigOpt("storage", "default_filesystem", str, default="ext3"),
ConfigOpt("system", "user", str, default="dcm"),
ConfigOpt("system", "sudo", str, default="/usr/bin/sudo"),
ConfigOpt("intrusion_detection", "ossec", bool, default=False),
ConfigOpt("intrusion_detection", "max_process_time", float, default=5.0,
                  help_msg="This value specifies the amount of time that must "
                           "elapse between processing runs of the alerts file. This "
"value is here to prevent too many frequent alerts "
"from overwhelming the agent."),
ConfigOpt("intrusion_detection", "alert_threshold", int, default=10,
help_msg="The ossec alert level threshold to send to dcm."
" Any alert level below this threshold will be"
" logged locally but not forwarded back to DCM."),
ConfigOpt("cloud", "type", str, default=CLOUD_TYPES.UNKNOWN,
help_msg="The type of cloud on which this agent is running"),
ConfigOpt("cloud", "metadata_url", str,
default=None,
help_msg="The url of the metadata server. Not applicable "
"to all clouds."),
ConfigOpt("messaging", "retransmission_timeout", float,
default=5.0),
ConfigOpt("messaging", "max_at_once", int, default=-1,
help_msg="The maximum number of commands that can be "
"outstanding at once. -1 means infinity."),
ConfigOpt("platform", "script_locations", list,
default="common-linux"),
ConfigOpt("platform", "name", str, default=None,
                  help_msg="The platform/distribution on which this agent is "
"being installed. Must be used with "
"[platform]version.",
options=["ubuntu", "debian", "rhel",
"centos", "fedora"]),
ConfigOpt(
"platform", "version", str, default=None,
help_msg="The platform/distribution version on which this "
"agent is being installed. Must be used with "
"[platform]name."),
ConfigOpt("jobs", "retain_job_time", int, default=3600),
ConfigOpt("test", "skip_handshake", bool, default=False,
help_msg="This value is for internal testing only. "
"Do not change it.", hidden=True),
ConfigOpt("extra", "location", str,
default='http://s3.amazonaws.com/es-pyagent/',
help_msg="Location of extra packages"),
ConfigOpt("extra", "package_name", str, default=None,
help_msg="Name of extra package to be installed"),
ConfigOpt("extra", "base_path", str, default="/opt/dcm-agent-extras",
help_msg="The location where the extras package will be "
"installed. This should only change in conjunction"
" with the extras omnibus installer."),
ConfigOpt("configuration_management", "chef_client_version", str, default="11.16.4",
help_msg="Version of chef client to be installed")
]
return option_list
def setup_logging(logging_configfile):
top_logger = 'dcm.agent'
if logging_configfile is None:
loghandler = logging.StreamHandler()
top_logger = logging.getLogger("")
top_logger.setLevel(logging.DEBUG)
top_logger.addHandler(loghandler)
return
if not os.path.exists(logging_configfile):
raise exceptions.AgentOptionPathNotFoundException(
"logging:configfile", logging_configfile)
with open(logging_configfile, 'rt') as f:
config = yaml.load(f.read())
logging.config.dictConfig(config)
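# Example (illustrative): setup_logging() hands the parsed YAML straight to
# logging.config.dictConfig, so a minimal, hypothetical config file looks like
#
#     version: 1
#     handlers:
#       console:
#         class: logging.StreamHandler
#     root:
#       level: INFO
#       handlers: [console]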
def get_config_files(conffile=None):
candidates = ["/dcm/etc/agent.conf"]
if _g_conf_file_env in os.environ:
candidates.append(os.environ[_g_conf_file_env])
if conffile:
candidates.append(conffile)
locations = []
for f in candidates:
f = os.path.abspath(f)
if os.path.exists(f):
if f not in locations:
locations.append(f)
return locations
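# Example (illustrative): parse_config_files() walks the returned list in
# order and later files override earlier ones, so with a hypothetical override
#
#     get_config_files(conffile="/tmp/override.conf")
#     # -> ["/dcm/etc/agent.conf", "/tmp/override.conf"]
#
# any option set in /tmp/override.conf wins over the default agent.conf.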
|
{
"content_hash": "dcc997eb1a76aa559d080644b365a698",
"timestamp": "",
"source": "github",
"line_count": 451,
"max_line_length": 92,
"avg_line_length": 39.48558758314856,
"alnum_prop": 0.5783355795148248,
"repo_name": "JPWKU/unix-agent",
"id": "884c5445ebb8244fd50e0d47cae5682566e63765",
"size": "18391",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/dcm/agent/config.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "172"
},
{
"name": "Python",
"bytes": "743804"
},
{
"name": "Ruby",
"bytes": "79677"
},
{
"name": "Shell",
"bytes": "81231"
}
],
"symlink_target": ""
}
|
"""
Copyright (c) 2015, BROCADE COMMUNICATIONS SYSTEMS, INC
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
THE POSSIBILITY OF SUCH DAMAGE.
@authors: Sergei Garbuzov
@status: Development
@version: 1.1.0
"""
from pybvc.controller.controller import Controller
from pybvc.controller.netconfnode import NetconfNode
from pybvc.common.status import STATUS
from pybvc.common.utils import load_dict_from_file
if __name__ == "__main__":
f = "cfg.yml"
d = {}
if(load_dict_from_file(f, d) is False):
print("Config file '%s' read error: " % f)
exit()
try:
ctrlIpAddr = d['ctrlIpAddr']
ctrlPortNum = d['ctrlPortNum']
ctrlUname = d['ctrlUname']
ctrlPswd = d['ctrlPswd']
nodeName = d['nodeName']
nodeIpAddr = d['nodeIpAddr']
nodePortNum = d['nodePortNum']
nodeUname = d['nodeUname']
nodePswd = d['nodePswd']
    except KeyError:
print ("Failed to get Controller device attributes")
exit(0)
ctrl = Controller(ctrlIpAddr, ctrlPortNum, ctrlUname, ctrlPswd)
node = NetconfNode(ctrl, nodeName, nodeIpAddr, nodePortNum,
nodeUname, nodePswd)
print (">>> Removing '%s' from the Controller '%s'"
% (nodeName, ctrlIpAddr))
result = ctrl.delete_netconf_node(node)
status = result.get_status()
if(status.eq(STATUS.OK)):
print ("'%s' was successfully removed from the Controller" % nodeName)
else:
print ("\n")
print ("!!!Failed, reason: %s" % status.brief().lower())
exit(0)
    print("\n")
|
{
"content_hash": "044dfbc89c3e9e4093e84159baf2081f",
"timestamp": "",
"source": "github",
"line_count": 84,
"max_line_length": 78,
"avg_line_length": 35.154761904761905,
"alnum_prop": 0.7111412123264477,
"repo_name": "gaberger/pybvc",
"id": "486a6c0d2f24fd3b0499e43dc896f5021f297c3d",
"size": "2972",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "samples/samplenetconf/cmds/unmount.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Perl",
"bytes": "2452"
},
{
"name": "Python",
"bytes": "383836"
}
],
"symlink_target": ""
}
|
import itertools
from oslo_log import log as logging
from nova.db.main import api as db
from nova import exception
from nova import objects
from nova.objects import base
from nova.objects import fields
LOG = logging.getLogger(__name__)
# TODO(berrange): Remove NovaObjectDictCompat
@base.NovaObjectRegistry.register
class InstanceFault(base.NovaPersistentObject, base.NovaObject,
base.NovaObjectDictCompat):
# Version 1.0: Initial version
# Version 1.1: String attributes updated to support unicode
# Version 1.2: Added create()
VERSION = '1.2'
fields = {
'id': fields.IntegerField(),
'instance_uuid': fields.UUIDField(),
'code': fields.IntegerField(),
'message': fields.StringField(nullable=True),
'details': fields.StringField(nullable=True),
'host': fields.StringField(nullable=True),
}
@staticmethod
def _from_db_object(context, fault, db_fault):
# NOTE(danms): These are identical right now
for key in fault.fields:
fault[key] = db_fault[key]
fault._context = context
fault.obj_reset_changes()
return fault
@base.remotable_classmethod
def get_latest_for_instance(cls, context, instance_uuid):
db_faults = db.instance_fault_get_by_instance_uuids(context,
[instance_uuid])
if instance_uuid in db_faults and db_faults[instance_uuid]:
return cls._from_db_object(context, cls(),
db_faults[instance_uuid][0])
@base.remotable
def create(self):
if self.obj_attr_is_set('id'):
raise exception.ObjectActionError(action='create',
reason='already created')
values = {
'instance_uuid': self.instance_uuid,
'code': self.code,
'message': self.message,
'details': self.details,
'host': self.host,
}
db_fault = db.instance_fault_create(self._context, values)
self._from_db_object(self._context, self, db_fault)
self.obj_reset_changes()
@base.NovaObjectRegistry.register
class InstanceFaultList(base.ObjectListBase, base.NovaObject):
# Version 1.0: Initial version
# InstanceFault <= version 1.1
# Version 1.1: InstanceFault version 1.2
# Version 1.2: Added get_latest_by_instance_uuids() method
VERSION = '1.2'
fields = {
'objects': fields.ListOfObjectsField('InstanceFault'),
}
@base.remotable_classmethod
def get_latest_by_instance_uuids(cls, context, instance_uuids):
db_faultdict = db.instance_fault_get_by_instance_uuids(context,
instance_uuids,
latest=True)
db_faultlist = itertools.chain(*db_faultdict.values())
return base.obj_make_list(context, cls(context), objects.InstanceFault,
db_faultlist)
@base.remotable_classmethod
def get_by_instance_uuids(cls, context, instance_uuids):
db_faultdict = db.instance_fault_get_by_instance_uuids(context,
instance_uuids)
db_faultlist = itertools.chain(*db_faultdict.values())
return base.obj_make_list(context, cls(context), objects.InstanceFault,
db_faultlist)
|
{
"content_hash": "56e0cf1acca055d61c5a2cc016577610",
"timestamp": "",
"source": "github",
"line_count": 94,
"max_line_length": 79,
"avg_line_length": 37.840425531914896,
"alnum_prop": 0.5856058476244026,
"repo_name": "mahak/nova",
"id": "6d69e13ceba30c4e6a8bfcea8f26453e8d1a26b2",
"size": "4162",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "nova/objects/instance_fault.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "3545"
},
{
"name": "Mako",
"bytes": "1952"
},
{
"name": "Python",
"bytes": "23261880"
},
{
"name": "Shell",
"bytes": "28113"
},
{
"name": "Smarty",
"bytes": "507244"
}
],
"symlink_target": ""
}
|
"""admin.py ."""
# flake8: noqa
from django.contrib import admin
# Register your models here.
|
{
"content_hash": "aeb4096254694e32fc5c0871f2e06e9e",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 32,
"avg_line_length": 19,
"alnum_prop": 0.7052631578947368,
"repo_name": "andrewtcrooks/taskorganizer",
"id": "569d440a82908c4bb450b02f89ce35e6329ec923",
"size": "95",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tasklist/app/admin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "6637"
},
{
"name": "Python",
"bytes": "16305"
}
],
"symlink_target": ""
}
|
from ciscoconfparse import CiscoConfParse
|
{
"content_hash": "b80e2abb7f3ec57b897ad9c05b8923d0",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 21,
"avg_line_length": 22,
"alnum_prop": 0.9090909090909091,
"repo_name": "cocoloco69/pynet",
"id": "af054d58afd33f964cdeec88031258e15c632871",
"size": "22",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ciscoconf.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1005"
},
{
"name": "Python",
"bytes": "542188"
},
{
"name": "Shell",
"bytes": "5830"
}
],
"symlink_target": ""
}
|
import pytest
@pytest.fixture
def organism():
return{
'name': 'mouse',
'taxon_id': '9031'
}
@pytest.fixture
def organism_1(organism):
item = organism.copy()
item.update({
'schema_version': '1',
'status': 'CURRENT',
})
return item
def test_organism_upgrade(upgrader, organism_1):
value = upgrader.upgrade('organism', organism_1, target_version='2')
assert value['schema_version'] == '2'
assert value['status'] == 'current'
|
{
"content_hash": "92d5842836b08d0d3ee807799649c78e",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 72,
"avg_line_length": 19.84,
"alnum_prop": 0.5987903225806451,
"repo_name": "kidaa/encoded",
"id": "1e99736c06fb0f5c666a7d0f4b970b36d8b14847",
"size": "496",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/encoded/tests/test_upgrade_organism.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ActionScript",
"bytes": "741"
},
{
"name": "CSS",
"bytes": "124947"
},
{
"name": "Cucumber",
"bytes": "17721"
},
{
"name": "HTML",
"bytes": "371787"
},
{
"name": "JavaScript",
"bytes": "550757"
},
{
"name": "Python",
"bytes": "863661"
},
{
"name": "Ruby",
"bytes": "992"
},
{
"name": "Shell",
"bytes": "2814"
}
],
"symlink_target": ""
}
|
"""
Our HADS database gets loaded up with duplicates, this cleans it up.
called from RUN_MIDNIGHT.sh
"""
import datetime
import sys
from pyiem.util import get_dbconn, utc, logger
LOG = logger()
def query(sql, args=None):
"""
Do a query and make it atomic
"""
pgconn = get_dbconn("hads")
hcursor = pgconn.cursor()
sts = datetime.datetime.now()
hcursor.execute("set work_mem='16GB'")
hcursor.execute(sql, args if args is not None else [])
ets = datetime.datetime.now()
LOG.debug(
"%7s [%8.4fs] %s", hcursor.rowcount, (ets - sts).total_seconds(), sql
)
hcursor.close()
pgconn.commit()
def workflow(valid):
"""Do the work for this date, which is set to 00 UTC"""
tbl = "raw%s" % (valid.strftime("%Y_%m"),)
# make sure our tmp table does not exist
query("DROP TABLE IF EXISTS tmp")
# Extract unique obs to special table
sql = (
f"CREATE table tmp as select distinct * from {tbl} "
"WHERE valid BETWEEN %s and %s"
)
args = (valid, valid + datetime.timedelta(hours=24))
query(sql, args)
# Delete them all!
sql = f"delete from {tbl} WHERE valid BETWEEN %s and %s"
query(sql, args)
# Insert from special table
sql = f"INSERT into {tbl} SELECT * from tmp"
query(sql)
sql = "DROP TABLE IF EXISTS tmp"
query(sql)
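# Illustrative summary of the dedupe pattern above (partition name is an
# example only): for a table raw2024_01 the core statements amount to
#
#   CREATE TABLE tmp AS SELECT DISTINCT * FROM raw2024_01 WHERE valid BETWEEN ... AND ...;
#   DELETE FROM raw2024_01 WHERE valid BETWEEN ... AND ...;
#   INSERT INTO raw2024_01 SELECT * FROM tmp;
#   DROP TABLE tmp;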
def main(argv):
"""Go Main Go"""
if len(argv) == 4:
utcnow = utc(int(argv[1]), int(argv[2]), int(argv[3]))
workflow(utcnow)
return
utcnow = utc().replace(hour=0, minute=0, second=0, microsecond=0)
# Run for 'yesterday' and 35 days ago
for day in [1, 35]:
workflow(utcnow - datetime.timedelta(days=day))
if __name__ == "__main__":
# See how we are called
main(sys.argv)
|
{
"content_hash": "47b0e00ca65e72897beb69d5083e9ec4",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 77,
"avg_line_length": 25.225352112676056,
"alnum_prop": 0.6063651591289783,
"repo_name": "akrherz/iem",
"id": "7badc234145692010f17f310d70f1043336c813b",
"size": "1791",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "scripts/dbutil/hads_delete_dups.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "16912"
},
{
"name": "HTML",
"bytes": "1092923"
},
{
"name": "Hack",
"bytes": "7078"
},
{
"name": "JavaScript",
"bytes": "244253"
},
{
"name": "PHP",
"bytes": "3492474"
},
{
"name": "Python",
"bytes": "3279270"
},
{
"name": "Rich Text Format",
"bytes": "30075"
},
{
"name": "Shell",
"bytes": "72284"
}
],
"symlink_target": ""
}
|
import commands
import logging
import sys
from libs import config_libs
from libs import utils_libs
from libs import verify_libs
def main():
# Run the Testcases:
test = test_gbp_prs_neg()
if test.test_gbp_prs_neg_1() == 0:
test.cleanup(tc_name='TESTCASE_GBP_PRS_NEG_1')
if test.test_gbp_prs_neg_2() == 0:
test.cleanup(tc_name='TESTCASE_GBP_PRS_NEG_2')
if test.test_gbp_prs_neg_3() == 0:
test.cleanup(tc_name='TESTCASE_GBP_PRS_NEG_3')
test.cleanup()
utils_libs.report_results('test_gbp_prs_neg', 'test_results.txt')
sys.exit(1)
class test_gbp_prs_neg(object):
# Initialize logging
logging.basicConfig(
format='%(asctime)s [%(levelname)s] %(name)s - %(message)s',
level=logging.WARNING)
_log = logging.getLogger(__name__)
cmd = 'rm /tmp/test_gbp_prs_neg.log'
commands.getoutput(cmd)
hdlr = logging.FileHandler('/tmp/test_gbp_prs_neg.log')
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
hdlr.setFormatter(formatter)
_log.addHandler(hdlr)
_log.setLevel(logging.INFO)
_log.setLevel(logging.DEBUG)
def __init__(self):
"""
Init def
"""
self._log.info(
"\n## START OF GBP POLICY_RULE_SET NEGATIVE TESTSUITE\n")
self.gbpcfg = config_libs.Gbp_Config()
self.gbpverify = verify_libs.Gbp_Verify()
self.act_name = 'demo_pa'
self.cls_name = 'demo_pc'
self.rule_name = 'demo_pr'
self.ruleset_name = 'demo_prs'
self._log.info('\n## Step 1: Create a PC needed for PRS Testing ##')
self.cls_uuid = self.gbpcfg.gbp_policy_cfg_all(
1, 'classifier', self.cls_name)
if self.cls_uuid == 0:
self._log.info(
"\nReqd Policy Classifier Create Failed, "
"hence GBP Policy Rule-Set Negative Test Suite Run ABORTED\n")
return
self._log.info('\n## Step 1: Create a PA needed for PRS Testing ##')
self.act_uuid = self.gbpcfg.gbp_policy_cfg_all(
1, 'action', self.act_name)
if self.act_uuid == 0:
self._log.info(
"\nReqd Policy Action Create Failed, hence GBP "
"Policy Rule-Set Negative Test Suite Run ABORTED\n")
return
self._log.info('\n## Step 1: Create a PR needed for PRS Testing ##')
self.rule_uuid = self.gbpcfg.gbp_policy_cfg_all(
1, 'rule', self.rule_name, classifier=self.cls_name,
action=self.act_name)
if self.rule_uuid == 0:
self._log.info(
"\nReqd Policy Rule Create Failed, hence GBP "
"Policy Rule-Set Negative Test Suite Run ABORTED\n ")
return
def cleanup(self, tc_name=''):
if tc_name != '':
self._log.info('%s: FAILED' % (tc_name))
for obj in ['ruleset', 'rule', 'classifier', 'action']:
self.gbpcfg.gbp_del_all_anyobj(obj)
def test_gbp_prs_neg_1(self):
self._log.info(
"\n#################################################\n"
"TESTCASE_GBP_PRS_NEG_1: TO CREATE/VERIFY POLICY "
"RULESET with INVALID POLICY RULE\n"
"TEST_STEPS::\n"
"Create Policy RuleSet Object with Invalid PR\n"
"Verify the create FAILs and config rolls back\n"
"#################################################\n")
# Testcase work-flow starts
self._log.info("\n## Step 1: Create Policy RuleSet with Invalid PR ##")
if self.gbpcfg.gbp_policy_cfg_all(
1,
'ruleset',
self.ruleset_name,
policy_rules='INVALID') != 0:
self._log.info(
"# Step 1: Create RuleSet with Invalid PR did NOT Fail")
return 0
self._log.info('# Step 1A: Verify Policy RuleSet config rolled back')
if self.gbpverify.gbp_policy_verify_all(
1, 'ruleset', self.ruleset_name) != 0:
self._log.info(
"# Step 1A: Verify RuleSet config roll back did NOT Fail")
return 0
self._log.info("\nTESTCASE_GBP_PRS_NEG_1: PASSED")
return 1
def test_gbp_prs_neg_2(self):
self._log.info(
"\n###################################################\n"
"TESTCASE_GBP_PRS_NEG_2: TO CREATE/VERIFY POLICY "
"RULESET with mix of VALID and INVALID POLICY RULE\n"
"TEST_STEPS::\n"
"Create Policy RuleSet with a mix of Valid and Invalid PR\n"
"Verify the create FAILs and config rolls back\n"
"##################################################\n")
# Testcase work-flow starts
self._log.info(
"\n## Step 1: Create Policy RuleSet with mix of Valid "
"and Invalid PR ##")
if self.gbpcfg.gbp_policy_cfg_all(
1,
'ruleset',
self.ruleset_name,
policy_rules="'%s INVALID'" %
(self.rule_uuid)) != 0:
self._log.info(
"# Step 1: Create RuleSet with mix of Valid and "
"Invalid PR did NOT Fail")
return 0
self._log.info('# Step 1A: Verify Policy RuleSet config rolled back')
if self.gbpverify.gbp_policy_verify_all(
1, 'ruleset', self.ruleset_name) != 0:
self._log.info(
"# Step 1A: Verify RuleSet config roll back did NOT Fail")
return 0
self._log.info("\nTESTCASE_GBP_PRS_NEG_2: PASSED")
return 1
def test_gbp_prs_neg_3(self):
self._log.info(
"\n###################################################\n"
"TESTCASE_GBP_PRS_NEG_3: TO UPDATE/VERIFY POLICY "
"RULE with VALID and INVALID PR\n"
"TEST_STEPS::\n"
"Create a Policy RuleSet with default attribute\n"
"Update the Policy RuleSet with a mix of Valid and Invalid PR\n"
"Verify the update fails and config roll backs to "
"original values of the PRS\n"
"##################################################\n")
# Testcase work-flow starts
self._log.info('\n## Step 1: Create a PRS with default attribute ##\n')
ruleset_uuid = self.gbpcfg.gbp_policy_cfg_all(
1, 'ruleset', self.ruleset_name)
if ruleset_uuid == 0:
self._log.info(
"\n## Step 1: Create RuleSet with default attr == Failed")
return 0
self._log.info(
"\n## Step 2: Update the PRS with VALID PR and INVALID PR")
if self.gbpcfg.gbp_policy_cfg_all(
2, 'ruleset', ruleset_uuid, policy_rule='"%s INVALID"' %
(self.rule_name)) != 0:
self._log.info(
"\n## Step 2: Updating Policy RuleSet with VALID "
"and INVALID Policy Rules did NOT Fail")
return 0
self._log.info(
'# Step 2A: Verify RuleSet config update has been rolled back')
if self.gbpverify.gbp_policy_verify_all(
1,
'ruleset',
self.ruleset_name,
id=ruleset_uuid,
policy_rules=self.rule_uuid,
shared='False') != 0:
self._log.info("# Step 2A: Verify RuleSet roll back did NOT Fail")
return 0
self._log.info("\nTESTCASE_GBP_PRS_NEG_3: PASSED")
return 1
if __name__ == '__main__':
main()
|
{
"content_hash": "571f51a9976841da50d777595817f181",
"timestamp": "",
"source": "github",
"line_count": 194,
"max_line_length": 79,
"avg_line_length": 39.08247422680412,
"alnum_prop": 0.5209707201266157,
"repo_name": "jiahaoliang/group-based-policy",
"id": "9a122f8ac3c82ceafe654423cb2c0878ddf1acf6",
"size": "8155",
"binary": false,
"copies": "1",
"ref": "refs/heads/lbaasv2-mitaka-pull-request",
"path": "gbpservice/tests/contrib/gbpfunctests/testcases/tc_gbp_prs_neg.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "1741199"
},
{
"name": "Shell",
"bytes": "27976"
}
],
"symlink_target": ""
}
|
"""
Assignment 3:
Use Pexpect to retrieve the output of 'show ip int brief' from pynet-rtr2.
"""
import pexpect
import re
from getpass import getpass
def main():
    ip_addr = '50.76.53.27'
username = 'pyclass'
port = 8022
password = getpass() # password: 88newclass
# Spawn a child process
ssh_conn = pexpect.spawn('ssh -l {} {} -p {}'.format(username, ip_addr, port))
# Set time out (in seconds)
ssh_conn.timeout = 5
ssh_conn.expect('ssword:')
ssh_conn.sendline(password)
ssh_conn.expect('#')
ssh_conn.sendline("show ip int brief")
ssh_conn.expect("#")
# Print out texts before last expect command ('#')
print ssh_conn.before
if __name__ == "__main__":
main()
|
{
"content_hash": "7cf12976a1ab2410b22bee0c29290e58",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 82,
"avg_line_length": 21.705882352941178,
"alnum_prop": 0.6178861788617886,
"repo_name": "philuu12/PYTHON_4_NTWK_ENGRS",
"id": "8c51c27f97bea1664e41559af15dc1a2feb53cda",
"size": "760",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "wk4_hw/ex3_pexpect.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "116931"
}
],
"symlink_target": ""
}
|
import unittest
from pathlib import Path
from pymatgen.io.cp2k.outputs import Cp2kOutput
from pymatgen.util.testing import PymatgenTest
class SetTest(PymatgenTest):
def setUp(self):
self.TEST_FILES_DIR = Path.joinpath(self.TEST_FILES_DIR, "cp2k")
self.out = Cp2kOutput(Path.joinpath(self.TEST_FILES_DIR, "cp2k.out"), auto_load=True)
def test_files(self):
self.out.parse_files()
self.assertEqual(len(self.out.filenames["PDOS"]), 2)
def test(self):
self.assertEqual(self.out.spin_polarized, False)
self.assertEqual(self.out.completed, True)
self.assertEqual(self.out.num_warnings, [[1]])
self.assertEqual(self.out.run_type.upper(), "GEO_OPT")
if __name__ == "__main__":
unittest.main()
|
{
"content_hash": "8b0568b1e3b2a84b813b79367ec66723",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 93,
"avg_line_length": 30.84,
"alnum_prop": 0.6731517509727627,
"repo_name": "richardtran415/pymatgen",
"id": "6e9281afe47a89fc47e2bbc9b13f35df6cd2fddd",
"size": "881",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pymatgen/io/cp2k/tests/test_outputs.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "5100"
},
{
"name": "CSS",
"bytes": "7550"
},
{
"name": "Common Lisp",
"bytes": "3029065"
},
{
"name": "HTML",
"bytes": "827"
},
{
"name": "Makefile",
"bytes": "5573"
},
{
"name": "Perl",
"bytes": "229104"
},
{
"name": "Propeller Spin",
"bytes": "4026362"
},
{
"name": "Python",
"bytes": "6783497"
},
{
"name": "Roff",
"bytes": "1135003"
}
],
"symlink_target": ""
}
|
from importlib import import_module
import inspect
import os
import re
from django.apps import apps
from django.conf import settings
from django.contrib import admin
from django.contrib.admin.views.decorators import staff_member_required
from django.db import models
from django.core.exceptions import ViewDoesNotExist
from django.http import Http404
from django.core import urlresolvers
from django.contrib.admindocs import utils
from django.template.base import (builtins, get_library,
get_templatetags_modules, InvalidTemplateLibrary, libraries)
from django.template.engine import Engine
from django.utils.decorators import method_decorator
from django.utils._os import upath
from django.utils import six
from django.utils.translation import ugettext as _
from django.views.generic import TemplateView
# Exclude methods starting with these strings from documentation
MODEL_METHODS_EXCLUDE = ('_', 'add_', 'delete', 'save', 'set_')
class BaseAdminDocsView(TemplateView):
"""
Base view for admindocs views.
"""
@method_decorator(staff_member_required)
def dispatch(self, request, *args, **kwargs):
if not utils.docutils_is_available:
# Display an error message for people without docutils
self.template_name = 'admin_doc/missing_docutils.html'
return self.render_to_response(admin.site.each_context(request))
return super(BaseAdminDocsView, self).dispatch(request, *args, **kwargs)
def get_context_data(self, **kwargs):
kwargs.update({'root_path': urlresolvers.reverse('admin:index')})
kwargs.update(admin.site.each_context(self.request))
return super(BaseAdminDocsView, self).get_context_data(**kwargs)
class BookmarkletsView(BaseAdminDocsView):
template_name = 'admin_doc/bookmarklets.html'
def get_context_data(self, **kwargs):
context = super(BookmarkletsView, self).get_context_data(**kwargs)
context.update({
'admin_url': "%s://%s%s" % (
self.request.scheme, self.request.get_host(), context['root_path'])
})
return context
class TemplateTagIndexView(BaseAdminDocsView):
template_name = 'admin_doc/template_tag_index.html'
def get_context_data(self, **kwargs):
load_all_installed_template_libraries()
tags = []
app_libs = list(six.iteritems(libraries))
builtin_libs = [(None, lib) for lib in builtins]
for module_name, library in builtin_libs + app_libs:
for tag_name, tag_func in library.tags.items():
title, body, metadata = utils.parse_docstring(tag_func.__doc__)
if title:
title = utils.parse_rst(title, 'tag', _('tag:') + tag_name)
if body:
body = utils.parse_rst(body, 'tag', _('tag:') + tag_name)
for key in metadata:
metadata[key] = utils.parse_rst(metadata[key], 'tag', _('tag:') + tag_name)
if library in builtins:
tag_library = ''
else:
tag_library = module_name.split('.')[-1]
tags.append({
'name': tag_name,
'title': title,
'body': body,
'meta': metadata,
'library': tag_library,
})
kwargs.update({'tags': tags})
return super(TemplateTagIndexView, self).get_context_data(**kwargs)
class TemplateFilterIndexView(BaseAdminDocsView):
template_name = 'admin_doc/template_filter_index.html'
def get_context_data(self, **kwargs):
load_all_installed_template_libraries()
filters = []
app_libs = list(six.iteritems(libraries))
builtin_libs = [(None, lib) for lib in builtins]
for module_name, library in builtin_libs + app_libs:
for filter_name, filter_func in library.filters.items():
title, body, metadata = utils.parse_docstring(filter_func.__doc__)
if title:
title = utils.parse_rst(title, 'filter', _('filter:') + filter_name)
if body:
body = utils.parse_rst(body, 'filter', _('filter:') + filter_name)
for key in metadata:
metadata[key] = utils.parse_rst(metadata[key], 'filter', _('filter:') + filter_name)
if library in builtins:
tag_library = ''
else:
tag_library = module_name.split('.')[-1]
filters.append({
'name': filter_name,
'title': title,
'body': body,
'meta': metadata,
'library': tag_library,
})
kwargs.update({'filters': filters})
return super(TemplateFilterIndexView, self).get_context_data(**kwargs)
class ViewIndexView(BaseAdminDocsView):
template_name = 'admin_doc/view_index.html'
def get_context_data(self, **kwargs):
views = []
urlconf = import_module(settings.ROOT_URLCONF)
view_functions = extract_views_from_urlpatterns(urlconf.urlpatterns)
for (func, regex, namespace, name) in view_functions:
views.append({
'full_name': '%s.%s' % (func.__module__, getattr(func, '__name__', func.__class__.__name__)),
'url': simplify_regex(regex),
'url_name': ':'.join((namespace or []) + (name and [name] or [])),
'namespace': ':'.join((namespace or [])),
'name': name,
})
kwargs.update({'views': views})
return super(ViewIndexView, self).get_context_data(**kwargs)
class ViewDetailView(BaseAdminDocsView):
template_name = 'admin_doc/view_detail.html'
def get_context_data(self, **kwargs):
view = self.kwargs['view']
urlconf = urlresolvers.get_urlconf()
if urlresolvers.get_resolver(urlconf)._is_callback(view):
mod, func = urlresolvers.get_mod_func(view)
view_func = getattr(import_module(mod), func)
else:
raise Http404
title, body, metadata = utils.parse_docstring(view_func.__doc__)
if title:
title = utils.parse_rst(title, 'view', _('view:') + view)
if body:
body = utils.parse_rst(body, 'view', _('view:') + view)
for key in metadata:
metadata[key] = utils.parse_rst(metadata[key], 'model', _('view:') + view)
kwargs.update({
'name': view,
'summary': title,
'body': body,
'meta': metadata,
})
return super(ViewDetailView, self).get_context_data(**kwargs)
class ModelIndexView(BaseAdminDocsView):
template_name = 'admin_doc/model_index.html'
def get_context_data(self, **kwargs):
m_list = [m._meta for m in apps.get_models()]
kwargs.update({'models': m_list})
return super(ModelIndexView, self).get_context_data(**kwargs)
class ModelDetailView(BaseAdminDocsView):
template_name = 'admin_doc/model_detail.html'
def get_context_data(self, **kwargs):
model_name = self.kwargs['model_name']
# Get the model class.
try:
app_config = apps.get_app_config(self.kwargs['app_label'])
except LookupError:
raise Http404(_("App %(app_label)r not found") % self.kwargs)
try:
model = app_config.get_model(model_name)
except LookupError:
raise Http404(_("Model %(model_name)r not found in app %(app_label)r") % self.kwargs)
opts = model._meta
title, body, metadata = utils.parse_docstring(model.__doc__)
if title:
title = utils.parse_rst(title, 'model', _('model:') + model_name)
if body:
body = utils.parse_rst(body, 'model', _('model:') + model_name)
# Gather fields/field descriptions.
fields = []
for field in opts.fields:
# ForeignKey is a special case since the field will actually be a
# descriptor that returns the other object
if isinstance(field, models.ForeignKey):
data_type = field.rel.to.__name__
app_label = field.rel.to._meta.app_label
verbose = utils.parse_rst(
(_("the related `%(app_label)s.%(data_type)s` object") % {
'app_label': app_label, 'data_type': data_type,
}),
'model',
_('model:') + data_type,
)
else:
data_type = get_readable_field_data_type(field)
verbose = field.verbose_name
fields.append({
'name': field.name,
'data_type': data_type,
'verbose': verbose,
'help_text': field.help_text,
})
# Gather many-to-many fields.
for field in opts.many_to_many:
data_type = field.rel.to.__name__
app_label = field.rel.to._meta.app_label
verbose = _("related `%(app_label)s.%(object_name)s` objects") % {
'app_label': app_label,
'object_name': data_type,
}
fields.append({
'name': "%s.all" % field.name,
"data_type": 'List',
'verbose': utils.parse_rst(_("all %s") % verbose, 'model', _('model:') + opts.model_name),
})
fields.append({
'name': "%s.count" % field.name,
'data_type': 'Integer',
'verbose': utils.parse_rst(_("number of %s") % verbose, 'model', _('model:') + opts.model_name),
})
# Gather model methods.
for func_name, func in model.__dict__.items():
if (inspect.isfunction(func) and len(inspect.getargspec(func)[0]) == 1):
try:
for exclude in MODEL_METHODS_EXCLUDE:
if func_name.startswith(exclude):
raise StopIteration
except StopIteration:
continue
verbose = func.__doc__
if verbose:
verbose = utils.parse_rst(utils.trim_docstring(verbose), 'model', _('model:') + opts.model_name)
fields.append({
'name': func_name,
'data_type': get_return_data_type(func_name),
'verbose': verbose,
})
# Gather related objects
for rel in opts.related_objects:
verbose = _("related `%(app_label)s.%(object_name)s` objects") % {
'app_label': rel.opts.app_label,
'object_name': rel.opts.object_name,
}
accessor = rel.get_accessor_name()
fields.append({
'name': "%s.all" % accessor,
'data_type': 'List',
'verbose': utils.parse_rst(_("all %s") % verbose, 'model', _('model:') + opts.model_name),
})
fields.append({
'name': "%s.count" % accessor,
'data_type': 'Integer',
'verbose': utils.parse_rst(_("number of %s") % verbose, 'model', _('model:') + opts.model_name),
})
kwargs.update({
'name': '%s.%s' % (opts.app_label, opts.object_name),
'summary': title,
'description': body,
'fields': fields,
})
return super(ModelDetailView, self).get_context_data(**kwargs)
class TemplateDetailView(BaseAdminDocsView):
template_name = 'admin_doc/template_detail.html'
def get_context_data(self, **kwargs):
template = self.kwargs['template']
templates = []
for dir in Engine.get_default().dirs:
template_file = os.path.join(dir, template)
templates.append({
'file': template_file,
'exists': os.path.exists(template_file),
'contents': lambda: open(template_file).read() if os.path.exists(template_file) else '',
'order': list(Engine.get_default().dirs).index(dir),
})
kwargs.update({
'name': template,
'templates': templates,
})
return super(TemplateDetailView, self).get_context_data(**kwargs)
####################
# Helper functions #
####################
def load_all_installed_template_libraries():
# Load/register all template tag libraries from installed apps.
for module_name in get_templatetags_modules():
mod = import_module(module_name)
if not hasattr(mod, '__file__'):
# e.g. packages installed as eggs
continue
try:
libraries = [
os.path.splitext(p)[0]
for p in os.listdir(os.path.dirname(upath(mod.__file__)))
if p.endswith('.py') and p[0].isalpha()
]
except OSError:
continue
else:
for library_name in libraries:
try:
get_library(library_name)
except InvalidTemplateLibrary:
pass
def get_return_data_type(func_name):
"""Return a somewhat-helpful data type given a function name"""
if func_name.startswith('get_'):
if func_name.endswith('_list'):
return 'List'
elif func_name.endswith('_count'):
return 'Integer'
return ''
def get_readable_field_data_type(field):
"""Returns the description for a given field type, if it exists,
Fields' descriptions can contain format strings, which will be interpolated
against the values of field.__dict__ before being output."""
return field.description % field.__dict__
def extract_views_from_urlpatterns(urlpatterns, base='', namespace=None):
"""
Return a list of views from a list of urlpatterns.
Each object in the returned list is a two-tuple: (view_func, regex)
"""
views = []
for p in urlpatterns:
if hasattr(p, 'url_patterns'):
try:
patterns = p.url_patterns
except ImportError:
continue
views.extend(extract_views_from_urlpatterns(
patterns,
base + p.regex.pattern,
(namespace or []) + (p.namespace and [p.namespace] or [])
))
elif hasattr(p, 'callback'):
try:
views.append((p.callback, base + p.regex.pattern,
namespace, p.name))
except ViewDoesNotExist:
continue
else:
raise TypeError(_("%s does not appear to be a urlpattern object") % p)
return views
named_group_matcher = re.compile(r'\(\?P(<\w+>).+?\)')
non_named_group_matcher = re.compile(r'\(.*?\)')
def simplify_regex(pattern):
"""
Clean up urlpattern regexes into something somewhat readable by Mere Humans:
turns something like "^(?P<sport_slug>\w+)/athletes/(?P<athlete_slug>\w+)/$"
into "<sport_slug>/athletes/<athlete_slug>/"
"""
# handle named groups first
pattern = named_group_matcher.sub(lambda m: m.group(1), pattern)
# handle non-named groups
pattern = non_named_group_matcher.sub("<var>", pattern)
# clean up any outstanding regex-y characters.
pattern = pattern.replace('^', '').replace('$', '').replace('?', '').replace('//', '/').replace('\\', '')
if not pattern.startswith('/'):
pattern = '/' + pattern
return pattern
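# Example (illustrative): the two matchers above rewrite a raw urlpattern in
# two passes before the final character cleanup.
#
#   simplify_regex(r'^articles/(?P<year>[0-9]{4})/([0-9]{2})/$')
#   # after the named-group pass:   '^articles/<year>/([0-9]{2})/$'
#   # after the unnamed-group pass: '^articles/<year>/<var>/$'
#   # returned:                     '/articles/<year>/<var>/'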
|
{
"content_hash": "fe04ee57aa0af816fd5f773bee70b1ee",
"timestamp": "",
"source": "github",
"line_count": 405,
"max_line_length": 116,
"avg_line_length": 38.75555555555555,
"alnum_prop": 0.5522426095820592,
"repo_name": "edevil/django",
"id": "2ffd402c95d023043ecaa89d67b679194d64efb0",
"size": "15696",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "django/contrib/admindocs/views.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "53429"
},
{
"name": "JavaScript",
"bytes": "103687"
},
{
"name": "Makefile",
"bytes": "5765"
},
{
"name": "Python",
"bytes": "10540191"
},
{
"name": "Shell",
"bytes": "10452"
}
],
"symlink_target": ""
}
|
""" a kernel implementation of a (localised) seamless update from a coupling between
two weighted ensembles (coarse and fine) to two coupled evenly weighted ensembles """
from __future__ import absolute_import
from __future__ import division
from firedrake import *
from firedrake.mg.utils import get_level
from fade import *
from fade.ml import *
from fade.emd.emd_kernel import *
import numpy as np
from pyop2.profiling import timed_stage
def seamless_coupling_update(ensemble_1, ensemble_2, weights_1, weights_2, r_loc_c=0, r_loc_f=0):
""" performs a seamless coupling (localised) ensemble transform update from a coupling
between two weighted ensembles (coarse and fine) into two evenly weighted ensembles.
NB: The two ensembles have to belong to the same hierarchy
:arg ensemble_1: list of :class:`Function`s in the coarse ensemble
:type ensemble_1: tuple / list
:arg ensemble_2: list of :class:`Function`s in the fine ensemble
:type ensemble_2: tuple / list
:arg weights_1: list of :class:`Function`s representing the importance weights for first
ensemble
:type weights_1: tuple / list
:arg weights_2: list of :class:`Function`s representing the importance weights for second
ensemble
:type weights_2: tuple / list
Optional Arguments:
:arg r_loc_c: Radius of coarsening localisation for the coarse cost functions. Default: 0
:type r_loc_c: int
:arg r_loc_f: Radius of coarsening localisation for the fine cost functions. Default: 0
:type r_loc_f: int
"""
if len(ensemble_1) < 1 or len(ensemble_2) < 1:
raise ValueError('ensembles cannot be indexed')
if len(weights_1) < 1 or len(weights_2) < 1:
raise ValueError('weights cannot be indexed')
# check that ensemble_1 and ensemble_2 have inputs in the same hierarchy
mesh_1 = ensemble_1[0].function_space().mesh()
mesh_2 = ensemble_2[0].function_space().mesh()
hierarchy_1, lvl_1 = get_level(mesh_1)
hierarchy_2, lvl_2 = get_level(mesh_2)
    if lvl_1 is None or lvl_2 is None:
        raise ValueError('Both ensembles to be coupled need to be on meshes that belong to a mesh hierarchy')
    if hierarchy_1 is not hierarchy_2:
        raise ValueError('Both ensembles to be coupled need to be on meshes that are part of the same hierarchy')
# check if 1 is coarse and 2 is fine
if lvl_1 < lvl_2:
ensemble_c = ensemble_1
weights_c = weights_1
ensemble_f = ensemble_2
weights_f = weights_2
else:
raise ValueError('Coarse ensemble needs to be the first ensemble, followed by a finer one')
n = len(ensemble_c)
    if n != len(ensemble_f):
raise ValueError('Both ensembles need to be of the same length')
# function spaces of both ensembles and create vector function space
fsc = ensemble_c[0].function_space()
fsf = ensemble_f[0].function_space()
fam = fsc.ufl_element().family()
deg = fsc.ufl_element().degree()
assert fam == fsf.ufl_element().family()
assert deg == fsf.ufl_element().degree()
vfsc = VectorFunctionSpace(mesh_1, fam, deg, dim=n)
vfsf = VectorFunctionSpace(mesh_2, fam, deg, dim=n)
# check that weights have same length
assert len(weights_c) == n
assert len(weights_f) == n
# check that weights add up to one
with timed_stage("Checking weights are normalized"):
cc = Function(fsc)
cf = Function(fsf)
for k in range(n):
cc.dat.data[:] += weights_c[k].dat.data[:]
cf.dat.data[:] += weights_f[k].dat.data[:]
if np.max(np.abs(cc.dat.data[:] - 1)) > 1e-3 or np.max(np.abs(cf.dat.data[:] - 1)) > 1e-3:
            raise ValueError('Ensemble weights do not add up to 1')
# preallocate new / intermediate ensembles and assign basis coeffs to new vector function
with timed_stage("Preallocating functions"):
ensemble_c_f = Function(vfsc)
ensemble_f_f = Function(vfsf)
new_ensemble_c_f = Function(vfsc)
new_ensemble_f_f = Function(vfsf)
int_ensemble_c_f = Function(vfsc)
if n == 1:
ensemble_c_f.dat.data[:] = ensemble_c[0].dat.data[:]
ensemble_f_f.dat.data[:] = ensemble_f[0].dat.data[:]
else:
for i in range(n):
ensemble_c_f.dat.data[:, i] = ensemble_c[i].dat.data[:]
ensemble_f_f.dat.data[:, i] = ensemble_f[i].dat.data[:]
# define even weights
with timed_stage("Preallocating functions"):
even_weights_c = []
even_weights_f = []
fc = Function(fsc).assign(1.0 / n)
ff = Function(fsf).assign(1.0 / n)
for k in range(n):
even_weights_c.append(fc)
even_weights_f.append(ff)
# inject fine weights and ensembles down to coarse mesh
with timed_stage("Injecting finer ensemble / weights down to coarse mesh"):
inj_ensemble_f_f = Function(vfsc)
inj_weights_f = []
totals = Function(fsc)
for i in range(n):
g = Function(fsc)
inject(weights_f[i], g)
inj_weights_f.append(g)
totals.dat.data[:] += inj_weights_f[i].dat.data[:]
inject(ensemble_f_f, inj_ensemble_f_f)
# re-normalize injected fine weights
for i in range(n):
inj_weights_f[i].dat.data[:] = np.divide(inj_weights_f[i].dat.data[:], totals.dat.data[:])
with timed_stage("Coupling between weighted coarse and fine ensembles"):
kernel_transform(ensemble_c_f, inj_ensemble_f_f, weights_c,
inj_weights_f, int_ensemble_c_f, r_loc_c)
with timed_stage("Finer ensemble transform"):
kernel_transform(ensemble_f_f, ensemble_f_f, weights_f,
even_weights_f, new_ensemble_f_f, r_loc_f)
with timed_stage("Coupling weighted intermediate ensemble and transformed finer ensemble"):
# inject transformed finer ensemble
inj_new_ensemble_f_f = Function(vfsc)
inj_f = Function(fsc)
f_f = Function(fsf)
if n == 1:
f_f.dat.data[:] = new_ensemble_f_f.dat.data[:]
inject(f_f, inj_f)
inj_new_ensemble_f_f.dat.data[:] = inj_f.dat.data[:]
else:
for i in range(n):
f_f.dat.data[:] = new_ensemble_f_f.dat.data[:, i]
inject(f_f, inj_f)
inj_new_ensemble_f_f.dat.data[:, i] = inj_f.dat.data[:]
kernel_transform(int_ensemble_c_f, inj_new_ensemble_f_f,
inj_weights_f, even_weights_c, new_ensemble_c_f, r_loc_c)
# check that components have the same mean
with timed_stage("Checking posterior mean consistency"):
mc = Function(fsc)
mf = Function(fsf)
for k in range(n):
mc.dat.data[:] += np.multiply(ensemble_c[k].dat.data[:], weights_c[k].dat.data[:])
mf.dat.data[:] += np.multiply(ensemble_f[k].dat.data[:], weights_f[k].dat.data[:])
# override ensembles
if n == 1:
ensemble_c[0].dat.data[:] = new_ensemble_c_f.dat.data[:]
ensemble_f[0].dat.data[:] = new_ensemble_f_f.dat.data[:]
else:
for i in range(n):
ensemble_c[i].dat.data[:] = new_ensemble_c_f.dat.data[:, i]
ensemble_f[i].dat.data[:] = new_ensemble_f_f.dat.data[:, i]
# reset weights
for i in range(n):
weights_c[i].assign(1.0 / n)
weights_f[i].assign(1.0 / n)
# check that components have the same mean
with timed_stage("Checking posterior mean consistency"):
mnc = Function(fsc)
mnf = Function(fsf)
for k in range(n):
mnc.dat.data[:] += np.multiply(ensemble_c[k].dat.data[:], weights_c[k].dat.data[:])
mnf.dat.data[:] += np.multiply(ensemble_f[k].dat.data[:], weights_f[k].dat.data[:])
assert np.max(np.abs(mnc.dat.data[:] - mc.dat.data[:])) < 1e-5
assert np.max(np.abs(mnf.dat.data[:] - mf.dat.data[:])) < 1e-5
return ensemble_c, ensemble_f
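# Illustrative usage sketch (the mesh, discretisation and ensemble size below
# are hypothetical): the two ensembles must live on a coarser and a finer
# level of the same firedrake MeshHierarchy, and each set of importance
# weights must sum to one node-wise.
#
#     hierarchy = MeshHierarchy(UnitSquareMesh(4, 4), 1)
#     Vc = FunctionSpace(hierarchy[0], "DG", 0)
#     Vf = FunctionSpace(hierarchy[1], "DG", 0)
#     n = 4
#     ensemble_c = [Function(Vc) for _ in range(n)]
#     ensemble_f = [Function(Vf) for _ in range(n)]
#     weights_c = [Function(Vc).assign(1.0 / n) for _ in range(n)]
#     weights_f = [Function(Vf).assign(1.0 / n) for _ in range(n)]
#     ensemble_c, ensemble_f = seamless_coupling_update(
#         ensemble_c, ensemble_f, weights_c, weights_f, r_loc_c=1, r_loc_f=1)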
|
{
"content_hash": "223b9f19fc674d76e3196f0508259c48",
"timestamp": "",
"source": "github",
"line_count": 204,
"max_line_length": 100,
"avg_line_length": 39.5735294117647,
"alnum_prop": 0.6065898674594327,
"repo_name": "alsgregory/firedrake_da",
"id": "18bad7383af485c2661421f8f7982244e0aa2f2f",
"size": "8073",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "fade/ml/coupling.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "21698"
},
{
"name": "Makefile",
"bytes": "269"
},
{
"name": "Python",
"bytes": "61089"
}
],
"symlink_target": ""
}
|
import _env
import leancloud
from config import leancloud_config
leancloud.init(leancloud_config.LeanCloud.APP_ID,
master_key=leancloud_config.LeanCloud.APP_MASTER_KEY)
|
{
"content_hash": "d89e2dfd7a329f7e0c654cf2c10e84eb",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 68,
"avg_line_length": 30.833333333333332,
"alnum_prop": 0.7675675675675676,
"repo_name": "PegasusWang/picwall",
"id": "25f382c6098d76f5d73322acc189c2702d6aebef",
"size": "231",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "handlers/_leancloud_init.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "24880"
},
{
"name": "HTML",
"bytes": "59766"
},
{
"name": "JavaScript",
"bytes": "125557"
},
{
"name": "Python",
"bytes": "55632"
},
{
"name": "Shell",
"bytes": "202"
}
],
"symlink_target": ""
}
|
'''
Derived functions, name prefix specifies its container, like
* KS\_: nets.KeepSignFunc
* JC\_: nets.JointComplex
'''
import numpy as np
import pdb
from . import functions, pfunctions, linears
from .nets import KeepSignFunc, JointComplex
from .utils import dtype_c2r, dtype_r2c
__all__ = ['KS_Tanh', 'KS_Georgiou1992',
'JC_Tanh', 'JC_Sigmoid', 'JC_Georgiou1992']
def KS_Tanh(input_shape, itype, **kwargs):
'''
Function :math:`f(x) = \\tanh(|x|)\\exp(i \\theta_x)`.
References:
Hirose 1994
Returns:
KeepSignFunc: keep sign tanh layer.
'''
func = functions.Tanh(input_shape, dtype_c2r(itype), **kwargs)
ks = KeepSignFunc(func)
return ks
def KS_Georgiou1992(input_shape, itype, cr, var_mask=[False, False], **kwargs):
'''
Function :math:`f(x) = \\frac{x}{c+|x|/r}`
Args:
        cr (tuple, len=2): c and r.
var_mask (1darray, len=2, default=[False,False]):\
mask for variables (v, w), with v = -c*r and w = -cr/(1-r).
Returns:
KeepSignFunc: keep sign Georgiou's layer.
'''
c, r = cr
func = pfunctions.Mobius(input_shape, dtype_c2r(itype), params=np.array(
[0, -c * r / (1 - r), -c * r]), var_mask=[False] + list(var_mask),
**kwargs)
ks = KeepSignFunc(func)
return ks
def JC_Tanh(input_shape, itype, **kwargs):
'''
Function :math:`f(x) = \\tanh(\\Re[x]) + i\\tanh(\\Im[x])`.
References:
Kechriotis 1994
Returns:
JointComplex: joint complex tanh layer.
'''
func = functions.Tanh(input_shape, dtype_c2r(itype), **kwargs)
jc = JointComplex(func, func) # same function
return jc
def JC_Sigmoid(input_shape, itype, **kwargs):
'''
Function :math:`f(x) = \\sigma(\\Re[x]) + i\\sigma(\\Im[x])`.
References:
Birx 1992
Returns:
JointComplex: joint complex sigmoid layer.
'''
func = functions.Sigmoid(input_shape, dtype_c2r(itype), **kwargs)
jc = JointComplex(func, func) # same function
return jc
def JC_Georgiou1992(input_shape, itype, params, **kwargs):
'''
Function :math:`f(x) = \\text{Georgiou1992}\
(\\Re[x]) + i\\text{Georgiou1992}(\\Im[x])`.
Args:
params: params for Georgiou1992.
References:
Kuroe 2005
Returns:
JointComplex: joint complex Geogiou's layer.
'''
func = pfunctions.Georgiou1992(input_shape, dtype_c2r(
itype), params, var_mask=[False, False], **kwargs)
jc = JointComplex(func, func) # same function
return jc
|
{
"content_hash": "f3e09755bc73bf5944e6029101f172d8",
"timestamp": "",
"source": "github",
"line_count": 100,
"max_line_length": 79,
"avg_line_length": 25.76,
"alnum_prop": 0.5916149068322981,
"repo_name": "GiggleLiu/poorman_nn",
"id": "4333f3452d956fd7e73a9de60cca2f1461425376",
"size": "2576",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "poornn/derivatives.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Fortran",
"bytes": "150441"
},
{
"name": "Makefile",
"bytes": "1454"
},
{
"name": "Python",
"bytes": "202387"
},
{
"name": "Shell",
"bytes": "126"
}
],
"symlink_target": ""
}
|
import uuid
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
class HttpSuccessOperations(object):
"""HttpSuccessOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
"""
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.config = config
def head200(
self, custom_headers=None, raw=False, **operation_config):
"""Return 200 status code if successful.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: bool or
:class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if
raw=true
:rtype: bool or
:class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = '/http/success/200'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.head(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200, 404]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = (response.status_code == 200)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def head204(
self, custom_headers=None, raw=False, **operation_config):
"""Return 204 status code if successful.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: bool or
:class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if
raw=true
:rtype: bool or
:class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = '/http/success/204'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.head(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [204, 404]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = (response.status_code == 204)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def head404(
self, custom_headers=None, raw=False, **operation_config):
"""Return 404 status code if successful.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: bool or
:class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if
raw=true
:rtype: bool or
:class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = '/http/success/404'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.head(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [204, 404]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = (response.status_code == 204)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
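
# --- Hedged usage sketch (not part of the generated fixture) ---
# Assuming the generated test client exposes these operations under an
# ``http_success`` attribute (the client and attribute names below are
# assumptions, not taken from this file), the bool / ClientRawResponse
# behaviour described in the docstrings above looks roughly like:
#
#   client = AutoRestHeadTestService(credentials, base_url)
#   exists = client.http_success.head200()        # True on 200, False on 404
#   raw = client.http_success.head204(raw=True)   # ClientRawResponse wrapping the bool
#   print(raw.output, raw.response.status_code)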
|
{
"content_hash": "f583279921db437cbf3997a77add9268",
"timestamp": "",
"source": "github",
"line_count": 162,
"max_line_length": 140,
"avg_line_length": 40.15432098765432,
"alnum_prop": 0.6464258262874711,
"repo_name": "veronicagg/autorest",
"id": "582289e799dab05a2881dc6f4f55e76245412c84",
"size": "6979",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/generator/AutoRest.Python.Azure.Tests/Expected/AcceptanceTests/Head/fixtures/acceptancetestshead/operations/http_success_operations.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "36"
},
{
"name": "C#",
"bytes": "15043916"
},
{
"name": "CSS",
"bytes": "110"
},
{
"name": "CoffeeScript",
"bytes": "64212"
},
{
"name": "Go",
"bytes": "149926"
},
{
"name": "HTML",
"bytes": "274"
},
{
"name": "Java",
"bytes": "7894733"
},
{
"name": "JavaScript",
"bytes": "6955414"
},
{
"name": "PowerShell",
"bytes": "41223"
},
{
"name": "Python",
"bytes": "2111184"
},
{
"name": "Ruby",
"bytes": "182108"
},
{
"name": "Shell",
"bytes": "196"
},
{
"name": "TypeScript",
"bytes": "465386"
}
],
"symlink_target": ""
}
|
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class authenticationsamlpolicy_binding(base_resource):
""" Binding class showing the resources that can be bound to authenticationsamlpolicy_binding.
"""
def __init__(self) :
self._name = ""
self.authenticationsamlpolicy_authenticationvserver_binding = []
@property
def name(self) :
"""Name of the SAML policy.<br/>Minimum length = 1.
"""
try :
return self._name
except Exception as e:
raise e
@name.setter
def name(self, name) :
"""Name of the SAML policy.<br/>Minimum length = 1
"""
try :
self._name = name
except Exception as e:
raise e
@property
def authenticationsamlpolicy_authenticationvserver_bindings(self) :
"""authenticationvserver that can be bound to authenticationsamlpolicy.
"""
try :
return self._authenticationsamlpolicy_authenticationvserver_binding
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(authenticationsamlpolicy_binding_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.authenticationsamlpolicy_binding
except Exception as e :
raise e
def _get_object_name(self) :
""" Returns the value of object identifier argument
"""
try :
if (self.name) :
return str(self.name)
return None
except Exception as e :
raise e
@classmethod
def get(self, service, name) :
""" Use this API to fetch authenticationsamlpolicy_binding resource.
"""
try :
if type(name) is not list :
obj = authenticationsamlpolicy_binding()
obj.name = name
response = obj.get_resource(service)
else :
				if name and len(name) > 0 :
					response = [None] * len(name)
					obj = [authenticationsamlpolicy_binding() for _ in range(len(name))]
					for i in range(len(name)) :
						obj[i].name = name[i]
						response[i] = obj[i].get_resource(service)
return response
except Exception as e:
raise e
class authenticationsamlpolicy_binding_response(base_response) :
def __init__(self, length=1) :
self.authenticationsamlpolicy_binding = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.authenticationsamlpolicy_binding = [authenticationsamlpolicy_binding() for _ in range(length)]
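
# --- Hedged usage sketch (not part of the generated SDK) ---
# Fetching the bindings of a SAML policy requires an authenticated
# nitro_service session; the host, credentials and policy name below are
# placeholders.
#
#   from nssrc.com.citrix.netscaler.nitro.service.nitro_service import nitro_service
#   client = nitro_service("10.0.0.1", "http")
#   client.login("nsroot", "nsroot")
#   bindings = authenticationsamlpolicy_binding.get(client, "saml_auth_pol")
#   client.logout()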
|
{
"content_hash": "ad02ca673fd0669a98e64a470957a486",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 134,
"avg_line_length": 31.418367346938776,
"alnum_prop": 0.7141929197791491,
"repo_name": "mahabs/nitro",
"id": "d0fcec7e9a1412db1969bf94fb08b67ca300bf36",
"size": "3693",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nssrc/com/citrix/netscaler/nitro/resource/config/authentication/authenticationsamlpolicy_binding.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "498"
},
{
"name": "Python",
"bytes": "10647176"
}
],
"symlink_target": ""
}
|
"""Zookeeper transport.
:copyright: (c) 2010 - 2013 by Mahendra M.
:license: BSD, see LICENSE for more details.
**Synopsis**
Connects to a zookeeper node as <server>:<port>/<vhost>
The <vhost> becomes the base for all the other znodes. So we can use
it like a vhost.
This uses the built-in kazoo recipe for queues
**References**
- https://zookeeper.apache.org/doc/trunk/recipes.html#sc_recipes_Queues
- https://kazoo.readthedocs.io/en/latest/api/recipe/queue.html
**Limitations**
This queue does not offer reliable consumption. An entry is removed from
the queue prior to being processed. So if an error occurs, the consumer
has to re-queue the item or it will be lost.
"""
import os
import socket
from queue import Empty
from kombu.utils.encoding import bytes_to_str, ensure_bytes
from kombu.utils.json import dumps, loads
from . import virtual
try:
import kazoo
from kazoo.client import KazooClient
from kazoo.recipe.queue import Queue
KZ_CONNECTION_ERRORS = (
kazoo.exceptions.SystemErrorException,
kazoo.exceptions.ConnectionLossException,
kazoo.exceptions.MarshallingErrorException,
kazoo.exceptions.UnimplementedException,
kazoo.exceptions.OperationTimeoutException,
kazoo.exceptions.NoAuthException,
kazoo.exceptions.InvalidACLException,
kazoo.exceptions.AuthFailedException,
kazoo.exceptions.SessionExpiredException,
)
KZ_CHANNEL_ERRORS = (
kazoo.exceptions.RuntimeInconsistencyException,
kazoo.exceptions.DataInconsistencyException,
kazoo.exceptions.BadArgumentsException,
kazoo.exceptions.MarshallingErrorException,
kazoo.exceptions.UnimplementedException,
kazoo.exceptions.OperationTimeoutException,
kazoo.exceptions.ApiErrorException,
kazoo.exceptions.NoNodeException,
kazoo.exceptions.NoAuthException,
kazoo.exceptions.NodeExistsException,
kazoo.exceptions.NoChildrenForEphemeralsException,
kazoo.exceptions.NotEmptyException,
kazoo.exceptions.SessionExpiredException,
kazoo.exceptions.InvalidCallbackException,
socket.error,
)
except ImportError:
kazoo = None # noqa
KZ_CONNECTION_ERRORS = KZ_CHANNEL_ERRORS = () # noqa
DEFAULT_PORT = 2181
__author__ = 'Mahendra M <[email protected]>'
class Channel(virtual.Channel):
"""Zookeeper Channel."""
_client = None
_queues = {}
def __init__(self, connection, **kwargs):
super().__init__(connection, **kwargs)
vhost = self.connection.client.virtual_host
self._vhost = '/{}'.format(vhost.strip('/'))
def _get_path(self, queue_name):
return os.path.join(self._vhost, queue_name)
def _get_queue(self, queue_name):
queue = self._queues.get(queue_name, None)
if queue is None:
queue = Queue(self.client, self._get_path(queue_name))
self._queues[queue_name] = queue
# Ensure that the queue is created
len(queue)
return queue
def _put(self, queue, message, **kwargs):
return self._get_queue(queue).put(
ensure_bytes(dumps(message)),
priority=self._get_message_priority(message, reverse=True),
)
def _get(self, queue):
queue = self._get_queue(queue)
msg = queue.get()
if msg is None:
raise Empty()
return loads(bytes_to_str(msg))
def _purge(self, queue):
count = 0
queue = self._get_queue(queue)
while True:
msg = queue.get()
if msg is None:
break
count += 1
return count
def _delete(self, queue, *args, **kwargs):
if self._has_queue(queue):
self._purge(queue)
self.client.delete(self._get_path(queue))
def _size(self, queue):
queue = self._get_queue(queue)
return len(queue)
def _new_queue(self, queue, **kwargs):
if not self._has_queue(queue):
queue = self._get_queue(queue)
def _has_queue(self, queue):
return self.client.exists(self._get_path(queue)) is not None
def _open(self):
conninfo = self.connection.client
hosts = []
if conninfo.alt:
for host_port in conninfo.alt:
if host_port.startswith('zookeeper://'):
host_port = host_port[len('zookeeper://'):]
if not host_port:
continue
try:
host, port = host_port.split(':', 1)
host_port = (host, int(port))
except ValueError:
if host_port == conninfo.hostname:
host_port = (host_port, conninfo.port or DEFAULT_PORT)
else:
host_port = (host_port, DEFAULT_PORT)
hosts.append(host_port)
host_port = (conninfo.hostname, conninfo.port or DEFAULT_PORT)
if host_port not in hosts:
hosts.insert(0, host_port)
conn_str = ','.join([f'{h}:{p}' for h, p in hosts])
conn = KazooClient(conn_str)
conn.start()
return conn
@property
def client(self):
if self._client is None:
self._client = self._open()
return self._client
class Transport(virtual.Transport):
"""Zookeeper Transport."""
Channel = Channel
polling_interval = 1
default_port = DEFAULT_PORT
connection_errors = (
virtual.Transport.connection_errors + KZ_CONNECTION_ERRORS
)
channel_errors = (
virtual.Transport.channel_errors + KZ_CHANNEL_ERRORS
)
driver_type = 'zookeeper'
driver_name = 'kazoo'
def __init__(self, *args, **kwargs):
if kazoo is None:
raise ImportError('The kazoo library is not installed')
super().__init__(*args, **kwargs)
def driver_version(self):
return kazoo.__version__
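
if __name__ == '__main__':
    # Minimal sketch of driving this transport through kombu; it assumes a
    # ZooKeeper server is reachable on localhost:2181 and that kazoo is
    # installed. The vhost and queue names are arbitrary examples.
    from kombu import Connection

    with Connection('zookeeper://localhost:2181/kombu') as conn:
        queue = conn.SimpleQueue('demo')
        queue.put({'hello': 'world'})
        message = queue.get(block=True, timeout=5)
        print(message.payload)
        message.ack()
        queue.close()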
|
{
"content_hash": "75f9d213621555c39b0513b0196bd25f",
"timestamp": "",
"source": "github",
"line_count": 201,
"max_line_length": 78,
"avg_line_length": 29.870646766169155,
"alnum_prop": 0.6149233844103931,
"repo_name": "pexip/os-kombu",
"id": "30b0e2928bd875e11317fcc2d668582e012231e3",
"size": "6004",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kombu/transport/zookeeper.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "1844"
},
{
"name": "Python",
"bytes": "1088932"
}
],
"symlink_target": ""
}
|
"""Implementation of field projection."""
from collections.abc import Mapping, Sequence
FIELD_SEPARATOR = ","
FIELD_DEREFERENCE = "__"
def apply_subfield_projection(field, value, deep=False):
"""Apply projection from request context.
The passed dictionary may be mutated.
:param field: An instance of `Field` or `Serializer`
:type field: `Field` or `Serializer`
:param value: Dictionary to apply the projection to
:type value: dict
:param deep: Also process all deep projections
:type deep: bool
"""
# Discover the root manually. We cannot use either `self.root` or `self.context`
# due to a bug with incorrect caching (see DRF issue #5087).
prefix = []
root = field
while root.parent is not None:
# Skip anonymous serializers (e.g., intermediate ListSerializers).
if root.field_name:
prefix.append(root.field_name)
root = root.parent
prefix = prefix[::-1]
context = getattr(root, "_context", {})
# If there is no request, we cannot perform filtering.
request = context.get("request")
if request is None:
return value
filtered = set(request.query_params.get("fields", "").split(FIELD_SEPARATOR))
filtered.discard("")
if not filtered:
# If there are no fields specified in the filter, return all fields.
return value
# Extract projection for current and deeper levels.
current_level = len(prefix)
current_projection = []
for item in filtered:
item = item.split(FIELD_DEREFERENCE)
if len(item) <= current_level:
continue
if item[:current_level] == prefix:
if deep:
current_projection.append(item[current_level:])
else:
current_projection.append([item[current_level]])
if deep and not current_projection:
# For deep projections, an empty projection means that all fields should
# be returned without any projection.
return value
# Apply projection.
return apply_projection(current_projection, value)
def apply_projection(projection, value):
"""Apply projection."""
if isinstance(value, Sequence):
# Apply projection to each item in the list.
return [apply_projection(projection, item) for item in value]
elif not isinstance(value, Mapping):
# Non-dictionary values are simply ignored.
return value
# Extract projection for current level.
try:
current_projection = [p[0] for p in projection]
except IndexError:
return value
# Apply projection.
for name in list(value.keys()):
if name not in current_projection:
value.pop(name)
elif isinstance(value[name], dict):
# Apply projection recursively.
value[name] = apply_projection(
[p[1:] for p in projection if p[0] == name], value[name]
)
return value
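
if __name__ == "__main__":
    # Self-contained sketch of how a parsed ``fields`` projection is applied;
    # the payload and field names below are made up for illustration and
    # correspond to ``?fields=id,contributor__username`` after splitting on
    # FIELD_SEPARATOR and FIELD_DEREFERENCE.
    sample = {
        "id": 1,
        "name": "Example data object",
        "contributor": {"id": 2, "username": "alice", "email": "alice@example.test"},
    }
    projection = [["id"], ["contributor", "username"]]
    print(apply_projection(projection, sample))
    # -> {'id': 1, 'contributor': {'username': 'alice'}}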
|
{
"content_hash": "aabbc9c84415264ab6e81833479db211",
"timestamp": "",
"source": "github",
"line_count": 92,
"max_line_length": 84,
"avg_line_length": 32.26086956521739,
"alnum_prop": 0.6371293800539084,
"repo_name": "genialis/resolwe",
"id": "280956dee900cdbeb29531ab5dd6351f464c0b5e",
"size": "2968",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "resolwe/rest/projection.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "PLpgSQL",
"bytes": "21533"
},
{
"name": "Python",
"bytes": "1813118"
},
{
"name": "Shell",
"bytes": "6244"
}
],
"symlink_target": ""
}
|
from api.models import match
import json
from django.http import HttpResponse
|
{
"content_hash": "e6503f6f537736a74144171492aef4ab",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 36,
"avg_line_length": 26,
"alnum_prop": 0.8461538461538461,
"repo_name": "nerdherd/NerdyScout",
"id": "165f48530be7793c09233071bd74425fc652c0f2",
"size": "78",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "api/matches.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "120"
},
{
"name": "CSS",
"bytes": "192701"
},
{
"name": "HTML",
"bytes": "8686"
},
{
"name": "JavaScript",
"bytes": "278717"
},
{
"name": "Python",
"bytes": "14531"
}
],
"symlink_target": ""
}
|
"""
shortest.py
----------------
Given a mesh and two vertex indices find the shortest path
between the two vertices while only traveling along edges
of the mesh.
"""
import trimesh
import networkx as nx
if __name__ == '__main__':
# test on a sphere mesh
mesh = trimesh.primitives.Sphere()
# edges without duplication
edges = mesh.edges_unique
# the actual length of each unique edge
length = mesh.edges_unique_length
# create the graph with edge attributes for length
g = nx.Graph()
for edge, L in zip(edges, length):
g.add_edge(*edge, length=L)
# alternative method for weighted graph creation
# you can also create the graph with from_edgelist and
# a list comprehension, which is like 1.5x faster
ga = nx.from_edgelist([(e[0], e[1], {'length': L})
for e, L in zip(edges, length)])
# arbitrary indices of mesh.vertices to test with
start = 0
end = int(len(mesh.vertices) / 2.0)
# run the shortest path query using length for edge weight
path = nx.shortest_path(g,
source=start,
target=end,
weight='length')
# VISUALIZE RESULT
# make the sphere transparent-ish
mesh.visual.face_colors = [100, 100, 100, 100]
# Path3D with the path between the points
path_visual = trimesh.load_path(mesh.vertices[path])
# visualizable two points
points_visual = trimesh.points.PointCloud(mesh.vertices[[start, end]])
# create a scene with the mesh, path, and points
scene = trimesh.Scene([
points_visual,
path_visual,
mesh])
scene.show(smooth=False)
|
{
"content_hash": "a316dda4ce41e93254c6810f8820527a",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 74,
"avg_line_length": 28.4,
"alnum_prop": 0.6185446009389671,
"repo_name": "dajusc/trimesh",
"id": "cc82810f255f3c4a049da9ca2d2f1987ab10dff6",
"size": "1704",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "examples/shortest.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "692521"
},
{
"name": "Shell",
"bytes": "3713"
}
],
"symlink_target": ""
}
|
import glob
import gzip
import pickle
with gzip.open('data.pkl.gz', 'rb') as pkl_file:
data = pickle.load(pkl_file)
print('Loading page information...')
pages = {}
for dimension in data:
for i in range(len(data[dimension][1])):
pages[data[dimension][1][i]] = 0
print('Processing view data...')
for stat_file_name in sorted(glob.glob('view_data/pagecounts-*')):
print(stat_file_name)
with gzip.open(stat_file_name, 'rt') as stat_file:
for line in stat_file.readlines():
line = line.split()
if line[0] == 'en' and line[1] in pages:
pages[line[1]] += int(line[2])
with gzip.open('view_stats.pkl.gz', 'wb') as output:
pickle.dump(pages, output, pickle.HIGHEST_PROTOCOL)
|
{
"content_hash": "17cf9bf9037814d2a465b63084293821",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 66,
"avg_line_length": 28.76923076923077,
"alnum_prop": 0.6256684491978609,
"repo_name": "mpetroff/nugacious",
"id": "4eaca8b513f1c51125527b836fb2359e8c266eb7",
"size": "862",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "get_data/extract_views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "5333"
},
{
"name": "HTML",
"bytes": "7987"
},
{
"name": "Python",
"bytes": "29761"
}
],
"symlink_target": ""
}
|
from rest_framework import serializers
from wenjuan.models.question import Answer
class AnswerModelSerializer(serializers.ModelSerializer):
"""
Answer Model Serializer
"""
class Meta:
model = Answer
fields = ("id", "question", "option", "answer")
class AnswerDetailSerializer(serializers.ModelSerializer):
"""
Answer Detail Serializer
"""
question = serializers.SlugRelatedField(slug_field="title", read_only=True)
# question_id = serializers.CharField(source="question_id", read_only=True)
class Meta:
model = Answer
fields = ("id", "question_id", "question", "option", "answer")
|
{
"content_hash": "3bd3a45e84938a771bfcc313e363c724",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 79,
"avg_line_length": 26.4,
"alnum_prop": 0.6727272727272727,
"repo_name": "codelieche/codelieche.com",
"id": "ee2bf93cfd4e7b22cb55c39d5fb984e0a3a67326",
"size": "683",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "blog.v1/apps/wenjuan/serializer/answer.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "4227"
},
{
"name": "HTML",
"bytes": "31177"
},
{
"name": "JavaScript",
"bytes": "26711"
},
{
"name": "Less",
"bytes": "4358"
},
{
"name": "Python",
"bytes": "157863"
},
{
"name": "SCSS",
"bytes": "24629"
},
{
"name": "Shell",
"bytes": "509"
}
],
"symlink_target": ""
}
|
"""measure_cocoapod_size.py provides size impact of a given set of cocoapods.
Usage: ./measure_cocoapod_size.py -cocoapods $POD_NAME:$POD_VERSION
"""
import argparse
import json
import os
import tempfile
from collections import OrderedDict
from xcode_project_diff import GenerateSizeDifference
from utils import shell
OBJC_APP_DIR = 'sizetestproject'
OBJC_APP_NAME = 'SizeTest'
SWIFT_APP_DIR = 'SwiftApp'
SWIFT_APP_NAME = 'SwiftApp'
SIZE_CONFIG_PATH = 'size_build_configuration.json'
IOS_VERSION_KEY = 'iOSVersion'
MODE_SWIFT = 'swift'
MODE_OBJC = 'objc'
DEFAULT_SPEC_REPOS = ['https://cdn.cocoapods.org/']
MASTER_SOURCE = 'master'
SPEC_REPO_DICT = {
'cpdc-internal': 'sso://cpdc-internal/spec',
'cpdc-eap': 'sso://cpdc-eap/spec',
'specsstaging': 'https://github.com/firebase/SpecsStaging',
MASTER_SOURCE: 'https://cdn.cocoapods.org/'
}
def GetSampleApp(mode):
if mode == MODE_SWIFT:
return SWIFT_APP_DIR, SWIFT_APP_NAME
else:
return OBJC_APP_DIR, OBJC_APP_NAME
def InstallPods(cocoapods, target_dir, spec_repos, target_name, mode, pod_sources, ios_version):
"""InstallPods installs the pods.
Args:
cocoapods: Mapping from pod names to pod versions.
target_dir: The target directory.
spec_repos: The set of spec repos.
target_name: The name of the target.
mode: The type of cocoapods.
    pod_sources: A dict mapping each pod to its source.
ios_version: iOS version of the project.
Returns:
The path to the workspace.
"""
cwd = os.getcwd()
os.chdir(target_dir)
shell('pod init')
shell('touch Podfile')
with open('Podfile', 'w') as podfile:
if ios_version is not None:
podfile.write('platform :ios, \'{}\'\n'.format(ios_version))
for repo in spec_repos:
podfile.write('source "{}"\n'.format(repo))
podfile.write('\n')
podfile.write('use_frameworks! :linkage => :static\n')
podfile.write('target \'{}\' do\n'.format(target_name))
for pod, version in cocoapods.items():
if version:
podfile.write(' pod \'{}\', \'{}\'\n'.format(pod, version))
elif pod_sources is not None:
# pod_sources[pod] should have pairs like:
# "sdk":"FirebaseDatabase" and
# "git":"sdk/repo.git", "branch":"main" or
# "path":"~/Documents/SDKrepo"
for pod_config in pod_sources['pods']:
if pod_config['sdk'] == pod:
pod_source_config = []
for config in pod_config.items():
if config[0] != 'sdk':
pod_source_config.append(":{} => \'{}\'".format(config[0], config[1]))
podfile.write(' pod \'{}\', {}\n'.format(pod, ",".join(pod_source_config)))
break
else:
podfile.write(' pod \'{}\'\n'.format(pod))
podfile.write('end')
shell('cat Podfile')
shell('pod install')
os.chdir(cwd)
return os.path.join(target_dir, '{}.xcworkspace'.format(target_name))
def CopyProject(source_dir, target_dir):
"""CopyProject copies the project from the source to the target.
Args:
source_dir: The path to the source directory.
target_dir: The path to the target directory.
"""
shell('cp -r {} {}'.format(source_dir, target_dir))
def ValidateSourceConfig(pod_sources):
if 'pods' not in pod_sources:
raise ValueError(
"The JSON config file should have 'pods' object containing pod configs.")
for pod_config in pod_sources['pods']:
source_keys = list(pod_config.keys())
try:
sdk = pod_config['sdk']
except KeyError:
print("SDK should be specified.")
raise
if sdk.strip() == "":
raise ValueError( "SDK should not be empty or blank.")
elif pod_config and ( source_keys[1] not in {"git", "path"} ):
raise ValueError(
"Pod source of SDK {} should be `git` or `path`.".format(sdk))
elif len(source_keys) == 3:
if source_keys[1] != "git":
raise ValueError(
"For multiple specs for the SDK {} ,`git` should be added with `branch`, `tag` or `commit`".format(sdk))
if source_keys[2] not in {"branch", "tag", "commit"}:
raise ValueError(
"A specified version of the SDK {} should be from `branch`, `tag` or `commit`.".format(sdk))
elif len(source_keys) > 3:
raise ValueError(
"Pod source of SDK {} can only specify `sdk` with `path`, `git`, or `git` and a reference (like a `branch`, `tag`, or `commit`)."
"See --help for an example config."
.format(sdk)
)
def GetPodSizeImpact(parsed_args):
"""GetPodSizeImpact gets the size impact of the set of pods.
Args:
parsed_args: The set of arguments passed to the program.
"""
sample_app_dir, sample_app_name = GetSampleApp(parsed_args.mode)
cocoapods = {}
if parsed_args.spec_repos:
spec_repos = []
# If cdn source is in spec_repos input, then it will be moved
# to the end and be added as the last source.
if MASTER_SOURCE in parsed_args.spec_repos:
parsed_args.spec_repos.append(
parsed_args.spec_repos.pop(
parsed_args.spec_repos.index(
MASTER_SOURCE)))
for repo in parsed_args.spec_repos:
if repo in SPEC_REPO_DICT:
spec_repos.append(SPEC_REPO_DICT[repo])
else:
spec_repos.append(repo)
else:
spec_repos = DEFAULT_SPEC_REPOS
for pod in parsed_args.cocoapods:
pod_info = pod.split(':')
pod_name = pod_info[0].strip()
if len(pod_info) > 1:
pod_version = pod_info[1].strip()
else:
pod_version = ''
cocoapods[pod_name] = pod_version
# Load JSON in order since in bleeding edge version of a Pod, `git` and
# `branch`/`tag`/`commit` are required and should be in order. e.g.
# pod 'Alamofire', :git => 'https://github.com/Alamofire/Alamofire.git', :branch => 'dev'
try:
if pod_version and parsed_args.cocoapods_source_config:
print("Since a version for the pod {} is specified, The config file {} \
will be validated but not used for binary measurement.".format(
pod_name, parsed_args.cocoapods_source_config.name))
pod_sources = json.load(parsed_args.cocoapods_source_config, \
object_pairs_hook=OrderedDict) if parsed_args.cocoapods_source_config else None
if pod_sources: ValidateSourceConfig(pod_sources)
except ValueError as e:
raise ValueError("could not decode JSON value %s: %s" % (parsed_args.cocoapods_source_config.name, e))
# Set iOS version for the project, the lowest iOS version of all targets
# will be added if the version is not specified. Since there is only one
# target in either the Objectve-C or the Swift testapp project, the version
# will be the one of the target.
ios_version = parsed_args.ios_version
if ios_version is None:
with open(SIZE_CONFIG_PATH, 'r') as size_config:
config_info = json.loads(size_config.read())
ios_version = config_info[IOS_VERSION_KEY] if IOS_VERSION_KEY in config_info else None
base_project = tempfile.mkdtemp()
target_project = tempfile.mkdtemp()
target_dir = os.path.join(target_project, sample_app_dir)
CopyProject(sample_app_dir, base_project)
CopyProject(sample_app_dir, target_project)
target_project = InstallPods(cocoapods,
target_dir,
spec_repos, sample_app_name, parsed_args.mode,
pod_sources, ios_version)
source_project = os.path.join(base_project,
'{}/{}.xcodeproj'.format(sample_app_dir, sample_app_name))
source_size, target_size = GenerateSizeDifference(
source_project, sample_app_name, target_project, sample_app_name, parsed_args.build_timeout)
if parsed_args.json:
# Transfer Podfile to JSON format.
podfile = shell('pod ipc podfile-json {}/Podfile'.format(target_dir), capture_stdout=True)
podfile_dict = json.loads(podfile)
podfile_dict['combined_pods_extra_size'] = target_size - source_size
with open(parsed_args.json, 'w') as json_file:
json.dump(podfile_dict, json_file)
# Throw an error if the target size is 0, an example for command
# ./measure_cocoapod_size.py --cocoapods FirebaseABTesting AnErrorPod:8.0.0
# This command will throw the following error:
# ValueError: The size of the following pod combination is 0 and this could be caused by a failed build.
# FirebaseABTesting
# AnErrorPod:8.0.0
if target_size == 0:
target_pods = "\n".join(
["{}:{}".format(pod,version) if version != "" else pod
for pod, version in cocoapods.items()])
raise ValueError(
"The size of the following pod combination is 0 and this could be caused by a failed build.\n{}".format(target_pods))
print('The pods combined add an extra size of {} bytes'.format(
target_size - source_size))
def Main():
"""Main generates the PodSize impact.
"""
parser = argparse.ArgumentParser(description='The size impact of a cocoapod',
formatter_class=argparse.RawTextHelpFormatter)
parser.add_argument(
'--cocoapods',
metavar='N',
type=str,
nargs='+',
required=True,
help='The set of cocoapods')
parser.add_argument(
'--mode',
type=str,
choices=[MODE_SWIFT, MODE_OBJC],
default=MODE_OBJC,
help='Type of cocoapod'
)
parser.add_argument(
'--spec_repos',
metavar='N',
type=str,
nargs='+',
required=False,
help='The set of spec_repos')
parser.add_argument(
'--cocoapods_source_config',
metavar='CONFIG_JSON',
type=argparse.FileType('r'),
nargs='?',
required=False,
default=None,
help=''' A JSON file with customized pod source.E.g.
{
"pods":[
{
"sdk":"FirebaseDatabase",
"git":"https://github.com/firebase/firebase-ios-sdk",
"branch":"master"
}
]
}
If versions are specified in the `cocoapods` arg, config here will be skipped.
''')
parser.add_argument(
'--build_timeout',
metavar='SECONDS',
nargs='?',
required=False,
default=None,
help='Timeout to build testapps.')
parser.add_argument(
'--json',
metavar='OUTPUT_FILE_NAME',
nargs='?',
required=False,
default=None,
help='Output JSON file.')
parser.add_argument(
'--ios_version',
metavar='IOS_VERSION',
nargs='?',
required=False,
default=None,
help='Specify minimum ios version in the Podfile before a project is built.')
args = parser.parse_args()
GetPodSizeImpact(args)
if __name__ == '__main__':
Main()
|
{
"content_hash": "3f2a4248c6306566998576beea3db04e",
"timestamp": "",
"source": "github",
"line_count": 305,
"max_line_length": 137,
"avg_line_length": 35.19344262295082,
"alnum_prop": 0.6285634432643935,
"repo_name": "google/cocoapods-size",
"id": "ec123d61bcc2202c77c140146cd80ec098a21c23",
"size": "11331",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "measure_cocoapod_size.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Objective-C",
"bytes": "4345"
},
{
"name": "Python",
"bytes": "19106"
},
{
"name": "Swift",
"bytes": "4901"
}
],
"symlink_target": ""
}
|
import os, unittest
import sqlite3 as sqlite
class CollationTests(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def CheckCreateCollationNotCallable(self):
con = sqlite.connect(":memory:")
try:
con.create_collation("X", 42)
self.fail("should have raised a TypeError")
except TypeError, e:
self.assertEqual(e.args[0], "parameter must be callable")
def CheckCreateCollationNotAscii(self):
con = sqlite.connect(":memory:")
try:
con.create_collation("collä", cmp)
self.fail("should have raised a ProgrammingError")
except sqlite.ProgrammingError, e:
pass
def CheckCollationIsUsed(self):
if sqlite.version_info < (3, 2, 1): # old SQLite versions crash on this test
return
def mycoll(x, y):
# reverse order
return -cmp(x, y)
con = sqlite.connect(":memory:")
con.create_collation("mycoll", mycoll)
sql = """
select x from (
select 'a' as x
union
select 'b' as x
union
select 'c' as x
) order by x collate mycoll
"""
result = con.execute(sql).fetchall()
if result[0][0] != "c" or result[1][0] != "b" or result[2][0] != "a":
self.fail("the expected order was not returned")
con.create_collation("mycoll", None)
try:
result = con.execute(sql).fetchall()
self.fail("should have raised an OperationalError")
except sqlite.OperationalError, e:
self.assertEqual(e.args[0].lower(), "no such collation sequence: mycoll")
def CheckCollationRegisterTwice(self):
"""
Register two different collation functions under the same name.
Verify that the last one is actually used.
"""
con = sqlite.connect(":memory:")
con.create_collation("mycoll", cmp)
con.create_collation("mycoll", lambda x, y: -cmp(x, y))
result = con.execute("""
select x from (select 'a' as x union select 'b' as x) order by x collate mycoll
""").fetchall()
if result[0][0] != 'b' or result[1][0] != 'a':
self.fail("wrong collation function is used")
def CheckDeregisterCollation(self):
"""
Register a collation, then deregister it. Make sure an error is raised if we try
to use it.
"""
con = sqlite.connect(":memory:")
con.create_collation("mycoll", cmp)
con.create_collation("mycoll", None)
try:
con.execute("select 'a' as x union select 'b' as x order by x collate mycoll")
self.fail("should have raised an OperationalError")
except sqlite.OperationalError, e:
if not e.args[0].startswith("no such collation sequence"):
self.fail("wrong OperationalError raised")
class ProgressTests(unittest.TestCase):
def CheckProgressHandlerUsed(self):
"""
Test that the progress handler is invoked once it is set.
"""
con = sqlite.connect(":memory:")
progress_calls = []
def progress():
progress_calls.append(None)
return 0
con.set_progress_handler(progress, 1)
con.execute("""
create table foo(a, b)
""")
self.assertTrue(progress_calls)
def CheckOpcodeCount(self):
"""
Test that the opcode argument is respected.
"""
con = sqlite.connect(":memory:")
progress_calls = []
def progress():
progress_calls.append(None)
return 0
con.set_progress_handler(progress, 1)
curs = con.cursor()
curs.execute("""
create table foo (a, b)
""")
first_count = len(progress_calls)
progress_calls = []
con.set_progress_handler(progress, 2)
curs.execute("""
create table bar (a, b)
""")
second_count = len(progress_calls)
self.assertTrue(first_count > second_count)
def CheckCancelOperation(self):
"""
Test that returning a non-zero value stops the operation in progress.
"""
con = sqlite.connect(":memory:")
progress_calls = []
def progress():
progress_calls.append(None)
return 1
con.set_progress_handler(progress, 1)
curs = con.cursor()
self.assertRaises(
sqlite.OperationalError,
curs.execute,
"create table bar (a, b)")
def CheckClearHandler(self):
"""
Test that setting the progress handler to None clears the previously set handler.
"""
con = sqlite.connect(":memory:")
        action = []
        def progress():
            # The outer name cannot be rebound from a nested function in
            # Python 2, so record invocations in a mutable list instead.
            action.append(1)
            return 0
        con.set_progress_handler(progress, 1)
        con.set_progress_handler(None, 1)
        con.execute("select 1 union select 2 union select 3").fetchall()
        self.assertEqual(len(action), 0, "progress handler was not cleared")
def suite():
collation_suite = unittest.makeSuite(CollationTests, "Check")
progress_suite = unittest.makeSuite(ProgressTests, "Check")
return unittest.TestSuite((collation_suite, progress_suite))
def test():
runner = unittest.TextTestRunner()
runner.run(suite())
if __name__ == "__main__":
test()
|
{
"content_hash": "e8204517903a240f978dc4d5085e47ef",
"timestamp": "",
"source": "github",
"line_count": 166,
"max_line_length": 91,
"avg_line_length": 34.25903614457831,
"alnum_prop": 0.5505538948478987,
"repo_name": "ktan2020/legacy-automation",
"id": "59a80e7d0994f79a1233ed0f8e9036d4112fb787",
"size": "6748",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "win/Lib/sqlite3/test/hooks.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "ActionScript",
"bytes": "913"
},
{
"name": "Ada",
"bytes": "289"
},
{
"name": "Assembly",
"bytes": "687"
},
{
"name": "Boo",
"bytes": "540"
},
{
"name": "C",
"bytes": "40116"
},
{
"name": "C#",
"bytes": "474"
},
{
"name": "C++",
"bytes": "393"
},
{
"name": "CSS",
"bytes": "70883"
},
{
"name": "ColdFusion",
"bytes": "1012"
},
{
"name": "Common Lisp",
"bytes": "1034"
},
{
"name": "D",
"bytes": "1858"
},
{
"name": "Eiffel",
"bytes": "426"
},
{
"name": "Erlang",
"bytes": "9243"
},
{
"name": "FORTRAN",
"bytes": "1810"
},
{
"name": "Forth",
"bytes": "182"
},
{
"name": "Groovy",
"bytes": "2366"
},
{
"name": "Haskell",
"bytes": "816"
},
{
"name": "Haxe",
"bytes": "455"
},
{
"name": "Java",
"bytes": "1155"
},
{
"name": "JavaScript",
"bytes": "69444"
},
{
"name": "Lua",
"bytes": "795"
},
{
"name": "Matlab",
"bytes": "1278"
},
{
"name": "OCaml",
"bytes": "350"
},
{
"name": "Objective-C++",
"bytes": "885"
},
{
"name": "PHP",
"bytes": "1411"
},
{
"name": "Pascal",
"bytes": "388"
},
{
"name": "Perl",
"bytes": "252651"
},
{
"name": "Pike",
"bytes": "589"
},
{
"name": "Python",
"bytes": "42085780"
},
{
"name": "R",
"bytes": "1156"
},
{
"name": "Ruby",
"bytes": "480"
},
{
"name": "Scheme",
"bytes": "282"
},
{
"name": "Shell",
"bytes": "30518"
},
{
"name": "Smalltalk",
"bytes": "926"
},
{
"name": "Squirrel",
"bytes": "697"
},
{
"name": "Stata",
"bytes": "302"
},
{
"name": "SystemVerilog",
"bytes": "3145"
},
{
"name": "Tcl",
"bytes": "1039"
},
{
"name": "TeX",
"bytes": "1746"
},
{
"name": "VHDL",
"bytes": "985"
},
{
"name": "Vala",
"bytes": "664"
},
{
"name": "Verilog",
"bytes": "439"
},
{
"name": "Visual Basic",
"bytes": "2142"
},
{
"name": "XSLT",
"bytes": "152770"
},
{
"name": "ooc",
"bytes": "890"
},
{
"name": "xBase",
"bytes": "769"
}
],
"symlink_target": ""
}
|
"""Unittest for the type checker."""
import unittest
from astroid import test_utils
from pylint.checkers import typecheck
from pylint.testutils import CheckerTestCase, Message, set_config
class TypeCheckerTest(CheckerTestCase):
"Tests for pylint.checkers.typecheck"
CHECKER_CLASS = typecheck.TypeChecker
def test_no_member_in_getattr(self):
"""Make sure that a module attribute access is checked by pylint.
"""
node = test_utils.extract_node("""
import optparse
optparse.THIS_does_not_EXIST
""")
with self.assertAddsMessages(
Message(
'no-member',
node=node,
args=('Module', 'optparse', 'THIS_does_not_EXIST'))):
self.checker.visit_getattr(node)
@set_config(ignored_modules=('argparse',))
def test_no_member_in_getattr_ignored(self):
"""Make sure that a module attribute access check is omitted with a
module that is configured to be ignored.
"""
node = test_utils.extract_node("""
import argparse
argparse.THIS_does_not_EXIST
""")
with self.assertNoMessages():
self.checker.visit_getattr(node)
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "7db01dbaf355ee0697a5de8ecfdbaea3",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 75,
"avg_line_length": 30.738095238095237,
"alnum_prop": 0.6119287374128582,
"repo_name": "willemneal/Docky",
"id": "33efe5da5c96b582fff6bbbb2b10ef6e18f5dd19",
"size": "1291",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "lib/pylint/test/unittest_checker_typecheck.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "APL",
"bytes": "587"
},
{
"name": "ASP",
"bytes": "636"
},
{
"name": "ActionScript",
"bytes": "5686"
},
{
"name": "Ada",
"bytes": "5145"
},
{
"name": "Agda",
"bytes": "3154"
},
{
"name": "Alloy",
"bytes": "6579"
},
{
"name": "AppleScript",
"bytes": "421"
},
{
"name": "Assembly",
"bytes": "3168"
},
{
"name": "AutoHotkey",
"bytes": "3733"
},
{
"name": "AutoIt",
"bytes": "667"
},
{
"name": "Awk",
"bytes": "4528"
},
{
"name": "BlitzBasic",
"bytes": "1730"
},
{
"name": "BlitzMax",
"bytes": "2387"
},
{
"name": "Boo",
"bytes": "1111"
},
{
"name": "Bro",
"bytes": "7337"
},
{
"name": "C",
"bytes": "109073"
},
{
"name": "C#",
"bytes": "17784"
},
{
"name": "C++",
"bytes": "79372"
},
{
"name": "COBOL",
"bytes": "114812"
},
{
"name": "CSS",
"bytes": "26952"
},
{
"name": "Ceylon",
"bytes": "1387"
},
{
"name": "Chapel",
"bytes": "4366"
},
{
"name": "Cirru",
"bytes": "2574"
},
{
"name": "Clean",
"bytes": "2878"
},
{
"name": "Clojure",
"bytes": "23871"
},
{
"name": "CoffeeScript",
"bytes": "20149"
},
{
"name": "ColdFusion",
"bytes": "9006"
},
{
"name": "Common Lisp",
"bytes": "91743"
},
{
"name": "Coq",
"bytes": "66"
},
{
"name": "Cuda",
"bytes": "776"
},
{
"name": "D",
"bytes": "5475"
},
{
"name": "Dart",
"bytes": "591"
},
{
"name": "Dylan",
"bytes": "6343"
},
{
"name": "Ecl",
"bytes": "2599"
},
{
"name": "Eiffel",
"bytes": "2145"
},
{
"name": "Elixir",
"bytes": "4340"
},
{
"name": "Emacs Lisp",
"bytes": "5709"
},
{
"name": "Erlang",
"bytes": "5746"
},
{
"name": "F#",
"bytes": "19156"
},
{
"name": "FORTRAN",
"bytes": "27879"
},
{
"name": "Factor",
"bytes": "10194"
},
{
"name": "Fancy",
"bytes": "2581"
},
{
"name": "Fantom",
"bytes": "25331"
},
{
"name": "GAP",
"bytes": "15760"
},
{
"name": "Gnuplot",
"bytes": "10376"
},
{
"name": "Go",
"bytes": "172"
},
{
"name": "Golo",
"bytes": "1649"
},
{
"name": "Gosu",
"bytes": "2853"
},
{
"name": "Groovy",
"bytes": "2586"
},
{
"name": "Haskell",
"bytes": "49593"
},
{
"name": "Haxe",
"bytes": "16812"
},
{
"name": "Hy",
"bytes": "7237"
},
{
"name": "IDL",
"bytes": "2098"
},
{
"name": "Idris",
"bytes": "2771"
},
{
"name": "Inform 7",
"bytes": "1944"
},
{
"name": "Ioke",
"bytes": "469"
},
{
"name": "Isabelle",
"bytes": "21392"
},
{
"name": "Jasmin",
"bytes": "9428"
},
{
"name": "Java",
"bytes": "81613"
},
{
"name": "JavaScript",
"bytes": "14143"
},
{
"name": "Julia",
"bytes": "27687"
},
{
"name": "Kotlin",
"bytes": "971"
},
{
"name": "LSL",
"bytes": "160"
},
{
"name": "Lasso",
"bytes": "18650"
},
{
"name": "LiveScript",
"bytes": "972"
},
{
"name": "Logos",
"bytes": "306"
},
{
"name": "Logtalk",
"bytes": "7260"
},
{
"name": "Lua",
"bytes": "8677"
},
{
"name": "Makefile",
"bytes": "76274"
},
{
"name": "Mathematica",
"bytes": "191"
},
{
"name": "Monkey",
"bytes": "2587"
},
{
"name": "Moocode",
"bytes": "3343"
},
{
"name": "MoonScript",
"bytes": "14862"
},
{
"name": "Nemerle",
"bytes": "1517"
},
{
"name": "Nimrod",
"bytes": "37191"
},
{
"name": "Nit",
"bytes": "55581"
},
{
"name": "Nix",
"bytes": "2448"
},
{
"name": "OCaml",
"bytes": "42416"
},
{
"name": "Objective-C",
"bytes": "3385"
},
{
"name": "Objective-J",
"bytes": "15340"
},
{
"name": "Opa",
"bytes": "172"
},
{
"name": "OpenEdge ABL",
"bytes": "318"
},
{
"name": "PAWN",
"bytes": "6555"
},
{
"name": "PHP",
"bytes": "17354"
},
{
"name": "Pan",
"bytes": "1241"
},
{
"name": "Pascal",
"bytes": "84519"
},
{
"name": "Perl",
"bytes": "3611"
},
{
"name": "Perl6",
"bytes": "49676"
},
{
"name": "PigLatin",
"bytes": "6657"
},
{
"name": "Pike",
"bytes": "8479"
},
{
"name": "PowerShell",
"bytes": "6932"
},
{
"name": "Prolog",
"bytes": "738"
},
{
"name": "Puppet",
"bytes": "130"
},
{
"name": "Python",
"bytes": "6272729"
},
{
"name": "R",
"bytes": "4057"
},
{
"name": "Racket",
"bytes": "11341"
},
{
"name": "Rebol",
"bytes": "1887"
},
{
"name": "Red",
"bytes": "10536"
},
{
"name": "Ruby",
"bytes": "91403"
},
{
"name": "Rust",
"bytes": "6788"
},
{
"name": "Scala",
"bytes": "730"
},
{
"name": "Scheme",
"bytes": "47137"
},
{
"name": "Scilab",
"bytes": "943"
},
{
"name": "Shell",
"bytes": "121510"
},
{
"name": "ShellSession",
"bytes": "320"
},
{
"name": "Smalltalk",
"bytes": "156665"
},
{
"name": "SourcePawn",
"bytes": "130"
},
{
"name": "Standard ML",
"bytes": "36869"
},
{
"name": "Swift",
"bytes": "2035"
},
{
"name": "SystemVerilog",
"bytes": "265"
},
{
"name": "TypeScript",
"bytes": "535"
},
{
"name": "VHDL",
"bytes": "4446"
},
{
"name": "VimL",
"bytes": "16922"
},
{
"name": "Visual Basic",
"bytes": "17210"
},
{
"name": "XQuery",
"bytes": "4289"
},
{
"name": "XSLT",
"bytes": "755"
},
{
"name": "Xtend",
"bytes": "727"
},
{
"name": "Zephir",
"bytes": "485"
},
{
"name": "eC",
"bytes": "26388"
},
{
"name": "nesC",
"bytes": "23697"
},
{
"name": "xBase",
"bytes": "3349"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import, unicode_literals
class Site(object):
"""The site supported object which could be mounted to app instance.
:param name: the name of the supported site.
"""
def __init__(self, name):
self.name = name
self.actions = []
def record_action(self, method_name, *args, **kwargs):
"""Record the method-calling action.
        The recorded actions are expected to be played back on a target object.
        :param method_name: the name of the called method.
:param args: the general arguments for calling method.
:param kwargs: the keyword arguments for calling method.
"""
self.actions.append((method_name, args, kwargs))
def play_actions(self, target):
"""Play record actions on the target object.
:param target: the target which recive all record actions, is a brown
ant app instance normally.
:type target: :class:`~brownant.app.Brownant`
"""
for method_name, args, kwargs in self.actions:
method = getattr(target, method_name)
method(*args, **kwargs)
def route(self, host, rule, **options):
"""The decorator to register wrapped function as the brown ant app.
All optional parameters of this method are compatible with the
:meth:`~brownant.app.Brownant.add_url_rule`.
        Registered functions or classes must be importable by their qualified
        names. Unlike :class:`~flask.Flask`, registration here works in a
        lazy-loading mode: registered objects are only loaded just before
        their first use.
The right way::
@site.route("www.example.com", "/item/<int:item_id>")
def spam(request, item_id):
pass
The wrong way::
def egg():
# the function could not be imported by its qualified name
@site.route("www.example.com", "/item/<int:item_id>")
def spam(request, item_id):
pass
egg()
:param host: the limited host name.
:param rule: the URL path rule as string.
:param options: the options to be forwarded to the
:class:`werkzeug.routing.Rule` object.
"""
def decorator(func):
endpoint = "{func.__module__}:{func.__name__}".format(func=func)
self.record_action("add_url_rule", host, rule, endpoint, **options)
return func
return decorator
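
if __name__ == "__main__":
    # Self-contained sketch of the record/play mechanism described above.
    # ``FakeApp`` stands in for a Brownant app instance; the host and rule
    # are arbitrary examples.
    class FakeApp(object):
        def add_url_rule(self, host, rule, endpoint, **options):
            print("registered {0} {1} -> {2}".format(host, rule, endpoint))

    site = Site("example")

    @site.route("www.example.com", "/item/<int:item_id>")
    def spam(request, item_id):
        pass

    site.play_actions(FakeApp())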
|
{
"content_hash": "7881fd4f644ac2468352c9794f5f85a0",
"timestamp": "",
"source": "github",
"line_count": 72,
"max_line_length": 79,
"avg_line_length": 35.125,
"alnum_prop": 0.5923289837880585,
"repo_name": "douban/brownant",
"id": "82a40641f4cc579a9fda090352dbf718bf31437a",
"size": "2529",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "brownant/site.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "46526"
}
],
"symlink_target": ""
}
|
from datetime import datetime
from django.template import defaultfilters
from django.utils.translation import ugettext as _
from django.utils.timezone import is_aware, utc
from .. import register
@register.filter(expects_localtime=True)
def shortnaturaltime(value):
"""
    now, 1s, 1m, 1h, 1 Jan, 1 Jan '12
"""
tz = utc if is_aware(value) else None
now = datetime.now(tz)
if value > now: # Future
return '%(delta)s' % {'delta': defaultfilters.date(value, 'j M \'y')}
delta = now - value
if delta.days:
if defaultfilters.date(now, 'y') == defaultfilters.date(value, 'y'):
return '%(delta)s' % {'delta': defaultfilters.date(value, 'j M')}
return '%(delta)s' % {'delta': defaultfilters.date(value, 'j M \'y')}
if not delta.seconds:
return _('now')
count = delta.seconds
if count < 60:
return _('%(count)ss') % {'count': count}
count //= 60
if count < 60:
return _('%(count)sm') % {'count': count}
count //= 60
return _('%(count)sh') % {'count': count}
|
{
"content_hash": "213913a121ee0b6249be77ab13866f31",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 77,
"avg_line_length": 26.317073170731707,
"alnum_prop": 0.597775718257646,
"repo_name": "Si-elegans/Web-based_GUI_Tools",
"id": "93b7272d6e7ce68fcc2dba875b3ac7524ac1ee12",
"size": "1103",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "spirit/templatetags/tags/utils/time.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "183432"
},
{
"name": "HTML",
"bytes": "821815"
},
{
"name": "JavaScript",
"bytes": "5240621"
},
{
"name": "Python",
"bytes": "2130547"
}
],
"symlink_target": ""
}
|
from sqlalchemy import Column, String, Integer
from sqlalchemy.ext.declarative import declarative_base
from db import engine
Base = declarative_base()
class User(Base):
__tablename__ = 'user'
openid = Column(String(50), primary_key=True)
cardnum = Column(String(50), nullable=False)
uuid = Column(String(50), nullable=False)
state = Column(Integer, nullable=False)
def create_all():
Base.metadata.create_all(engine)
if __name__ == '__main__':
create_all()
|
{
"content_hash": "0666ba0d9e9b413a20eec0932e446c27",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 55,
"avg_line_length": 24.38095238095238,
"alnum_prop": 0.662109375,
"repo_name": "HeraldStudio/wechat",
"id": "935437c2c80b60c25ddb3ec6ca60dd5452ca971e",
"size": "607",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mod/models/user.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "36785"
},
{
"name": "JavaScript",
"bytes": "467551"
},
{
"name": "Python",
"bytes": "92494"
}
],
"symlink_target": ""
}
|
from struct import unpack
import pefile
from viper.common.out import print_warning, print_error
def get_unicode_string(buf, pos):
out = ''
for i in range(len(buf[pos:])):
if not (ord(buf[pos + i]) >= 32 and ord(buf[pos + i]) <= 126) and not (ord(buf[pos + i + 1]) >= 32 and ord(buf[pos + i + 1]) <= 126):
out += '\x00'
break
out += buf[pos + i]
if out == '':
return None
else:
return out.replace('\x00', '')
def rc4crypt(data, key):
x = 0
box = list(range(256))
for i in range(256):
x = (x + box[i] + ord(key[i % 6])) % 256
box[i], box[x] = box[x], box[i]
x = 0
y = 0
out = []
for char in data:
x = (x + 1) % 256
y = (y + box[x]) % 256
box[x], box[y] = box[y], box[x]
out.append(chr(ord(char) ^ box[(box[x] + box[y]) % 256]))
return ''.join(out)
def extract_config(rawData):
try:
pe = pefile.PE(data=rawData)
try:
rt_string_idx = [
entry.id for entry in
pe.DIRECTORY_ENTRY_RESOURCE.entries].index(pefile.RESOURCE_TYPE['RT_RCDATA'])
except ValueError:
return None
except AttributeError:
return None
rt_string_directory = pe.DIRECTORY_ENTRY_RESOURCE.entries[rt_string_idx]
for entry in rt_string_directory.directory.entries:
if str(entry.name) == 'XTREME':
data_rva = entry.directory.entries[0].data.struct.OffsetToData
size = entry.directory.entries[0].data.struct.Size
data = pe.get_memory_mapped_image()[data_rva:data_rva + size]
return data
except:
return None
def v29(rawConfig):
config = {}
config["ID"] = get_unicode_string(rawConfig, 0x9e0)
config["Group"] = get_unicode_string(rawConfig, 0xa5a)
config["Version"] = get_unicode_string(rawConfig, 0xf2e)
config["Mutex"] = get_unicode_string(rawConfig, 0xfaa)
config["Install Dir"] = get_unicode_string(rawConfig, 0xb50)
config["Install Name"] = get_unicode_string(rawConfig, 0xad6)
config["HKLM"] = get_unicode_string(rawConfig, 0xc4f)
config["HKCU"] = get_unicode_string(rawConfig, 0xcc8)
config["Custom Reg Key"] = get_unicode_string(rawConfig, 0xdc0)
config["Custom Reg Name"] = get_unicode_string(rawConfig, 0xe3a)
config["Custom Reg Value"] = get_unicode_string(rawConfig, 0xa82)
config["ActiveX Key"] = get_unicode_string(rawConfig, 0xd42)
config["Injection"] = get_unicode_string(rawConfig, 0xbd2)
config["FTP Server"] = get_unicode_string(rawConfig, 0x111c)
config["FTP UserName"] = get_unicode_string(rawConfig, 0x1210)
config["FTP Password"] = get_unicode_string(rawConfig, 0x128a)
config["FTP Folder"] = get_unicode_string(rawConfig, 0x1196)
config["Domain1"] = str(get_unicode_string(rawConfig, 0x50) + ":" + str(unpack("<I", rawConfig[0:4])[0]))
config["Domain2"] = str(get_unicode_string(rawConfig, 0xca) + ":" + str(unpack("<I", rawConfig[4:8])[0]))
config["Domain3"] = str(get_unicode_string(rawConfig, 0x144) + ":" + str(unpack("<I", rawConfig[8:12])[0]))
config["Domain4"] = str(get_unicode_string(rawConfig, 0x1be) + ":" + str(unpack("<I", rawConfig[12:16])[0]))
config["Domain5"] = str(get_unicode_string(rawConfig, 0x238) + ":" + str(unpack("<I", rawConfig[16:20])[0]))
config["Domain6"] = str(get_unicode_string(rawConfig, 0x2b2) + ":" + str(unpack("<I", rawConfig[20:24])[0]))
config["Domain7"] = str(get_unicode_string(rawConfig, 0x32c) + ":" + str(unpack("<I", rawConfig[24:28])[0]))
config["Domain8"] = str(get_unicode_string(rawConfig, 0x3a6) + ":" + str(unpack("<I", rawConfig[28:32])[0]))
config["Domain9"] = str(get_unicode_string(rawConfig, 0x420) + ":" + str(unpack("<I", rawConfig[32:36])[0]))
config["Domain10"] = str(get_unicode_string(rawConfig, 0x49a) + ":" + str(unpack("<I", rawConfig[36:40])[0]))
config["Domain11"] = str(get_unicode_string(rawConfig, 0x514) + ":" + str(unpack("<I", rawConfig[40:44])[0]))
config["Domain12"] = str(get_unicode_string(rawConfig, 0x58e) + ":" + str(unpack("<I", rawConfig[44:48])[0]))
config["Domain13"] = str(get_unicode_string(rawConfig, 0x608) + ":" + str(unpack("<I", rawConfig[48:52])[0]))
config["Domain14"] = str(get_unicode_string(rawConfig, 0x682) + ":" + str(unpack("<I", rawConfig[52:56])[0]))
config["Domain15"] = str(get_unicode_string(rawConfig, 0x6fc) + ":" + str(unpack("<I", rawConfig[56:60])[0]))
config["Domain16"] = str(get_unicode_string(rawConfig, 0x776) + ":" + str(unpack("<I", rawConfig[60:64])[0]))
config["Domain17"] = str(get_unicode_string(rawConfig, 0x7f0) + ":" + str(unpack("<I", rawConfig[64:68])[0]))
config["Domain18"] = str(get_unicode_string(rawConfig, 0x86a) + ":" + str(unpack("<I", rawConfig[68:72])[0]))
config["Domain19"] = str(get_unicode_string(rawConfig, 0x8e4) + ":" + str(unpack("<I", rawConfig[72:76])[0]))
config["Domain20"] = str(get_unicode_string(rawConfig, 0x95e) + ":" + str(unpack("<I", rawConfig[76:80])[0]))
return config
def v32(rawConfig):
config = {}
config["ID"] = get_unicode_string(rawConfig, 0x1b4)
config["Group"] = get_unicode_string(rawConfig, 0x1ca)
config["Version"] = get_unicode_string(rawConfig, 0x2bc)
config["Mutex"] = get_unicode_string(rawConfig, 0x2d4)
config["Install Dir"] = get_unicode_string(rawConfig, 0x1f8)
config["Install Name"] = get_unicode_string(rawConfig, 0x1e2)
config["HKLM"] = get_unicode_string(rawConfig, 0x23a)
config["HKCU"] = get_unicode_string(rawConfig, 0x250)
config["ActiveX Key"] = get_unicode_string(rawConfig, 0x266)
config["Injection"] = get_unicode_string(rawConfig, 0x216)
config["FTP Server"] = get_unicode_string(rawConfig, 0x35e)
config["FTP UserName"] = get_unicode_string(rawConfig, 0x402)
config["FTP Password"] = get_unicode_string(rawConfig, 0x454)
config["FTP Folder"] = get_unicode_string(rawConfig, 0x3b0)
config["Domain1"] = str(get_unicode_string(rawConfig, 0x14) + ":" + str(unpack("<I", rawConfig[0:4])[0]))
config["Domain2"] = str(get_unicode_string(rawConfig, 0x66) + ":" + str(unpack("<I", rawConfig[4:8])[0]))
config["Domain3"] = str(get_unicode_string(rawConfig, 0xb8) + ":" + str(unpack("<I", rawConfig[8:12])[0]))
config["Domain4"] = str(get_unicode_string(rawConfig, 0x10a) + ":" + str(unpack("<I", rawConfig[12:16])[0]))
config["Domain5"] = str(get_unicode_string(rawConfig, 0x15c) + ":" + str(unpack("<I", rawConfig[16:20])[0]))
config["Msg Box Title"] = get_unicode_string(rawConfig, 0x50c)
config["Msg Box Text"] = get_unicode_string(rawConfig, 0x522)
return config
def v35(config_raw):
config = {}
config['ID'] = get_unicode_string(config_raw, 0x1b4)
config['Group'] = get_unicode_string(config_raw, 0x1ca)
config['Version'] = get_unicode_string(config_raw, 0x2d8)
config['Mutex'] = get_unicode_string(config_raw, 0x2f0)
config['Install Dir'] = get_unicode_string(config_raw, 0x1f8)
config['Install Name'] = get_unicode_string(config_raw, 0x1e2)
config['HKLM'] = get_unicode_string(config_raw, 0x23a)
config['HKCU'] = get_unicode_string(config_raw, 0x250)
config['ActiveX Key'] = get_unicode_string(config_raw, 0x266)
config['Injection'] = get_unicode_string(config_raw, 0x216)
config['FTP Server'] = get_unicode_string(config_raw, 0x380)
config['FTP UserName'] = get_unicode_string(config_raw, 0x422)
config['FTP Password'] = get_unicode_string(config_raw, 0x476)
config['FTP Folder'] = get_unicode_string(config_raw, 0x3d2)
config['Domain1'] = str(get_unicode_string(config_raw, 0x14) + ':' + str(unpack('<I', config_raw[0:4])[0]))
config['Domain2'] = str(get_unicode_string(config_raw, 0x66) + ':' + str(unpack('<I', config_raw[4:8])[0]))
config['Domain3'] = str(get_unicode_string(config_raw, 0xb8) + ':' + str(unpack('<I', config_raw[8:12])[0]))
config['Domain4'] = str(get_unicode_string(config_raw, 0x10a) + ':' + str(unpack('<I', config_raw[12:16])[0]))
config['Domain5'] = str(get_unicode_string(config_raw, 0x15c) + ':' + str(unpack('<I', config_raw[16:20])[0]))
config['Msg Box Title'] = get_unicode_string(config_raw, 0x52c)
config['Msg Box Text'] = get_unicode_string(config_raw, 0x542)
return config
def config(data):
key = 'C\x00O\x00N\x00F\x00I\x00G'
config_coded = extract_config(data)
config_raw = rc4crypt(config_coded, key)
# 1.3.x - Not implemented yet.
if len(config_raw) == 0xe10:
print_warning("Detected XtremeRAT 1.3.x, not supported yet")
config = None
# 2.9.x - Not a stable extract.
elif len(config_raw) == 0x1390 or len(config_raw) == 0x1392:
config = v29(config_raw)
# 3.1 & 3.2
elif len(config_raw) == 0x5Cc:
config = v32(config_raw)
# 3.5
elif len(config_raw) == 0x7f0:
config = v35(config_raw)
else:
print_error("No known XtremeRAT version detected")
config = None
return config
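
if __name__ == '__main__':
    # Hedged sketch of running the extractor against a sample on disk;
    # 'sample.bin' is only a placeholder path for an XtremeRAT payload.
    import sys
    sample_path = sys.argv[1] if len(sys.argv) > 1 else 'sample.bin'
    with open(sample_path, 'rb') as handle:
        print(config(handle.read()))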
|
{
"content_hash": "0f936945dca2531b2b4a82f7394d78ad",
"timestamp": "",
"source": "github",
"line_count": 178,
"max_line_length": 141,
"avg_line_length": 51.140449438202246,
"alnum_prop": 0.6255080742612326,
"repo_name": "kevthehermit/viper",
"id": "6f67877ce594d87e0495216d3faf7980920e4b60",
"size": "9249",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "viper/modules/rats/xtreme.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "1306"
},
{
"name": "JavaScript",
"bytes": "9295"
},
{
"name": "Makefile",
"bytes": "436"
},
{
"name": "Python",
"bytes": "1533848"
},
{
"name": "Smarty",
"bytes": "28213"
}
],
"symlink_target": ""
}
|
"""This module configures the alphabet."""
import os.path
def _load_alphabet(filename):
"""
Load a file containing the characters of the alphabet.
Every unique character contained in this file will be used as a symbol
in the alphabet.
"""
with open(filename, 'r') as f:
return list(set(f.read()))
def createalphabet(alphabetinput=None):
"""
Creates a sample alphabet containing printable ASCII characters
"""
if alphabetinput and os.path.isfile(alphabetinput):
return _load_alphabet(alphabetinput)
elif alphabetinput:
alpha = []
setlist = alphabetinput.split(',')
for alphaset in setlist:
a = int(alphaset.split('-')[0])
b = int(alphaset.split('-')[1])
for i in range(a, b):
alpha.append(str(unichr(i)))
return alpha
alpha = []
for i in range(32, 127):
alpha.append(str(unichr(i)))
return alpha
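
# Illustrative usage sketch, not part of the original module: the default call
# yields the 95 printable ASCII characters, while a comma-separated list of
# numeric ranges (end-exclusive) selects specific code points.
if __name__ == '__main__':
    print(len(createalphabet()))    # 95
    print(createalphabet('48-58'))  # the digits '0' through '9'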
|
{
"content_hash": "7bba771479e86402eef00bccbe656134",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 74,
"avg_line_length": 30.0625,
"alnum_prop": 0.6091476091476091,
"repo_name": "GeorgeArgyros/symautomata",
"id": "f6bb265e5ef789073ed06838eb18946097d4b742",
"size": "962",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "symautomata/alphabet.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "57"
},
{
"name": "Python",
"bytes": "224672"
}
],
"symlink_target": ""
}
|
import unittest
import pickle
from urllib3.exceptions import (HTTPError, MaxRetryError, LocationParseError,
ClosedPoolError, EmptyPoolError,
HostChangedError, ReadTimeoutError,
ConnectTimeoutError, HeaderParsingError)
from urllib3.connectionpool import HTTPConnectionPool
class TestPickle(unittest.TestCase):
def verify_pickling(self, item):
return pickle.loads(pickle.dumps(item))
def test_exceptions(self):
assert self.verify_pickling(HTTPError(None))
assert self.verify_pickling(MaxRetryError(None, None, None))
assert self.verify_pickling(LocationParseError(None))
assert self.verify_pickling(ConnectTimeoutError(None))
def test_exceptions_with_objects(self):
assert self.verify_pickling(
HTTPError('foo'))
assert self.verify_pickling(
HTTPError('foo', IOError('foo')))
assert self.verify_pickling(
MaxRetryError(HTTPConnectionPool('localhost'), '/', None))
assert self.verify_pickling(
LocationParseError('fake location'))
assert self.verify_pickling(
ClosedPoolError(HTTPConnectionPool('localhost'), None))
assert self.verify_pickling(
EmptyPoolError(HTTPConnectionPool('localhost'), None))
assert self.verify_pickling(
HostChangedError(HTTPConnectionPool('localhost'), '/', None))
assert self.verify_pickling(
ReadTimeoutError(HTTPConnectionPool('localhost'), '/', None))
class TestFormat(unittest.TestCase):
def test_header_parsing_errors(self):
hpe = HeaderParsingError('defects', 'unparsed_data')
self.assertTrue('defects' in str(hpe))
self.assertTrue('unparsed_data' in str(hpe))
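
if __name__ == '__main__':
    # Convenience entry point, not part of the upstream urllib3 test file; it
    # simply lets this module be run directly with `python test_exceptions.py`.
    unittest.main()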
|
{
"content_hash": "c80e304c614fa9e92f62d1df53b196d6",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 77,
"avg_line_length": 34.77358490566038,
"alnum_prop": 0.6559956592512208,
"repo_name": "Lukasa/urllib3",
"id": "beb538b376f7c1b21fb65c6df5c878afc6bf6a7d",
"size": "1843",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/test_exceptions.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "1127"
},
{
"name": "Python",
"bytes": "637159"
},
{
"name": "Shell",
"bytes": "3078"
}
],
"symlink_target": ""
}
|
class Stack:
def __init__(self):
self.items = []
def push(self, item):
self.items.append(item)
def pop(self):
return self.items.pop(-1)
    def peek(self):
return self.items[-1]
def isEmpty(self):
return self.items == []
def size(self):
return len(self.items)
def remove(self, item):
return self.items.remove(item)
# Extracts the '<...>' tags from a line of the text file, stopping early at the meta/br/hr exceptions
def filterTags(st):
s = []
while st.find('<') != -1 and st.find('>') != -1:
i1 = st.find('<')
i2 = st.find('>')
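        # st[i1:i2+1] is the next complete '<...>' tag in this line; in the
        # else branch below, one '<' and one '>' are consumed so the search
        # advances to the following tag on the next iteration.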
if 'meta' in st[i1:i2+1]:
break
elif 'br' in st[i1:i2+1]:
break
elif 'hr' in st[i1:i2+1]:
break
else:
s.append(st[i1:i2+1])
st = st.replace('<', '', 1)
st = st.replace('>', '', 1)
st = st.strip()
return s
# Gets the unique tags found in the file (exception tags are already skipped by filterTags)
def getValidTags(file):
text = open(file, 'r')
tag_dict = {}
for line in text:
st = filterTags(line)
for tag in st:
if tag in tag_dict:
tag_dict[tag] += 1
else:
tag_dict[tag] = 1
ValidTags = list(tag_dict.keys())
text.close()
return ValidTags
# Gets all tags in order they are encountered, feeds into stack
def getTags(file):
text = open(file, 'r')
st = []
for line in text:
st += filterTags(line)
text.close()
return st
def main():
stack = Stack()
VALIDTAGS = getValidTags('htmlfile.txt')
tags = getTags('htmlfile.txt')
EXCEPTIONS = ['<meta>', '<br/>', '<hr>', '<td>']
# Iterate through the list of tags and put them on the stack
for tag in tags:
        # When an end tag is found, look for its complementary start tag on the stack and remove it
if '/' in str(tag):
complement = tag.replace('/', '')
complement = complement.strip()
if complement in stack.items:
stack.items.remove(complement)
print('Tag is: ', tag, ':', 'Matches: stack is now', stack.items)
# Push non end tags onto the stack
else:
stack.push(tag)
print('Tag is: ', tag, ':', 'Pushed: stack is now \n', stack.items)
    # Once all tags have been processed, report whether any unmatched tags remain on the stack
if len(stack.items) > 0:
print('Processing complete. Unmatched tags remain on stack: \n', stack.items)
elif len(stack.items) < 1:
print('Processing complete. No mismatches found.\n')
# List of valid tags and exceptions
print('List of Valid Tags:\n')
print(VALIDTAGS, '\n')
print('List of Exceptions:\n')
print(EXCEPTIONS, '\n')
if __name__ == '__main__':
    main()
|
{
"content_hash": "3bb832c5913367e17a4c30f0d0fee9f5",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 100,
"avg_line_length": 29.428571428571427,
"alnum_prop": 0.5218446601941747,
"repo_name": "Etwigg/Examples",
"id": "e5391d1c6d46cbcf1193fda4aefa29205a28eb2a",
"size": "3263",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "HTML Tag Validator/htmlChecker.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1246"
},
{
"name": "C",
"bytes": "410509"
},
{
"name": "C++",
"bytes": "134770"
},
{
"name": "CSS",
"bytes": "25064"
},
{
"name": "HTML",
"bytes": "60491"
},
{
"name": "JavaScript",
"bytes": "111492"
},
{
"name": "PHP",
"bytes": "15593"
},
{
"name": "PowerShell",
"bytes": "8175"
},
{
"name": "Python",
"bytes": "6163850"
},
{
"name": "Ruby",
"bytes": "161"
},
{
"name": "Tcl",
"bytes": "1285363"
}
],
"symlink_target": ""
}
|
"""Auth providers for Home Assistant."""
import importlib
import logging
import types
from typing import Any, Dict, List, Optional
import voluptuous as vol
from voluptuous.humanize import humanize_error
from homeassistant import data_entry_flow, requirements
from homeassistant.const import CONF_ID, CONF_NAME, CONF_TYPE
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.util import dt as dt_util
from homeassistant.util.decorator import Registry
from ..auth_store import AuthStore
from ..const import MFA_SESSION_EXPIRATION
from ..models import Credentials, User, UserMeta
_LOGGER = logging.getLogger(__name__)
DATA_REQS = "auth_prov_reqs_processed"
AUTH_PROVIDERS = Registry()
AUTH_PROVIDER_SCHEMA = vol.Schema(
{
vol.Required(CONF_TYPE): str,
vol.Optional(CONF_NAME): str,
# Specify ID if you have two auth providers for same type.
vol.Optional(CONF_ID): str,
},
extra=vol.ALLOW_EXTRA,
)
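
# Illustrative examples, not part of the upstream module: mappings that would
# pass AUTH_PROVIDER_SCHEMA validation (the provider type names here are
# assumptions about configured providers, used only for demonstration).
#
#     AUTH_PROVIDER_SCHEMA({"type": "homeassistant"})
#     AUTH_PROVIDER_SCHEMA({"type": "command_line", "name": "CLI", "id": "cli"})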
class AuthProvider:
"""Provider of user authentication."""
DEFAULT_TITLE = "Unnamed auth provider"
def __init__(
self, hass: HomeAssistant, store: AuthStore, config: Dict[str, Any]
) -> None:
"""Initialize an auth provider."""
self.hass = hass
self.store = store
self.config = config
@property
def id(self) -> Optional[str]:
"""Return id of the auth provider.
Optional, can be None.
"""
return self.config.get(CONF_ID)
@property
def type(self) -> str:
"""Return type of the provider."""
return self.config[CONF_TYPE] # type: ignore
@property
def name(self) -> str:
"""Return the name of the auth provider."""
return self.config.get(CONF_NAME, self.DEFAULT_TITLE)
@property
def support_mfa(self) -> bool:
"""Return whether multi-factor auth supported by the auth provider."""
return True
async def async_credentials(self) -> List[Credentials]:
"""Return all credentials of this provider."""
users = await self.store.async_get_users()
return [
credentials
for user in users
for credentials in user.credentials
if (
credentials.auth_provider_type == self.type
and credentials.auth_provider_id == self.id
)
]
@callback
def async_create_credentials(self, data: Dict[str, str]) -> Credentials:
"""Create credentials."""
return Credentials(
auth_provider_type=self.type, auth_provider_id=self.id, data=data
)
# Implement by extending class
async def async_login_flow(self, context: Optional[Dict]) -> "LoginFlow":
"""Return the data flow for logging in with auth provider.
Auth provider should extend LoginFlow and return an instance.
"""
raise NotImplementedError
async def async_get_or_create_credentials(
self, flow_result: Dict[str, str]
) -> Credentials:
"""Get credentials based on the flow result."""
raise NotImplementedError
async def async_user_meta_for_credentials(
self, credentials: Credentials
) -> UserMeta:
"""Return extra user metadata for credentials.
Will be used to populate info when creating a new user.
"""
raise NotImplementedError
async def async_initialize(self) -> None:
"""Initialize the auth provider."""
async def auth_provider_from_config(
hass: HomeAssistant, store: AuthStore, config: Dict[str, Any]
) -> AuthProvider:
"""Initialize an auth provider from a config."""
provider_name = config[CONF_TYPE]
module = await load_auth_provider_module(hass, provider_name)
try:
config = module.CONFIG_SCHEMA(config) # type: ignore
except vol.Invalid as err:
_LOGGER.error(
"Invalid configuration for auth provider %s: %s",
provider_name,
humanize_error(config, err),
)
raise
return AUTH_PROVIDERS[provider_name](hass, store, config) # type: ignore
async def load_auth_provider_module(
hass: HomeAssistant, provider: str
) -> types.ModuleType:
"""Load an auth provider."""
try:
module = importlib.import_module(f"homeassistant.auth.providers.{provider}")
except ImportError as err:
_LOGGER.error("Unable to load auth provider %s: %s", provider, err)
raise HomeAssistantError(
f"Unable to load auth provider {provider}: {err}"
) from err
if hass.config.skip_pip or not hasattr(module, "REQUIREMENTS"):
return module
processed = hass.data.get(DATA_REQS)
if processed is None:
processed = hass.data[DATA_REQS] = set()
elif provider in processed:
return module
# https://github.com/python/mypy/issues/1424
reqs = module.REQUIREMENTS # type: ignore
await requirements.async_process_requirements(
hass, f"auth provider {provider}", reqs
)
processed.add(provider)
return module
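
# Illustrative call sequence, not part of the upstream module; `hass` and
# `store` are assumed to come from a running Home Assistant instance:
#
#     provider = await auth_provider_from_config(
#         hass, store, {"type": "homeassistant"}
#     )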
class LoginFlow(data_entry_flow.FlowHandler):
"""Handler for the login flow."""
def __init__(self, auth_provider: AuthProvider) -> None:
"""Initialize the login flow."""
self._auth_provider = auth_provider
self._auth_module_id: Optional[str] = None
self._auth_manager = auth_provider.hass.auth
self.available_mfa_modules: Dict[str, str] = {}
self.created_at = dt_util.utcnow()
self.invalid_mfa_times = 0
self.user: Optional[User] = None
async def async_step_init(
self, user_input: Optional[Dict[str, str]] = None
) -> Dict[str, Any]:
"""Handle the first step of login flow.
Return self.async_show_form(step_id='init') if user_input is None.
Return await self.async_finish(flow_result) if login init step pass.
"""
raise NotImplementedError
async def async_step_select_mfa_module(
self, user_input: Optional[Dict[str, str]] = None
) -> Dict[str, Any]:
"""Handle the step of select mfa module."""
errors = {}
if user_input is not None:
auth_module = user_input.get("multi_factor_auth_module")
if auth_module in self.available_mfa_modules:
self._auth_module_id = auth_module
return await self.async_step_mfa()
errors["base"] = "invalid_auth_module"
if len(self.available_mfa_modules) == 1:
self._auth_module_id = list(self.available_mfa_modules)[0]
return await self.async_step_mfa()
return self.async_show_form(
step_id="select_mfa_module",
data_schema=vol.Schema(
{"multi_factor_auth_module": vol.In(self.available_mfa_modules)}
),
errors=errors,
)
async def async_step_mfa(
self, user_input: Optional[Dict[str, str]] = None
) -> Dict[str, Any]:
"""Handle the step of mfa validation."""
assert self.user
errors = {}
assert self._auth_module_id is not None
auth_module = self._auth_manager.get_auth_mfa_module(self._auth_module_id)
if auth_module is None:
            # Passing invalid input to async_step_select_mfa_module will
            # surface the invalid_auth_module error
return await self.async_step_select_mfa_module(user_input={})
if user_input is None and hasattr(
auth_module, "async_initialize_login_mfa_step"
):
try:
await auth_module.async_initialize_login_mfa_step( # type: ignore
self.user.id
)
except HomeAssistantError:
_LOGGER.exception("Error initializing MFA step")
return self.async_abort(reason="unknown_error")
if user_input is not None:
expires = self.created_at + MFA_SESSION_EXPIRATION
if dt_util.utcnow() > expires:
return self.async_abort(reason="login_expired")
result = await auth_module.async_validate(self.user.id, user_input)
if not result:
errors["base"] = "invalid_code"
self.invalid_mfa_times += 1
if self.invalid_mfa_times >= auth_module.MAX_RETRY_TIME > 0:
return self.async_abort(reason="too_many_retry")
if not errors:
return await self.async_finish(self.user)
description_placeholders: Dict[str, Optional[str]] = {
"mfa_module_name": auth_module.name,
"mfa_module_id": auth_module.id,
}
return self.async_show_form(
step_id="mfa",
data_schema=auth_module.input_schema,
description_placeholders=description_placeholders,
errors=errors,
)
async def async_finish(self, flow_result: Any) -> Dict:
"""Handle the pass of login flow."""
return self.async_create_entry(title=self._auth_provider.name, data=flow_result)
|
{
"content_hash": "18fd357b84830eef7b507fc48202f843",
"timestamp": "",
"source": "github",
"line_count": 276,
"max_line_length": 88,
"avg_line_length": 33.19565217391305,
"alnum_prop": 0.6162409954158481,
"repo_name": "sdague/home-assistant",
"id": "1fe59346b00804bc7aeaeda0ffb79e4702b0ad25",
"size": "9162",
"binary": false,
"copies": "6",
"ref": "refs/heads/dev",
"path": "homeassistant/auth/providers/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1488"
},
{
"name": "Python",
"bytes": "27869189"
},
{
"name": "Shell",
"bytes": "4528"
}
],
"symlink_target": ""
}
|