desc | decl | bodies
---|---|---
':param factory: Stream-based factory to be wrapped.
:type factory: A subclass of ``twisted.internet.protocol.Factory``
:param url: WebSocket URL of the server this client factory will connect to.
:type url: unicode'
| def __init__(self, factory, url, reactor=None, enableCompression=True, autoFragmentSize=0, subprotocol=None):
| self._factory = factory
self._subprotocols = [u'binary', u'base64']
if subprotocol:
self._subprotocols.append(subprotocol)
WebSocketClientFactory.__init__(self, url=url, reactor=reactor, protocols=self._subprotocols)
self.setProtocolOptions(autoFragmentSize=autoFragmentSize)
self.setProtocolOptions(failByDrop=False)
if enableCompression:
offers = [PerMessageDeflateOffer()]
self.setProtocolOptions(perMessageCompressionOffers=offers)
def accept(response):
if isinstance(response, PerMessageDeflateResponse):
return PerMessageDeflateResponseAccept(response)
self.setProtocolOptions(perMessageCompressionAccept=accept)
|
':param factory: A callable that produces instances that implement
:class:`autobahn.wamp.interfaces.ITransportHandler`
:type factory: callable
:param serializers: A list of WAMP serializers to use (or ``None``
for all available serializers).
:type serializers: list of objects implementing
:class:`autobahn.wamp.interfaces.ISerializer`'
| def __init__(self, factory, *args, **kwargs):
| serializers = kwargs.pop('serializers', None)
websocket.WampWebSocketServerFactory.__init__(self, factory, serializers)
kwargs['protocols'] = self._protocols
WebSocketServerFactory.__init__(self, *args, **kwargs)
|
':param factory: A callable that produces instances that implement
:class:`autobahn.wamp.interfaces.ITransportHandler`
:type factory: callable
:param serializers: A list of WAMP serializers to use (or ``None``
for all available serializers).
:type serializers: list of objects implementing
:class:`autobahn.wamp.interfaces.ISerializer`'
| def __init__(self, factory, *args, **kwargs):
| serializers = kwargs.pop('serializers', None)
websocket.WampWebSocketClientFactory.__init__(self, factory, serializers)
kwargs['protocols'] = self._protocols
WebSocketClientFactory.__init__(self, *args, **kwargs)
|
':param url: The WebSocket URL of the WAMP router to connect to (e.g. `ws://somehost.com:8090/somepath`)
:type url: str
:param realm: The WAMP realm to join the application session to.
:type realm: str
:param extra: Optional extra configuration to forward to the application component.
:type extra: dict
:param serializers: A list of WAMP serializers to use (or None for default serializers).
Serializers must implement :class:`autobahn.wamp.interfaces.ISerializer`.
:type serializers: list
:param ssl: (Optional). If specified this should be an
instance suitable to pass as ``sslContextFactory`` to
:class:`twisted.internet.endpoints.SSL4ClientEndpoint` such
as :class:`twisted.internet.ssl.CertificateOptions`. Leaving
it as ``None`` will use the result of calling Twisted\'s
:meth:`twisted.internet.ssl.platformTrust` which tries to use
your distribution\'s CA certificates.
:type ssl: :class:`twisted.internet.ssl.CertificateOptions`
:param proxy: Explicit proxy server to use; a dict with ``host`` and ``port`` keys
:type proxy: dict or None
:param headers: Additional headers to send (only applies to WAMP-over-WebSocket).
:type headers: dict'
| def __init__(self, url, realm=None, extra=None, serializers=None, ssl=None, proxy=None, headers=None):
| assert (type(url) == six.text_type)
assert ((realm is None) or (type(realm) == six.text_type))
assert ((extra is None) or (type(extra) == dict))
assert ((headers is None) or (type(headers) == dict))
assert ((proxy is None) or (type(proxy) == dict))
self.url = url
self.realm = realm
self.extra = (extra or dict())
self.serializers = serializers
self.ssl = ssl
self.proxy = proxy
self.headers = headers
self._client_service = None
self._connect_successes = 0
|
'Stop reconnecting, if auto-reconnecting was enabled.'
| @public
def stop(self):
| self.log.debug('{klass}.stop()', klass=self.__class__.__name__)
if self._client_service:
return self._client_service.stopService()
else:
return succeed(None)
|
'Run the application component.
:param make: A factory that produces instances of :class:`autobahn.twisted.wamp.ApplicationSession`
when called with an instance of :class:`autobahn.wamp.types.ComponentConfig`.
:type make: callable
:param start_reactor: When ``True`` (the default) this method starts
the Twisted reactor and doesn\'t return until the reactor
stops. If there are any problems starting the reactor or
connect()-ing, we stop the reactor and raise the exception
back to the caller.
:returns: None is returned, unless you specify
``start_reactor=False`` in which case the Deferred that
connect() returns is returned; this will callback() with
an IProtocol instance, which will actually be an instance
of :class:`WampWebSocketClientProtocol`'
| @public
def run(self, make, start_reactor=True, auto_reconnect=False, log_level='info'):
| if start_reactor:
from twisted.internet import reactor
txaio.use_twisted()
txaio.config.loop = reactor
txaio.start_logging(level=log_level)
if callable(make):
def create():
cfg = ComponentConfig(self.realm, self.extra)
try:
session = make(cfg)
except Exception:
self.log.failure('ApplicationSession could not be instantiated: {log_failure.value}')
if (start_reactor and reactor.running):
reactor.stop()
raise
else:
return session
else:
create = make
if self.url.startswith(u'rs'):
(isSecure, host, port) = parse_rs_url(self.url)
serializer = (self.serializers[0] if self.serializers else None)
transport_factory = WampRawSocketClientFactory(create, serializer=serializer)
else:
(isSecure, host, port, resource, path, params) = parse_ws_url(self.url)
transport_factory = WampWebSocketClientFactory(create, url=self.url, serializers=self.serializers, proxy=self.proxy, headers=self.headers)
offers = [PerMessageDeflateOffer()]
def accept(response):
if isinstance(response, PerMessageDeflateResponse):
return PerMessageDeflateResponseAccept(response)
transport_factory.setProtocolOptions(maxFramePayloadSize=1048576, maxMessagePayloadSize=1048576, autoFragmentSize=65536, failByDrop=False, openHandshakeTimeout=2.5, closeHandshakeTimeout=1.0, tcpNoDelay=True, autoPingInterval=10.0, autoPingTimeout=5.0, autoPingSize=4, perMessageCompressionOffers=offers, perMessageCompressionAccept=accept)
transport_factory.noisy = False
context_factory = None
if (self.ssl is not None):
if (not isSecure):
raise RuntimeError(('ssl= argument value passed to %s conflicts with the "ws:" prefix of the url argument. Did you mean to use "wss:"?' % self.__class__.__name__))
context_factory = self.ssl
elif isSecure:
from twisted.internet.ssl import optionsForClientTLS
context_factory = optionsForClientTLS(host)
from twisted.internet import reactor
if (self.proxy is not None):
from twisted.internet.endpoints import TCP4ClientEndpoint
client = TCP4ClientEndpoint(reactor, self.proxy['host'], self.proxy['port'])
transport_factory.contextFactory = context_factory
elif isSecure:
from twisted.internet.endpoints import SSL4ClientEndpoint
assert (context_factory is not None)
client = SSL4ClientEndpoint(reactor, host, port, context_factory)
else:
from twisted.internet.endpoints import TCP4ClientEndpoint
client = TCP4ClientEndpoint(reactor, host, port)
def cleanup(proto):
if (hasattr(proto, '_session') and (proto._session is not None)):
if proto._session.is_attached():
return proto._session.leave()
elif proto._session.is_connected():
return proto._session.disconnect()
def init_proto(proto):
self._connect_successes += 1
reactor.addSystemEventTrigger('before', 'shutdown', cleanup, proto)
return proto
use_service = False
if auto_reconnect:
try:
from twisted.application.internet import ClientService
from twisted.application.internet import backoffPolicy
use_service = True
except ImportError:
use_service = False
if use_service:
self.log.debug('using t.a.i.ClientService')
default_retry = backoffPolicy()
if False:  # disabled branch: this retry policy would stop reconnecting unless at least one prior connect succeeded
def retry(failed_attempts):
if (self._connect_successes > 0):
return default_retry(failed_attempts)
else:
self.stop()
return 100000000000000
else:
retry = default_retry
self._client_service = ClientService(client, transport_factory, retryPolicy=retry)
self._client_service.startService()
d = self._client_service.whenConnected()
else:
self.log.debug('using t.i.e.connect()')
d = client.connect(transport_factory)
d.addCallback(init_proto)
if start_reactor:
class ErrorCollector(object):
exception = None
def __call__(self, failure):
self.exception = failure.value
reactor.stop()
connect_error = ErrorCollector()
d.addErrback(connect_error)
reactor.run()
if connect_error.exception:
raise connect_error.exception
else:
return d
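For context, here is a minimal usage sketch of this runner (router URL and realm are placeholder values, not taken from the source above):

```python
from autobahn.twisted.wamp import ApplicationSession, ApplicationRunner

class MyComponent(ApplicationSession):
    def onJoin(self, details):
        print('session joined')

if __name__ == '__main__':
    runner = ApplicationRunner(u'ws://localhost:8080/ws', u'realm1')
    runner.run(MyComponent)  # starts the Twisted reactor and blocks until it stops
```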
|
':param config: The component configuration.
:type config: Instance of :class:`autobahn.wamp.types.ComponentConfig`
:param app: The application this session is for.
:type app: Instance of :class:`autobahn.twisted.wamp.Application`.'
| def __init__(self, config, app):
| ApplicationSession.__init__(self, config)
self.app = app
|
'Implements :func:`autobahn.wamp.interfaces.ISession.onConnect`'
| @inlineCallbacks
def onConnect(self):
| (yield self.app._fire_signal('onconnect'))
self.join(self.config.realm)
|
'Implements :func:`autobahn.wamp.interfaces.ISession.onJoin`'
| @inlineCallbacks
def onJoin(self, details):
| for (uri, proc) in self.app._procs:
(yield self.register(proc, uri))
for (uri, handler) in self.app._handlers:
(yield self.subscribe(handler, uri))
(yield self.app._fire_signal('onjoined'))
|
'Implements :func:`autobahn.wamp.interfaces.ISession.onLeave`'
| @inlineCallbacks
def onLeave(self, details):
| (yield self.app._fire_signal('onleave'))
self.disconnect()
|
'Implements :func:`autobahn.wamp.interfaces.ISession.onDisconnect`'
| @inlineCallbacks
def onDisconnect(self):
| (yield self.app._fire_signal('ondisconnect'))
|
':param prefix: The application URI prefix to use for procedures and topics,
e.g. ``"com.example.myapp"``.
:type prefix: unicode'
| def __init__(self, prefix=None):
| self._prefix = prefix
self._procs = []
self._handlers = []
self._signals = {}
self.session = None
|
'Factory creating a WAMP application session for the application.
:param config: Component configuration.
:type config: Instance of :class:`autobahn.wamp.types.ComponentConfig`
:returns: obj -- An object that derives from
:class:`autobahn.twisted.wamp.ApplicationSession`'
| def __call__(self, config):
| assert (self.session is None)
self.session = _ApplicationSession(config, self)
return self.session
|
'Run the application.
:param url: The URL of the WAMP router to connect to.
:type url: unicode
:param realm: The realm on the WAMP router to join.
:type realm: unicode'
| def run(self, url=u'ws://localhost:8080/ws', realm=u'realm1', start_reactor=True):
| runner = ApplicationRunner(url, realm)
return runner.run(self.__call__, start_reactor)
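A minimal sketch tying `run()` together with the decorators documented below (URL, realm and procedure name are placeholders):

```python
from autobahn.twisted.wamp import Application

app = Application(u'com.myapp')

@app.register()
def add2(a, b):
    # implicit URI: 'com.myapp.add2'
    return a + b

if __name__ == '__main__':
    app.run(u'ws://localhost:8080/ws', u'realm1')  # blocks until the reactor stops
```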
|
'Decorator exposing a function as a remote callable procedure.
The first argument of the decorator should be the URI of the procedure
to register under.
:Example:
.. code-block:: python
@app.register(\'com.myapp.add2\')
def add2(a, b):
return a + b
The above function can then be called remotely over WAMP using the URI `com.myapp.add2`
under which it was registered.
If no URI is given, the URI is constructed from the application URI prefix
and the Python function name.
:Example:
.. code-block:: python
app = Application(\'com.myapp\')
# implicit URI will be \'com.myapp.add2\'
@app.register()
def add2(a, b):
return a + b
If the function `yields` (is a co-routine), the `@inlineCallbacks` decorator
will be applied automatically to it. In that case, if you wish to return something,
you should use `returnValue`:
:Example:
.. code-block:: python
from twisted.internet.defer import returnValue
@app.register(\'com.myapp.add2\')
def add2(a, b):
res = yield stuff(a, b)
returnValue(res)
:param uri: The URI of the procedure to register under.
:type uri: unicode'
| def register(self, uri=None):
| def decorator(func):
if uri:
_uri = uri
else:
assert (self._prefix is not None)
_uri = '{0}.{1}'.format(self._prefix, func.__name__)
if inspect.isgeneratorfunction(func):
func = inlineCallbacks(func)
self._procs.append((_uri, func))
return func
return decorator
|
'Decorator attaching a function as an event handler.
The first argument of the decorator should be the URI of the topic
to subscribe to. If no URI is given, the URI is constructed from
the application URI prefix and the Python function name.
If the function yields (i.e. is a co-routine), it is assumed to be an asynchronous
process and `inlineCallbacks` will be applied to it.
:Example:
.. code-block:: python
@app.subscribe(\'com.myapp.topic1\')
def onevent1(x, y):
print("got event on topic1", x, y)
:param uri: The URI of the topic to subscribe to.
:type uri: unicode'
| def subscribe(self, uri=None):
| def decorator(func):
if uri:
_uri = uri
else:
assert (self._prefix is not None)
_uri = '{0}.{1}'.format(self._prefix, func.__name__)
if inspect.isgeneratorfunction(func):
func = inlineCallbacks(func)
self._handlers.append((_uri, func))
return func
return decorator
|
'Decorator attaching a function as handler for application signals.
Signals are local events triggered internally and exposed to the
developer to be able to react to the application lifecycle.
If the function yields, it is assumed to be an asynchronous
coroutine and `inlineCallbacks` will be applied to it.
Current signals:
- `onjoined`: Triggered after the application session has joined the
realm on the router and registered/subscribed all procedures
and event handlers that were setup via decorators.
- `onleave`: Triggered when the application session leaves the realm.
.. code-block:: python
@app.signal(\'onjoined\')
def _():
# do something after the app has joined a realm
:param name: The name of the signal to watch.
:type name: unicode'
| def signal(self, name):
| def decorator(func):
if inspect.isgeneratorfunction(func):
func = inlineCallbacks(func)
self._signals.setdefault(name, []).append(func)
return func
return decorator
|
'Utility method to call all signal handlers for a given signal.
:param name: The signal name.
:type name: str'
| @inlineCallbacks
def _fire_signal(self, name, *args, **kwargs):
| for handler in self._signals.get(name, []):
try:
(yield handler(*args, **kwargs))
except Exception as e:
self.log.info('Warning: exception in signal handler swallowed: {err}', err=e)
|
'Patch ``name`` so that Twisted will grab a fake reactor instead of
a real one.'
| def patch_reactor(self, name, new_reactor):
| if hasattr(twisted.internet, name):
self.patch(twisted.internet, name, new_reactor)
else:
def _cleanup():
delattr(twisted.internet, name)
setattr(twisted.internet, name, new_reactor)
|
'Patch ``sys.modules`` so that Twisted believes there is no
installed reactor.'
| def patch_modules(self):
| old_modules = dict(sys.modules)
new_modules = dict(sys.modules)
del new_modules['twisted.internet.reactor']
def _cleanup():
sys.modules = old_modules
self.addCleanup(_cleanup)
sys.modules = new_modules
|
'``install_optimal_reactor`` will use the default reactor if it is
unable to detect the platform it is running on.'
| def test_unknown(self):
| reactor_mock = Mock()
self.patch_reactor('default', reactor_mock)
self.patch(sys, 'platform', 'unknown')
self.patch_modules()
choosereactor.install_optimal_reactor()
reactor_mock.install.assert_called_once_with()
|
'``install_optimal_reactor`` will install KQueueReactor on
Darwin (OS X).'
| def test_mac(self):
| reactor_mock = Mock()
self.patch_reactor('kqreactor', reactor_mock)
self.patch(sys, 'platform', 'darwin')
self.patch_modules()
choosereactor.install_optimal_reactor()
reactor_mock.install.assert_called_once_with()
|
'``install_optimal_reactor`` will install EPollReactor on Linux.'
| def test_linux(self):
| reactor_mock = Mock()
self.patch_reactor('epollreactor', reactor_mock)
self.patch(sys, 'platform', 'linux')
self.patch_modules()
choosereactor.install_optimal_reactor()
reactor_mock.install.assert_called_once_with()
|
'A handshake from a client only supporting Hixie-76 will fail.'
| def test_handshake_fails(self):
| t = FakeTransport()
f = WebSocketServerFactory()
p = WebSocketServerProtocol()
p.factory = f
p.transport = t
http_request = 'GET /demo HTTP/1.1\r\nHost: example.com\r\nConnection: Upgrade\r\nSec-WebSocket-Key2: 12998 5 Y3 1 .P00\r\nSec-WebSocket-Protocol: sample\r\nUpgrade: WebSocket\r\nSec-WebSocket-Key1: 4 @1 46546xW%0l 1 5\r\nOrigin: http://example.com\r\n\r\n^n:ds[4U'
p.openHandshakeTimeout = 0
p._connectionMade()
p.data = http_request
p.processHandshake()
self.assertIn('HTTP/1.1 400', t._written)
self.assertIn('Hixie76 protocol not supported', t._written)
|
'Test the examples from the docs'
| def test_match_origin_documentation_example(self):
| self.factory.setProtocolOptions(allowedOrigins=['*://*.example.com:*'])
self.factory.isSecure = True
self.factory.port = 443
self.proto.data = '\r\n'.join(['GET /ws HTTP/1.1', 'Host: www.example.com', 'Sec-WebSocket-Version: 13', 'Origin: http://www.example.com', 'Sec-WebSocket-Extensions: permessage-deflate', 'Sec-WebSocket-Key: tXAxWFUqnhi86Ajj7dRY5g==', 'Connection: keep-alive, Upgrade', 'Upgrade: websocket', '\r\n'])
self.proto.consumeData()
self.assertFalse(self.proto.failHandshake.called, 'Handshake should have succeeded')
|
'All the example origins from RFC6454 (3.2.1)'
| def test_match_origin_examples(self):
| from autobahn.websocket.protocol import _is_same_origin, _url_to_origin
policy = wildcards2patterns(['*example.com:*'])
for url in ['http://example.com/', 'http://example.com:80/', 'http://example.com/path/file', 'http://example.com/;semi=true', '//example.com/', 'http://@example.com']:
self.assertTrue(_is_same_origin(_url_to_origin(url), 'http', 80, policy), url)
|
'All the example \'not-same\' origins from RFC6454 (3.2.1)'
| def test_match_origin_counter_examples(self):
| from autobahn.websocket.protocol import _is_same_origin, _url_to_origin
policy = wildcards2patterns(['example.com'])
for url in ['http://ietf.org/', 'http://example.org/', 'https://example.com/', 'http://example.com:8080/', 'http://www.example.com/']:
self.assertFalse(_is_same_origin(_url_to_origin(url), 'http', 80, policy))
|
'A client can connect to a server.'
| def test_handshake_succeeds(self):
| session_mock = Mock()
t = FakeTransport()
f = WampRawSocketClientFactory((lambda : session_mock))
p = WampRawSocketClientProtocol()
p.transport = t
p.factory = f
server_session_mock = Mock()
st = FakeTransport()
sf = WampRawSocketServerFactory((lambda : server_session_mock))
sp = WampRawSocketServerProtocol()
sp.transport = st
sp.factory = sf
sp.connectionMade()
p.connectionMade()
sp.dataReceived(t._written[0:1])
sp.dataReceived(t._written[1:4])
p.dataReceived(st._written)
session_mock.onOpen.assert_called_once_with(p)
server_session_mock.onOpen.assert_called_once_with(sp)
|
':param wsgiResource: The WSGI to serve as root resource.
:type wsgiResource: Instance of `twisted.web.wsgi.WSGIResource <http://twistedmatrix.com/documents/current/api/twisted.web.wsgi.WSGIResource.html>`_.
:param children: A dictionary with string keys constituting URL subpaths, and Twisted Web resources as values.
:type children: dict'
| def __init__(self, wsgiResource, children):
| Resource.__init__(self)
self._wsgiResource = wsgiResource
self.children = children
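A hedged sketch of the typical use of this root resource: serving a WSGI app at `/` while exposing a WebSocket endpoint under a subpath (`MyServerProtocol` and the trivial WSGI app are placeholders):

```python
from twisted.internet import reactor
from twisted.web.server import Site
from twisted.web.wsgi import WSGIResource
from autobahn.twisted.websocket import WebSocketServerFactory, WebSocketServerProtocol
from autobahn.twisted.resource import WebSocketResource, WSGIRootResource

class MyServerProtocol(WebSocketServerProtocol):
    pass  # placeholder WebSocket protocol

def wsgi_app(environ, start_response):
    # placeholder WSGI application
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return [b'Hello from WSGI']

ws_factory = WebSocketServerFactory(u'ws://127.0.0.1:8080')
ws_factory.protocol = MyServerProtocol

wsgi_resource = WSGIResource(reactor, reactor.getThreadPool(), wsgi_app)
root = WSGIRootResource(wsgi_resource, {b'ws': WebSocketResource(ws_factory)})
reactor.listenTCP(8080, Site(root))
reactor.run()
```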
|
':param factory: An instance of :class:`autobahn.twisted.websocket.WebSocketServerFactory`.
:type factory: obj'
| def __init__(self, factory):
| self._factory = factory
|
'This resource cannot have children, hence this will always fail.'
| def getChildWithDefault(self, name, request):
| return NoResource('No such child resource.')
|
'Render the resource. This will take over the transport underlying
the request, create a :class:`autobahn.twisted.websocket.WebSocketServerProtocol`
and let that do any subsequent communication.'
| def render(self, request):
| protocol = self._factory.buildProtocol(request.transport.getPeer())
if (not protocol):
request.setResponseCode(500)
return ''
(transport, request.channel.transport) = (request.channel.transport, None)
if isinstance(transport, ProtocolWrapper):
transport.wrappedProtocol = protocol
else:
transport.protocol = protocol
protocol.makeConnection(transport)
if hasattr(transport, '_networkProducer'):
transport._networkProducer.resumeProducing()
elif hasattr(transport, 'resumeProducing'):
transport.resumeProducing()
if PY3:
# On Python 3, request.method/uri and the header names/values are bytes, so byte literals are required here.
data = (((request.method + b' ') + request.uri) + b' HTTP/1.1\r\n')
for h in request.requestHeaders.getAllRawHeaders():
data += (((h[0] + b': ') + b','.join(h[1])) + b'\r\n')
data += b'\r\n'
data += request.content.read()
else:
data = ('%s %s HTTP/1.1\r\n' % (request.method, request.uri))
for h in request.requestHeaders.getAllRawHeaders():
data += ('%s: %s\r\n' % (h[0], ','.join(h[1])))
data += '\r\n'
protocol.dataReceived(data)
return NOT_DONE_YET
|
'Create and connect a WAMP-over-XXX transport.'
| def _connect_transport(self, loop, transport, session_factory):
| factory = _create_transport_factory(loop, transport, session_factory)
if (transport.endpoint[u'type'] == u'tcp'):
version = transport.endpoint.get(u'version', 4)
if (version not in [4, 6]):
raise ValueError('invalid IP version {} in client endpoint configuration'.format(version))
host = transport.endpoint[u'host']
if (type(host) != six.text_type):
raise ValueError('invalid type {} for host in client endpoint configuration'.format(type(host)))
port = transport.endpoint[u'port']
if (type(port) not in six.integer_types):
raise ValueError('invalid type {} for port in client endpoint configuration'.format(type(port)))
timeout = transport.endpoint.get(u'timeout', 10)
if (type(timeout) not in six.integer_types):
raise ValueError('invalid type {} for timeout in client endpoint configuration'.format(type(timeout)))
tls = transport.endpoint.get(u'tls', None)
tls_hostname = None
if tls:
if isinstance(tls, dict):
for k in tls.keys():
if (k not in [u'hostname', u'trust_root']):
raise ValueError("Invalid key '{}' in 'tls' config".format(k))
hostname = tls.get(u'hostname', host)
if (type(hostname) != six.text_type):
raise ValueError('invalid type {} for hostname in TLS client endpoint configuration'.format(type(hostname)))
cert_fname = tls.get(u'trust_root', None)
tls_hostname = hostname
tls = True
if (cert_fname is not None):
tls = ssl.create_default_context(purpose=ssl.Purpose.SERVER_AUTH, cafile=cert_fname)
elif isinstance(tls, ssl.SSLContext):
tls_hostname = host
elif (tls in [False, True]):
if tls:
tls_hostname = host
else:
raise RuntimeError('unknown type {} for "tls" configuration in transport'.format(type(tls)))
f = loop.create_connection(protocol_factory=factory, host=host, port=port, ssl=tls, server_hostname=tls_hostname)
time_f = asyncio.ensure_future(asyncio.wait_for(f, timeout=timeout))
return time_f
elif (transport.endpoint[u'type'] == u'unix'):
path = transport.endpoint[u'path']
timeout = int(transport.endpoint.get(u'timeout', 10))
f = loop.create_unix_connection(protocol_factory=factory, path=path)
time_f = asyncio.ensure_future(asyncio.wait_for(f, timeout=timeout))
return time_f
else:
assert False, 'should not arrive here'
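For illustration, a sketch of a client endpoint configuration of the shape validated above (all values are made up; only the keys and types checked by `_connect_transport` are shown):

```python
endpoint = {
    u'type': u'tcp',                      # or u'unix' (then u'path' replaces host/port)
    u'host': u'router.example.com',       # must be a text string
    u'port': 443,                         # must be an integer
    u'timeout': 10,                       # seconds, must be an integer
    u'tls': {
        u'hostname': u'router.example.com',
        # u'trust_root': u'/path/to/ca.pem',  # optional CA file for server authentication
    },
}
```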
|
'This starts the Component, which means it will start connecting
(and re-connecting) to its configured transports. A Component
runs until it is "done", which means one of:
- There was a "main" function defined, and it completed successfully;
- Something called ``.leave()`` on our session, and we left successfully;
- ``.stop()`` was called, and completed successfully;
- none of our transports were able to connect successfully (failure);
:returns: a Future which will resolve (to ``None``) when we are
"done" or with an error if something went wrong.'
| def start(self, loop=None):
| if (loop is None):
self.log.warn('Using default loop')
loop = asyncio.get_event_loop()
done_f = txaio.create_future()
transport_gen = itertools.cycle(self._transports)
f0 = self.fire('start', loop, self)
reconnect = [True]
def one_reconnect_loop(_):
self.log.debug('Entering re-connect loop')
if (not reconnect[0]):
return
transport = next(transport_gen)
if transport.can_reconnect():
delay = transport.next_delay()
self.log.debug('trying transport {transport_idx} using connect delay {transport_delay}', transport_idx=transport.idx, transport_delay=delay)
delay_f = asyncio.ensure_future(txaio.sleep(delay))
def actual_connect(_):
f = self._connect_once(loop, transport)
def session_done(x):
txaio.resolve(done_f, None)
def connect_error(fail):
if isinstance(fail.value, asyncio.CancelledError):
reconnect[0] = False
txaio.reject(done_f, fail)
return
self.log.debug(u'component failed: {error}', error=txaio.failure_message(fail))
self.log.debug(u'{tb}', tb=txaio.failure_format_traceback(fail))
if isinstance(fail.value, ApplicationError):
if (fail.value.error in [u'wamp.error.no_such_realm']):
reconnect[0] = False
self.log.error(u'Fatal error, not reconnecting')
txaio.reject(done_f, fail)
return
self.log.error(u'{msg}', msg=fail.value.error_message())
return one_reconnect_loop(None)
elif isinstance(fail.value, OSError):
self.log.info(u'Connection failed: {msg}', msg=txaio.failure_message(fail))
return one_reconnect_loop(None)
elif _is_ssl_error(fail.value):
self.log.error(u'TLS failure: {reason}', reason=fail.value.args[1])
self.log.error(u'Marking this transport as failed')
transport.failed()
else:
self.log.error(u'Connection failed: {error}', error=txaio.failure_message(fail))
self.log.debug(u'{tb}', tb=txaio.failure_format_traceback(fail))
return one_reconnect_loop(None)
txaio.add_callbacks(f, session_done, connect_error)
txaio.add_callbacks(delay_f, actual_connect, error)
if False:  # disabled branch: would end the reconnect loop once no transport can reconnect
if (not self._can_reconnect()):
self.log.info('No remaining transports to try')
reconnect[0] = False
def error(fail):
self.log.info('Internal error {msg}', msg=txaio.failure_message(fail))
self.log.debug('{tb}', tb=txaio.failure_format_traceback(fail))
txaio.reject(done_f, fail)
txaio.add_callbacks(f0, one_reconnect_loop, error)
return done_f
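A minimal sketch of driving this method, assuming `component` is an instance of the class that defines `start()` (e.g. `autobahn.asyncio.component.Component`):

```python
import asyncio

loop = asyncio.get_event_loop()
done = component.start(loop=loop)   # future that resolves to None when the component is "done"
loop.run_until_complete(done)
```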
|
'Implements :func:`autobahn.wamp.interfaces.ITransport.send`'
| def send(self, msg):
| if self.isOpen():
self.log.debug('WampRawSocketProtocol: TX WAMP message: {msg}', msg=msg)
try:
(payload, _) = self._serializer.serialize(msg)
except Exception as e:
raise SerializationError('WampRawSocketProtocol: unable to serialize WAMP application payload ({0})'.format(e))
else:
self.sendString(payload)
self.log.debug('WampRawSocketProtocol: TX octets: {octets}', octets=_LazyHexFormatter(payload))
else:
raise TransportLost()
|
'Implements :func:`autobahn.wamp.interfaces.ITransport.isOpen`'
| def isOpen(self):
| return (hasattr(self, '_session') and (self._session is not None))
|
'Implements :func:`autobahn.wamp.interfaces.ITransport.close`'
| def close(self):
| if self.isOpen():
self.transport.close()
else:
raise TransportLost()
|
'Implements :func:`autobahn.wamp.interfaces.ITransport.abort`'
| def abort(self):
| if self.isOpen():
if hasattr(self.transport, 'abort'):
self.transport.abort()
else:
self.transport.close()
else:
raise TransportLost()
|
'Implements :func:`autobahn.wamp.interfaces.ITransport.get_channel_id`'
| def get_channel_id(self, channel_id_type=u'tls-unique'):
| return None
|
'Implements :func:`autobahn.wamp.interfaces.ITransport.get_channel_id`'
| def get_channel_id(self, channel_id_type=u'tls-unique'):
| return None
|
':param factory: A callable that produces instances that implement
:class:`autobahn.wamp.interfaces.ITransportHandler`
:type factory: callable
:param serializers: A list of WAMP serializers to use (or ``None``
for all available serializers).
:type serializers: list of objects implementing
:class:`autobahn.wamp.interfaces.ISerializer`'
| def __init__(self, factory, serializers=None):
| if callable(factory):
self._factory = factory
else:
self._factory = (lambda : factory)
if (serializers is None):
serializers = get_serializers()
if (not serializers):
raise Exception('could not import any WAMP serializers')
self._serializers = {ser.RAWSOCKET_SERIALIZER_ID: ser for ser in serializers}
|
':param factory: A callable that produces instances that implement
:class:`autobahn.wamp.interfaces.ITransportHandler`
:type factory: callable
:param serializer: The WAMP serializer to use (or ``None`` for
"best" serializer, chosen as the first serializer available from
this list: CBOR, MessagePack, UBJSON, JSON).
:type serializer: object implementing :class:`autobahn.wamp.interfaces.ISerializer`'
| def __init__(self, factory, serializer=None):
| if callable(factory):
self._factory = factory
else:
self._factory = (lambda : factory)
if (serializer is None):
serializers = get_serializers()
if serializers:
serializer = serializers[0]
if (serializer is None):
raise Exception('could not import any WAMP serializer')
self._serializer = serializer
|
'Implements :func:`autobahn.wamp.interfaces.ITransport.get_channel_id`'
| def get_channel_id(self):
| self.log.debug('FIXME: transport channel binding not implemented for asyncio (autobahn-python issue #729)')
return None
|
'.. note::
In addition to all arguments to the constructor of
:meth:`autobahn.websocket.interfaces.IWebSocketServerChannelFactory`,
you can supply a ``loop`` keyword argument to specify the
asyncio event loop to be used.'
| def __init__(self, *args, **kwargs):
| loop = kwargs.pop('loop', None)
self.loop = (loop or asyncio.get_event_loop())
protocol.WebSocketServerFactory.__init__(self, *args, **kwargs)
|
'.. note::
In addition to all arguments to the constructor of
:meth:`autobahn.websocket.interfaces.IWebSocketClientChannelFactory`,
you can supply a ``loop`` keyword argument to specify the
asyncio event loop to be used.'
| def __init__(self, *args, **kwargs):
| loop = kwargs.pop('loop', None)
self.loop = (loop or asyncio.get_event_loop())
protocol.WebSocketClientFactory.__init__(self, *args, **kwargs)
|
':param factory: A callable that produces instances that implement
:class:`autobahn.wamp.interfaces.ITransportHandler`
:type factory: callable
:param serializers: A list of WAMP serializers to use (or ``None``
for all available serializers).
:type serializers: list of objects implementing
:class:`autobahn.wamp.interfaces.ISerializer`'
| def __init__(self, factory, *args, **kwargs):
| serializers = kwargs.pop('serializers', None)
websocket.WampWebSocketServerFactory.__init__(self, factory, serializers)
kwargs['protocols'] = self._protocols
WebSocketServerFactory.__init__(self, *args, **kwargs)
|
':param factory: A callable that produces instances that implement
:class:`autobahn.wamp.interfaces.ITransportHandler`
:type factory: callable
:param serializers: A list of WAMP serializers to use (or ``None``
for all available serializers).
:type serializers: list of objects implementing
:class:`autobahn.wamp.interfaces.ISerializer`'
| def __init__(self, factory, *args, **kwargs):
| serializers = kwargs.pop('serializers', None)
websocket.WampWebSocketClientFactory.__init__(self, factory, serializers)
kwargs['protocols'] = self._protocols
WebSocketClientFactory.__init__(self, *args, **kwargs)
|
':param url: The WebSocket URL of the WAMP router to connect to (e.g. `ws://somehost.com:8090/somepath`)
:type url: str
:param realm: The WAMP realm to join the application session to.
:type realm: str
:param extra: Optional extra configuration to forward to the application component.
:type extra: dict
:param serializers: A list of WAMP serializers to use (or None for default serializers).
Serializers must implement :class:`autobahn.wamp.interfaces.ISerializer`.
:type serializers: list
:param ssl: An (optional) SSL context instance or a bool. See
the documentation for the `loop.create_connection` asyncio
method, to which this value is passed as the ``ssl``
keyword parameter.
:type ssl: :class:`ssl.SSLContext` or bool
:param proxy: Explicit proxy server to use; a dict with ``host`` and ``port`` keys
:type proxy: dict or None
:param headers: Additional headers to send (only applies to WAMP-over-WebSocket).
:type headers: dict'
| def __init__(self, url, realm=None, extra=None, serializers=None, ssl=None, proxy=None, headers=None):
| assert (type(url) == six.text_type)
assert ((realm is None) or (type(realm) == six.text_type))
assert ((extra is None) or (type(extra) == dict))
assert ((headers is None) or (type(headers) == dict))
assert ((proxy is None) or (type(proxy) == dict))
self.url = url
self.realm = realm
self.extra = (extra or dict())
self.serializers = serializers
self.ssl = ssl
self.proxy = proxy
self.headers = headers
|
'Stop reconnecting, if auto-reconnecting was enabled.'
| @public
def stop(self):
| raise NotImplementedError()
|
'Run the application component. Under the hood, this runs the event
loop (unless `start_loop=False` is passed) so won\'t return
until the program is done.
:param make: A factory that produces instances of :class:`autobahn.asyncio.wamp.ApplicationSession`
when called with an instance of :class:`autobahn.wamp.types.ComponentConfig`.
:type make: callable
:param start_loop: When ``True`` (the default) this method
starts a new asyncio loop.
:type start_loop: bool
:returns: None is returned, unless you specify
`start_loop=False` in which case the coroutine from calling
`loop.create_connection()` is returned. This will yield the
(transport, protocol) pair.'
| @public
def run(self, make, start_loop=True, log_level='info'):
| if callable(make):
def create():
cfg = ComponentConfig(self.realm, self.extra)
try:
session = make(cfg)
except Exception as e:
self.log.error('ApplicationSession could not be instantiated: {}'.format(e))
loop = asyncio.get_event_loop()
if loop.is_running():
loop.stop()
raise
else:
return session
else:
create = make
if self.url.startswith(u'rs'):
(isSecure, host, port) = parse_rs_url(self.url)
serializer = (self.serializers[0] if self.serializers else None)
transport_factory = WampRawSocketClientFactory(create, serializer=serializer)
else:
(isSecure, host, port, resource, path, params) = parse_ws_url(self.url)
transport_factory = WampWebSocketClientFactory(create, url=self.url, serializers=self.serializers, proxy=self.proxy, headers=self.headers)
offers = [PerMessageDeflateOffer()]
def accept(response):
if isinstance(response, PerMessageDeflateResponse):
return PerMessageDeflateResponseAccept(response)
transport_factory.setProtocolOptions(maxFramePayloadSize=1048576, maxMessagePayloadSize=1048576, autoFragmentSize=65536, failByDrop=False, openHandshakeTimeout=2.5, closeHandshakeTimeout=1.0, tcpNoDelay=True, autoPingInterval=10.0, autoPingTimeout=5.0, autoPingSize=4, perMessageCompressionOffers=offers, perMessageCompressionAccept=accept)
if (self.ssl is None):
ssl = isSecure
else:
if (self.ssl and (not isSecure)):
raise RuntimeError(('ssl argument value passed to %s conflicts with the "ws:" prefix of the url argument. Did you mean to use "wss:"?' % self.__class__.__name__))
ssl = self.ssl
loop = asyncio.get_event_loop()
txaio.use_asyncio()
txaio.config.loop = loop
coro = loop.create_connection(transport_factory, host, port, ssl=ssl)
if (not start_loop):
return coro
else:
(transport, protocol) = loop.run_until_complete(coro)
txaio.start_logging(level=log_level)
try:
loop.add_signal_handler(signal.SIGTERM, loop.stop)
except NotImplementedError:
pass
try:
loop.run_forever()
except KeyboardInterrupt:
pass
if protocol._session:
loop.run_until_complete(protocol._session.leave())
loop.close()
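As with the Twisted variant, a minimal usage sketch (URL, realm and the called procedure URI are placeholders):

```python
from autobahn.asyncio.wamp import ApplicationSession, ApplicationRunner

class MyComponent(ApplicationSession):
    async def onJoin(self, details):
        res = await self.call(u'com.example.add2', 2, 3)
        print('call result:', res)
        self.leave()

if __name__ == '__main__':
    runner = ApplicationRunner(u'ws://localhost:8080/ws', u'realm1')
    runner.run(MyComponent)  # runs the asyncio event loop until the session ends
```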
|
'IdGenerator follows the generator protocol'
| def test_idgenerator_is_generator(self):
| g = IdGenerator()
self.assertEqual(1, next(g))
self.assertEqual(2, next(g))
|
'A dictionary mapping field names to field values in this version
of the model.
Parent links of inherited multi-table models will not be followed.'
| @cached_property
def _local_field_dict(self):
| version_options = _get_options(self._model)
object_version = self._object_version
obj = object_version.object
model = self._model
field_dict = {}
for field_name in version_options.fields:
field = model._meta.get_field(field_name)
if isinstance(field, models.ManyToManyField):
if (field.attname in object_version.m2m_data):
field_dict[field.attname] = object_version.m2m_data[field.attname]
else:
field_dict[field.attname] = getattr(obj, field.attname)
return field_dict
|
'A dictionary mapping field names to field values in this version
of the model.
This method will follow parent links, if present.'
| @cached_property
def field_dict(self):
| field_dict = self._local_field_dict
for (parent_model, field) in self._model._meta.concrete_model._meta.parents.items():
content_type = _get_content_type(parent_model, self._state.db)
parent_id = field_dict[field.attname]
parent_version = self.revision.version_set.get(content_type=content_type, object_id=parent_id, db=self.db)
field_dict.update(parent_version.field_dict)
return field_dict
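For illustration, a hedged sketch of reading `field_dict` from a stored version of some saved model instance (`obj` is a placeholder instance that has been saved under a revision):

```python
from reversion.models import Version

version = Version.objects.get_for_object(obj).first()  # one stored version, or None
if version is not None:
    print(version.field_dict)  # follows parent links of multi-table inheritance
```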
|
'Registers the model with reversion.'
| def reversion_register(self, model, **kwargs):
| register(model, **kwargs)
|
'Applies the correct ordering to the given version queryset.'
| def _reversion_order_version_queryset(self, queryset):
| if (not self.history_latest_first):
queryset = queryset.order_by(u'pk')
return queryset
|
'Displays a form that can recover a deleted model.'
| def recover_view(self, request, version_id, extra_context=None):
| if (not self.has_add_permission(request)):
raise PermissionDenied
version = get_object_or_404(Version, pk=version_id)
context = {u'title': (_(u'Recover %(name)s') % {u'name': version.object_repr}), u'recover': True}
context.update((extra_context or {}))
return self._reversion_revisionform_view(request, version, (self.recover_form_template or self._reversion_get_template_list(u'recover_form.html')), context)
|
'Displays the contents of the given revision.'
| def revision_view(self, request, object_id, version_id, extra_context=None):
| object_id = unquote(object_id)
version = get_object_or_404(Version, pk=version_id, object_id=object_id)
context = {u'title': (_(u'Revert %(name)s') % {u'name': version.object_repr}), u'revert': True}
context.update((extra_context or {}))
return self._reversion_revisionform_view(request, version, (self.revision_form_template or self._reversion_get_template_list(u'revision_form.html')), context)
|
'Displays a deleted model to allow recovery.'
| def recoverlist_view(self, request, extra_context=None):
| if ((not self.has_change_permission(request)) or (not self.has_add_permission(request))):
raise PermissionDenied
model = self.model
opts = model._meta
deleted = self._reversion_order_version_queryset(Version.objects.get_deleted(self.model))
request.current_app = self.admin_site.name
context = dict(self.admin_site.each_context(request), opts=opts, app_label=opts.app_label, module_name=capfirst(opts.verbose_name), title=(_(u'Recover deleted %(name)s') % {u'name': force_text(opts.verbose_name_plural)}), deleted=deleted)
context.update((extra_context or {}))
return render(request, (self.recover_list_template or self._reversion_get_template_list(u'recover_list.html')), context)
|
'Renders the history view.'
| def history_view(self, request, object_id, extra_context=None):
| if (not self.has_change_permission(request)):
raise PermissionDenied
object_id = unquote(object_id)
opts = self.model._meta
action_list = [{u'revision': version.revision, u'url': reverse((u'%s:%s_%s_revision' % (self.admin_site.name, opts.app_label, opts.model_name)), args=(quote(version.object_id), version.id))} for version in self._reversion_order_version_queryset(Version.objects.get_for_object_reference(self.model, object_id).select_related(u'revision__user'))]
context = {u'action_list': action_list}
context.update((extra_context or {}))
return super(VersionAdmin, self).history_view(request, object_id, context)
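A hedged example of wiring these admin views up by registering a model with a `VersionAdmin` subclass (`Article` and `myapp` are placeholders):

```python
from django.contrib import admin
from reversion.admin import VersionAdmin
from myapp.models import Article  # placeholder model

@admin.register(Article)
class ArticleAdmin(VersionAdmin):
    history_latest_first = True  # show newest revisions first in the history view
```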
|
'Construct a new restoring initializer.
Will read the checkpoint from the SSTables file `filename` using
the RestoreV2 Tensorflow op.
The actual variable read from the checkpoint will be
`scope_name` + \'/\' + `var_name` (or just `var_name` if `scope_name` is
empty), where `scope_name` is given by one of
(1) The current scope\'s name at the point where the initializer gets called,
if the `scope` argument to this constructor is None,
(2) If `scope` is callable, the result of applying it to the current scope\'s
name,
(3) Otherwise, the `scope` argument to this constructor itself.
Args:
filename: Name of an SSTables entry where the checkpoint is hosted.
var_name: Name of the variable to restore.
scope: The variable scope\'s name of the variable to restore, see above.'
| def __init__(self, filename, var_name, scope=None):
| self._filename = filename
self._var_name = var_name
self._scope = scope
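The enclosing class name is not shown in this excerpt; assuming it is called, say, `RestoreInitializer`, it could be used like any other TF1 initializer (a sketch, with hypothetical paths and variable names):

```python
import tensorflow as tf

# `RestoreInitializer` is a hypothetical name for the class whose __init__ is shown above.
init = RestoreInitializer(filename='/path/to/checkpoint', var_name='linear/w')
w = tf.get_variable('w', shape=[128, 64], initializer=init)
```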
|
'Build magic (and sparsely documented) shapes_and_slices spec string.'
| def _partition_spec(self, shape, partition_info):
| if (partition_info is None):
return ''
ssi = tf.Variable.SaveSliceInfo(full_name=self._var_name, full_shape=partition_info.full_shape, var_offset=partition_info.var_offset, var_shape=shape)
return ssi.spec
|
'Check if an error is raised if we don\'t specify the is_training flag.'
| @parameterized.Parameters((True, False, False), (False, True, False), (False, False, True))
def testBatchNormBuildFlag(self, use_batch_norm_h, use_batch_norm_x, use_batch_norm_c):
| batch_size = 2
hidden_size = 4
inputs = tf.placeholder(tf.float32, shape=[batch_size, hidden_size])
prev_cell = tf.placeholder(tf.float32, shape=[batch_size, hidden_size])
prev_hidden = tf.placeholder(tf.float32, shape=[batch_size, hidden_size])
err = 'is_training flag must be explicitly specified'
with self.assertRaisesRegexp(ValueError, err):
lstm = snt.LSTM(hidden_size, use_batch_norm_h=use_batch_norm_h, use_batch_norm_x=use_batch_norm_x, use_batch_norm_c=use_batch_norm_c)
lstm(inputs, (prev_cell, prev_hidden))
|
'Test that everything trains OK, with or without trainable init. state.'
| @parameterized.Parameters((False, 1), (False, 2), (True, 1), (True, 2))
def testTraining(self, trainable_initial_state, max_unique_stats):
| hidden_size = 3
batch_size = 3
time_steps = 3
cell = snt.BatchNormLSTM(hidden_size=hidden_size, max_unique_stats=max_unique_stats)
inputs = tf.constant(np.random.rand(batch_size, time_steps, 3), dtype=tf.float32)
initial_state = cell.initial_state(batch_size, tf.float32, trainable_initial_state)
(output, _) = tf.nn.dynamic_rnn(cell.with_batch_norm_control(is_training=True), inputs, initial_state=initial_state, dtype=tf.float32)
loss = tf.reduce_mean(tf.square((output - np.random.rand(batch_size, time_steps, hidden_size))))
train_op = tf.train.GradientDescentOptimizer(1).minimize(loss)
init = tf.global_variables_initializer()
with self.test_session():
init.run()
train_op.run()
|
'Test that training works, with or without trainable initial state.'
| @parameterized.Parameters((snt.Conv1DLSTM, 1, False), (snt.Conv1DLSTM, 1, True), (snt.Conv2DLSTM, 2, False), (snt.Conv2DLSTM, 2, True))
def testTraining(self, lstm_class, dim, trainable_initial_state):
| time_steps = 1
batch_size = 2
input_shape = ((8,) * dim)
input_channels = 3
output_channels = 5
input_shape = (((batch_size,) + input_shape) + (input_channels,))
lstm = lstm_class(input_shape=input_shape[1:], output_channels=output_channels, kernel_shape=1)
inputs = tf.random_normal(((time_steps,) + input_shape), dtype=tf.float32)
initial_state = lstm.initial_state(batch_size, tf.float32, trainable_initial_state)
(output, _) = tf.nn.dynamic_rnn(lstm, inputs, time_major=True, initial_state=initial_state, dtype=tf.float32)
loss = tf.reduce_mean(tf.square(output))
train_op = tf.train.GradientDescentOptimizer(1).minimize(loss)
init = tf.global_variables_initializer()
with self.test_session() as sess:
sess.run(init)
sess.run(train_op)
|
'Tests block lower-triangular matrix.'
| def test_lower(self):
| btm = block_matrix.BlockTriangularMatrix(block_shape=(2, 3), block_rows=3, upper=False)
self.assertEqual(btm.num_blocks, 6)
self.assertEqual(btm.block_size, 6)
self.assertEqual(btm.input_size, 36)
output = btm(create_input(btm.input_size))
with self.test_session() as sess:
result = sess.run(output)
self._check_output_size(btm, result)
expected = np.array([[[0, 1, 2, 0, 0, 0, 0, 0, 0], [3, 4, 5, 0, 0, 0, 0, 0, 0], [6, 7, 8, 9, 10, 11, 0, 0, 0], [12, 13, 14, 15, 16, 17, 0, 0, 0], [18, 19, 20, 21, 22, 23, 24, 25, 26], [27, 28, 29, 30, 31, 32, 33, 34, 35]]])
self.assertAllEqual(result, expected)
|
'Tests block lower-triangular matrix without diagonal.'
| def test_lower_no_diagonal(self):
| btm = block_matrix.BlockTriangularMatrix(block_shape=(2, 3), block_rows=3, include_diagonal=False)
self.assertEqual(btm.num_blocks, 3)
self.assertEqual(btm.block_size, 6)
self.assertEqual(btm.input_size, 18)
output = btm(create_input(btm.input_size))
with self.test_session() as sess:
result = sess.run(output)
self._check_output_size(btm, result)
expected = np.array([[[0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 1, 2, 0, 0, 0, 0, 0, 0], [3, 4, 5, 0, 0, 0, 0, 0, 0], [6, 7, 8, 9, 10, 11, 0, 0, 0], [12, 13, 14, 15, 16, 17, 0, 0, 0]]])
self.assertAllEqual(result, expected)
|
'Tests block upper-triangular matrix.'
| def test_upper(self):
| btm = block_matrix.BlockTriangularMatrix(block_shape=(2, 3), block_rows=3, upper=True)
self.assertEqual(btm.num_blocks, 6)
self.assertEqual(btm.block_size, 6)
self.assertEqual(btm.input_size, 36)
output = btm(create_input(btm.input_size))
with self.test_session() as sess:
result = sess.run(output)
self._check_output_size(btm, result)
expected = np.array([[[0, 1, 2, 3, 4, 5, 6, 7, 8], [9, 10, 11, 12, 13, 14, 15, 16, 17], [0, 0, 0, 18, 19, 20, 21, 22, 23], [0, 0, 0, 24, 25, 26, 27, 28, 29], [0, 0, 0, 0, 0, 0, 30, 31, 32], [0, 0, 0, 0, 0, 0, 33, 34, 35]]])
self.assertAllEqual(result, expected)
|
'Tests block upper-triangular matrix without diagonal.'
| def test_upper_no_diagonal(self):
| btm = block_matrix.BlockTriangularMatrix(block_shape=(2, 3), block_rows=3, upper=True, include_diagonal=False)
self.assertEqual(btm.num_blocks, 3)
self.assertEqual(btm.block_size, 6)
self.assertEqual(btm.input_size, 18)
output = btm(create_input(btm.input_size))
with self.test_session() as sess:
result = sess.run(output)
self._check_output_size(btm, result)
expected = np.array([[[0, 0, 0, 0, 1, 2, 3, 4, 5], [0, 0, 0, 6, 7, 8, 9, 10, 11], [0, 0, 0, 0, 0, 0, 12, 13, 14], [0, 0, 0, 0, 0, 0, 15, 16, 17], [0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0]]])
self.assertAllEqual(result, expected)
|
'Tests batching.'
| def test_batch(self):
| btm = block_matrix.BlockTriangularMatrix(block_shape=(2, 2), block_rows=2, upper=False)
output = btm(create_input(12, batch_size=2))
with self.test_session() as sess:
result = sess.run(output)
self._check_output_size(btm, result, batch_size=2)
expected = np.array([[[0, 1, 0, 0], [2, 3, 0, 0], [4, 5, 6, 7], [8, 9, 10, 11]], [[12, 13, 0, 0], [14, 15, 0, 0], [16, 17, 18, 19], [20, 21, 22, 23]]])
self.assertAllEqual(result, expected)
|
'Tests BlockDiagonalMatrix.'
| def test_default(self):
| bdm = block_matrix.BlockDiagonalMatrix(block_shape=(2, 3), block_rows=3)
self.assertEqual(bdm.num_blocks, 3)
self.assertEqual(bdm.block_size, 6)
self.assertEqual(bdm.input_size, 18)
output = bdm(create_input(bdm.input_size))
with self.test_session() as sess:
result = sess.run(output)
expected = np.array([[[0, 1, 2, 0, 0, 0, 0, 0, 0], [3, 4, 5, 0, 0, 0, 0, 0, 0], [0, 0, 0, 6, 7, 8, 0, 0, 0], [0, 0, 0, 9, 10, 11, 0, 0, 0], [0, 0, 0, 0, 0, 0, 12, 13, 14], [0, 0, 0, 0, 0, 0, 15, 16, 17]]])
self.assertAllEqual(result, expected)
|
'Tests properties of BlockDiagonalMatrix.'
| def test_properties(self):
| bdm = block_matrix.BlockDiagonalMatrix(block_shape=(3, 5), block_rows=7)
self.assertEqual(bdm.num_blocks, 7)
self.assertEqual(bdm.block_size, 15)
self.assertEqual(bdm.input_size, 105)
self.assertEqual(bdm.output_shape, (21, 35))
self.assertEqual(bdm.block_shape, (3, 5))
|
'Performs the initialisation necessary for all AbstractModule instances.
Every subclass of AbstractModule must begin its constructor with a call to
this constructor, i.e. `super(MySubModule, self).__init__(name=name)`.
If you instantiate sub-modules in __init__ you must create them within the
`_enter_variable_scope` context manager to ensure they are in the module\'s
variable scope. Alternatively, instantiate sub-modules in `_build`.
Args:
_sentinel: Variable that only carries a non-None value if `__init__` was
called without named parameters. If this is the case, a deprecation
warning is issued in form of a `ValueError`.
custom_getter: Callable or dictionary of callables to use as
custom getters inside the module. If a dictionary, the keys
correspond to regexes to match variable names. See the `tf.get_variable`
documentation for information about the custom_getter API.
name: Name of this module. Used to construct the Templated build function.
If `None` the module\'s class name is used (converted to snake case).
Raises:
TypeError: If `name` is not a string.
TypeError: If a given `custom_getter` is not callable.
ValueError: If `__init__` was called without named arguments.'
| def __init__(self, _sentinel=None, custom_getter=None, name=None):
| if (_sentinel is not None):
raise ValueError('Calling AbstractModule.__init__ without named arguments is deprecated.')
if (name is None):
name = util.to_snake_case(self.__class__.__name__)
elif (not isinstance(name, six.string_types)):
raise TypeError('Name must be a string.')
self._connected_subgraphs = []
if isinstance(custom_getter, collections.Mapping):
self._custom_getter = util._custom_getter_router(custom_getter_map=custom_getter, name_fn=(lambda name: name[(len(self.scope_name) + 1):]))
else:
if (not ((custom_getter is None) or callable(custom_getter))):
raise TypeError('Given custom_getter is not callable.')
self._custom_getter = custom_getter
self._template = tf.make_template(name, self._build_wrapper, create_scope_now_=True, custom_getter_=self._custom_getter)
self._original_name = name
self._unique_name = self._template.variable_scope.name.split('/')[(-1)]
self.__doc__ = self._build.__doc__
self.__call__.__func__.__doc__ = self._build.__doc__
self._graph = None
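A minimal sketch of the subclassing contract described above: call the base constructor first, then create variables only inside `_build` (module and variable names here are made up):

```python
import tensorflow as tf
import sonnet as snt

class Scale(snt.AbstractModule):
    """Multiplies its input by a single learned scalar."""

    def __init__(self, name='scale'):
        super(Scale, self).__init__(name=name)  # must be called first

    def _build(self, inputs):
        # Variables created here live in this module's variable scope and are
        # shared across every connection of this instance into the graph.
        w = tf.get_variable('w', shape=[], initializer=tf.ones_initializer())
        return inputs * w

mod = Scale()
out1 = mod(tf.placeholder(tf.float32, [None, 3]))  # creates 'scale/w'
out2 = mod(tf.placeholder(tf.float32, [None, 3]))  # reuses the same variable
```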
|
'Function which will be wrapped in a Template to do variable sharing.
Passes through all arguments to the _build method, and returns the
corresponding outputs, plus the name_scope generated by this call of the
template.
Args:
*args: args list for self._build
**kwargs: kwargs dict for self._build
Returns:
A tuple containing (output from _build, scope_name).'
| def _build_wrapper(self, *args, **kwargs):
| output = self._build(*args, **kwargs)
with tf.name_scope('dummy') as scope_name:
this_scope_name = scope_name[:(- len('/dummy/'))]
return (output, this_scope_name)
|
'Checks that the base class\'s __init__ method has been called.
Raises:
NotInitializedError: `AbstractModule.__init__` has not been called.'
| def _check_init_called(self):
| try:
self._template
except AttributeError:
raise NotInitializedError(('You may have forgotten to call super at the start of %s.__init__.' % self.__class__.__name__))
|
'Checks that the module is not being connect to multiple Graphs.
An instance of a Sonnet module \'owns\' the variables it contains, and permits
seamless variable sharing. As such, connecting a single module instance to
multiple Graphs is not possible - this function will raise an error should
that occur.
Raises:
DifferentGraphError: if the module is connected to a different Graph than
it was previously used in.'
| def _check_same_graph(self):
| current_graph = tf.get_default_graph()
if (self._graph is None):
self._graph = current_graph
elif (self._graph != current_graph):
raise DifferentGraphError('Cannot connect module to multiple Graphs.')
|
'Operator overload for calling.
This is the entry point when users connect a Module into the Graph. The
underlying _build method will have been wrapped in a Template by the
constructor, and we call this template with the provided inputs here.
Args:
*args: Arguments for underlying _build method.
**kwargs: Keyword arguments for underlying _build method.
Returns:
The result of the underlying _build method.'
| def __call__(self, *args, **kwargs):
| self._check_init_called()
self._check_same_graph()
(outputs, this_name_scope) = self._template(*args, **kwargs)
inputs = SubgraphInputs(args, kwargs)
self._connected_subgraphs.append(ConnectedSubGraph(self, this_name_scope, inputs, outputs))
return outputs
|
'Returns a tuple of all name_scopes generated by this module.'
| @property
def name_scopes(self):
| return tuple((subgraph.name_scope for subgraph in self._connected_subgraphs))
|
'Returns the variable_scope declared by the module.
It is valid for library users to access the internal templated
variable_scope, but only makes sense to do so after connection. Therefore we
raise an error here if the variable_scope is requested before connection.
The only case where it does make sense to access the variable_scope before
connection is to get the post-uniquification name, which we support using
the separate .name property.
Returns:
variable_scope: `tf.VariableScope` instance of the internal `tf.Template`.
Raises:
NotConnectedError: If the module is not connected to the Graph.'
| @property
def variable_scope(self):
| self._ensure_is_connected()
return self._template.variable_scope
|
'Returns the full name of the Module\'s variable scope.'
| @property
def scope_name(self):
| return self._template.variable_scope.name
|
'Returns the name of the Module.'
| @property
def module_name(self):
| return self._unique_name
|
'Returns true iff the Module has been connected to the Graph at least once.'
| @property
def is_connected(self):
| return bool(self._connected_subgraphs)
|
'Returns the subgraphs created by this module so far.'
| @property
def connected_subgraphs(self):
| return tuple(self._connected_subgraphs)
|
'Returns the last subgraph created by this module.
Returns:
The last connected subgraph.
Raises:
NotConnectedError: If the module is not connected to the Graph.'
| @property
def last_connected_subgraph(self):
| self._ensure_is_connected()
return self._connected_subgraphs[(-1)]
|
'Returns the keys the dictionary of variable initializers may contain.
This provides the user with a way of knowing the initializer keys that are
available without having to instantiate a sonnet module. Subclasses may
override this class method if they need additional arguments to determine
what initializer keys may be provided.
Returns:
Set with strings corresponding to the strings that may be passed to the
constructor.'
| @classmethod
def get_possible_initializer_keys(cls):
| return getattr(cls, 'POSSIBLE_INITIALIZER_KEYS', set())
|
'Raise an Error if the module has not been connected yet.
Until the module is connected into the Graph, any variables created do
not exist yet and cannot be created in advance due to not knowing the size
of the input Tensor(s). This assertion ensures that any variables contained
in this module must now exist.
Raises:
NotConnectedError: If the module is not connected to the Graph.'
| def _ensure_is_connected(self):
| if (not self.is_connected):
raise NotConnectedError('Variables in {} not instantiated yet, __call__ the module first.'.format(self.scope_name))
|
'Returns a contextlib.contextmanager to enter the internal variable scope.
This is useful for situations where submodules must be declared in the
constructor, or somewhere else that is not called under the `_build` method.
If such a case arises, calling `with self._enter_variable_scope():` will
cause the variables in the submodule to be correctly scoped.
An example justification for this is to allow the `Transposable` interface
to be implemented - you might want to construct all the submodules at
construction time so that you can call `.transpose()` and connect the
result of that before connecting the non-transposed module.
```python
class SomeModule(snt.AbstractModule):
def __init__(self, name="some_module"):
super(SomeModule, self).__init__(name=name)
with self._enter_variable_scope():
# We need to construct this submodule before we get to the _build
# method, for some reason.
self._sub_mod = snt.SomeSubmodule(name="some_submodule")
def _build(self, input):
# Connect to the already constructed submodule.
return self._sub_mod(input)
```
If you omit this then the submodule and parent module will appear to
be "side by side" rather than nested when viewed in the Graph viewer, and
functions such as `snt.get_variables_in_module()` or the `get_variables()`
method will not know about variables defined in the submodule.
Args:
reuse: Boolean passed to `tf.variable_scope`.
Returns:
`contextlib.contextmanager` of the variable_scope inside the template.'
| def _enter_variable_scope(self, reuse=None):
| self._check_init_called()
self._check_same_graph()
return tf.variable_scope(self._template.variable_scope, reuse=reuse)
|
'Returns tuple of `tf.Variable`s declared inside this module.
Note that this operates by searching this module\'s variable scope,
and so does not know about any modules that were constructed elsewhere but
used inside this module.
Args:
collection: Collection to restrict query to. By default this is
`tf.GraphKeys.TRAINABLE_VARIABLES`, which doesn\'t include non-trainable
variables such as moving averages.
Returns:
A tuple of `tf.Variable` objects.
Raises:
NotConnectedError: If the module is not connected to the Graph.'
| def get_variables(self, collection=tf.GraphKeys.TRAINABLE_VARIABLES):
| return util.get_variables_in_scope(self.variable_scope, collection=collection)
|
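A short graph-mode sketch of `get_variables`; note that it only finds variables created inside the module's own variable scope and defaults to the trainable collection:
```python
import tensorflow as tf
import sonnet as snt

lin = snt.Linear(output_size=3, name="lin")
_ = lin(tf.placeholder(tf.float32, shape=[None, 5]))  # Connect to create variables.

trainable = lin.get_variables()  # tf.GraphKeys.TRAINABLE_VARIABLES by default.
all_vars = lin.get_variables(collection=tf.GraphKeys.GLOBAL_VARIABLES)
print(sorted(v.name for v in trainable))  # e.g. ['lin/b:0', 'lin/w:0']
```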
'Constructs a module with a given build function.
The Module class can be used to wrap a function assembling a network into a
module.
For example, the following code implements a simple one-hidden-layer MLP
model by defining a function called make_model and using a Module instance
to wrap it.
```python
def make_model(inputs):
lin1 = snt.Linear(name="lin1", output_size=10)(inputs)
relu1 = tf.nn.relu(lin1, name="relu1")
lin2 = snt.Linear(name="lin2", output_size=20)(relu1)
return lin2
model = snt.Module(name=\'simple_mlp\', build=make_model)
outputs = model(inputs)
```
The `partial` package from `functools` can be used to bake configuration
parameters into the function at construction time, as shown in the following
example.
```python
from functools import partial
def make_model(inputs, output_sizes):
lin1 = snt.Linear(name="lin1", output_size=output_sizes[0])(inputs)
relu1 = tf.nn.relu(lin1, name="relu1")
lin2 = snt.Linear(name="lin2", output_size=output_sizes[1])(relu1)
return lin2
model = snt.Module(name=\'simple_mlp\',
build=partial(make_model, output_sizes=[10, 20]))
outputs = model(inputs)
```
Args:
build: Callable to be invoked when connecting the module to the graph.
The `build` function is invoked when the module is called, and its
role is to specify how to add elements to the Graph, and how to
compute output Tensors from input Tensors.
The `build` function signature can include the following parameters:
*args - Input Tensors.
**kwargs - Additional Python parameters controlling connection.
name: Module name. If set to `None` (the default), the name will be set to
that of the `build` callable converted to `snake_case`. If `build` has
no name, the name will be \'module\'.
Raises:
TypeError: If build is not callable.'
| def __init__(self, build, name=None):
| if (not callable(build)):
raise TypeError("Input 'build' must be callable.")
if (name is None):
name = util.name_for_callable(build)
super(Module, self).__init__(name=name)
self._build_function = build
|
'Forwards call to the passed-in build function.'
| def _build(self, *args, **kwargs):
| return self._build_function(*args, **kwargs)
|
'Constructor.
Args:
core: A `sonnet.RNNCore` object. This should only take a single `Tensor`
in input, and output only a single flat `Tensor`.
output_size: An integer. The size of each output in the sequence.
threshold: A float between 0 and 1. Probability to reach for ACT to stop
pondering.
get_state_for_halting: A callable that can take the `core` state and
return the input to the halting function.
name: A string. The name of this module.
Raises:
ValueError: if `threshold` is not between 0 and 1.
ValueError: if `core` has either nested outputs or outputs that are not
one dimensional.'
| def __init__(self, core, output_size, threshold, get_state_for_halting, name='act_core'):
| super(ACTCore, self).__init__(name=name)
self._core = core
self._output_size = output_size
self._threshold = threshold
self._get_state_for_halting = get_state_for_halting
if (not isinstance(self._core.output_size, tf.TensorShape)):
raise ValueError('Output of core should be single Tensor.')
if (self._core.output_size.ndims != 1):
raise ValueError('Output of core should be 1D.')
if (not (0 <= self._threshold <= 1)):
raise ValueError('Threshold should be between 0 and 1, but found {}'.format(self._threshold))
|
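A construction sketch for the constructor above. It assumes the module is exported as `snt.ACTCore` and wraps an `snt.LSTM`, whose flat 1-D output and `(hidden, cell)` state satisfy the checks performed here; the choice of halting input is an assumption of this example:
```python
import sonnet as snt

core = snt.LSTM(hidden_size=32)  # 1-D output, so the output_size checks pass.

act_core = snt.ACTCore(
    core=core,
    output_size=16,   # size of each emitted output
    threshold=0.99,   # ponder until cumulative halting exceeds this
    get_state_for_halting=lambda state: state[0])  # use the LSTM hidden state

# A threshold outside [0, 1], e.g. threshold=1.5, would raise ValueError here.
```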
'The `cond` of the `tf.while_loop`.'
| def _cond(self, unused_x, unused_cumul_out, unused_prev_state, unused_cumul_state, cumul_halting, unused_iteration, unused_remainder):
| return tf.reduce_any((cumul_halting < 1))
|
'The `body` of `tf.while_loop`.'
| def _body(self, x, cumul_out, prev_state, cumul_state, cumul_halting, iteration, remainder, halting_linear, x_ones):
| all_ones = tf.constant(1, shape=(self._batch_size, 1), dtype=self._dtype)
is_iteration_over = tf.equal(cumul_halting, all_ones)
next_iteration = tf.where(is_iteration_over, iteration, (iteration + 1))
(out, next_state) = self._core(x, prev_state)
halting_input = halting_linear(self._get_state_for_halting(next_state))
halting = tf.sigmoid(halting_input, name='halting')
next_cumul_halting_raw = (cumul_halting + halting)
over_threshold = (next_cumul_halting_raw > self._threshold)
next_cumul_halting = tf.where(over_threshold, all_ones, next_cumul_halting_raw)
next_remainder = tf.where(over_threshold, remainder, (1 - next_cumul_halting_raw))
p = (next_cumul_halting - cumul_halting)
next_cumul_state = _nested_add(cumul_state, _nested_unary_mul(next_state, p))
next_cumul_out = (cumul_out + (p * out))
return (x_ones, next_cumul_out, next_state, next_cumul_state, next_cumul_halting, next_iteration, next_remainder)
|
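The halting bookkeeping in `_body` can be hard to follow inside the `tf.while_loop`; below is a plain-NumPy illustration of the same arithmetic for a single step, written only to show the update rule and not part of the module:
```python
import numpy as np

def act_halting_step(cumul_halting, halting, remainder, threshold):
    """One step of the ACT halting bookkeeping, mirroring _body above.

    Shapes are (batch_size, 1), as in the TF code.
    """
    next_raw = cumul_halting + halting
    over = next_raw > threshold
    next_cumul_halting = np.where(over, 1.0, next_raw)
    # Once a row crosses the threshold, its remainder is frozen at the
    # probability mass that was left before this step.
    next_remainder = np.where(over, remainder, 1.0 - next_raw)
    # Weight given to this step's output when accumulating cumul_out.
    p = next_cumul_halting - cumul_halting
    return next_cumul_halting, next_remainder, p

cumul = np.array([[0.7], [0.2]])
halt = np.array([[0.4], [0.3]])
rem = np.array([[0.3], [0.8]])
print(act_halting_step(cumul, halt, rem, threshold=0.99))
# Row 0 crosses the threshold: cumulative halting clamps to 1.0 and p = 0.3.
```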
'Connects the core to the graph.
Args:
x: Input `Tensor` of shape `(batch_size, input_size)`.
prev_state: Previous state. This could be a `Tensor`, or a tuple of
`Tensor`s.
Returns:
The tuple `(output, state)` for this core.
Raises:
ValueError: if the `Tensor` `x` does not have rank 2.'
| def _build(self, x, prev_state):
| x.get_shape().with_rank(2)
self._batch_size = x.get_shape().as_list()[0]
self._dtype = x.dtype
x_zeros = tf.concat([x, tf.zeros(shape=(self._batch_size, 1), dtype=self._dtype)], 1)
x_ones = tf.concat([x, tf.ones(shape=(self._batch_size, 1), dtype=self._dtype)], 1)
halting_linear = basic.Linear(name='halting_linear', output_size=1)
body = functools.partial(self._body, halting_linear=halting_linear, x_ones=x_ones)
cumul_halting_init = tf.zeros(shape=(self._batch_size, 1), dtype=self._dtype)
iteration_init = tf.zeros(shape=(self._batch_size, 1), dtype=self._dtype)
core_output_size = [dim.value for dim in self._core.output_size]
out_init = tf.zeros(shape=((self._batch_size,) + tuple(core_output_size)), dtype=self._dtype)
cumul_state_init = _nested_zeros_like(prev_state)
remainder_init = tf.zeros(shape=(self._batch_size, 1), dtype=self._dtype)
(unused_final_x, final_out, unused_final_state, final_cumul_state, unused_final_halting, final_iteration, final_remainder) = tf.while_loop(self._cond, body, [x_zeros, out_init, prev_state, cumul_state_init, cumul_halting_init, iteration_init, remainder_init])
act_output = basic.Linear(name='act_output_linear', output_size=self._output_size)(final_out)
return ((act_output, (final_iteration, final_remainder)), final_cumul_state)
|
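A single-step usage sketch of the `_build` above, assuming graph mode, a static batch size (required because `_build` reads it from the input shape), and that the module is exported as `snt.ACTCore`:
```python
import tensorflow as tf
import sonnet as snt

BATCH, INPUT_SIZE, HIDDEN = 4, 8, 32

core = snt.LSTM(hidden_size=HIDDEN)
act = snt.ACTCore(core, output_size=16, threshold=0.99,
                  get_state_for_halting=lambda s: s[0])

x = tf.placeholder(tf.float32, shape=[BATCH, INPUT_SIZE])
prev_state = core.initial_state(BATCH)

# The core runs a data-dependent number of times inside the tf.while_loop;
# the halting-weighted sum of its outputs is projected to output_size.
(output, (iterations, remainder)), next_state = act(x, prev_state)
```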
'Test whether the result for different data formats is the same.'
| def helperDataFormats(self, func, x, use_bias, atol=1e-05):
| mod1 = func(name='default')
mod2 = func(name='NCHW_conv', data_format='NCHW')
x_transpose = tf.transpose(x, perm=(0, 3, 1, 2))
o1 = mod1(x)
o2 = tf.transpose(mod2(x_transpose), perm=(0, 2, 3, 1))
with self.test_session(use_gpu=True, force_gpu=True):
tf.global_variables_initializer().run()
self.assertAllClose(o1.eval(), o2.eval(), atol=atol)
|
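The helper relies on `perm=(0, 3, 1, 2)` mapping NHWC to NCHW and `perm=(0, 2, 3, 1)` mapping back; a quick NumPy sanity check of that round trip (the shapes here are arbitrary):
```python
import numpy as np

x_nhwc = np.random.random((2, 5, 5, 3)).astype(np.float32)  # NHWC
x_nchw = np.transpose(x_nhwc, (0, 3, 1, 2))                  # -> NCHW
roundtrip = np.transpose(x_nchw, (0, 2, 3, 1))               # -> back to NHWC

assert x_nchw.shape == (2, 3, 5, 5)
assert np.array_equal(roundtrip, x_nhwc)
```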
'Test data formats for Conv2D.'
| @parameterized.NamedParameters(('WithBias', True), ('WithoutBias', False))
def testConv2DDataFormats(self, use_bias):
| func = functools.partial(snt.Conv2D, output_channels=self.OUT_CHANNELS, kernel_shape=self.KERNEL_SHAPE, use_bias=use_bias, initializers=create_constant_initializers(1.0, 1.0, use_bias))
x = tf.constant(np.random.random(self.INPUT_SHAPE).astype(np.float32))
self.helperDataFormats(func, x, use_bias)
|
'Test data formats for Conv2DTranspose.'
| @parameterized.NamedParameters(('WithBias', True), ('WithoutBias', False))
def testConv2DTransposeDataFormats(self, use_bias):
| (mb, h, w, c) = self.INPUT_SHAPE
def func(name, data_format='NHWC'):
shape = (self.INPUT_SHAPE if (data_format == 'NHWC') else (mb, c, h, w))
temp_input = tf.constant(0.0, dtype=tf.float32, shape=shape)
mod = snt.Conv2D(name=name, output_channels=self.OUT_CHANNELS, kernel_shape=self.KERNEL_SHAPE, use_bias=use_bias, initializers=create_constant_initializers(1.0, 1.0, use_bias), data_format=data_format)
_ = mod(temp_input)
return mod.transpose(name=(name + 'Trans'))
shape = (mb, h, w, self.OUT_CHANNELS)
x = tf.constant(np.random.random(shape).astype(np.float32))
self.helperDataFormats(func, x, use_bias)
|
'Tests data formats for the convolutions with batch normalization.'
| @parameterized.NamedParameters(('WithBias', True), ('WithoutBias', False))
def testConv2DDataFormatsBatchNorm(self, use_bias):
| def func(name, data_format='NHWC'):
conv = snt.Conv2D(name=name, output_channels=self.OUT_CHANNELS, kernel_shape=self.KERNEL_SHAPE, use_bias=use_bias, initializers=create_constant_initializers(1.0, 1.0, use_bias), data_format=data_format)
if (data_format == 'NHWC'):
bn = snt.BatchNorm(scale=True, update_ops_collection=None)
else:
bn = snt.BatchNorm(scale=True, update_ops_collection=None, fused=True, axis=(0, 2, 3))
return snt.Sequential([conv, functools.partial(bn, is_training=True)])
x = tf.constant(np.random.random(self.INPUT_SHAPE).astype(np.float32))
self.helperDataFormats(func, x, use_bias)
|
'Constructs a LayerNorm module.
Args:
eps: small epsilon to avoid division by zero variance. Defaults to
1e-5 as used in the paper.
initializers: Dict containing ops to initialize the scale
(with key \'gamma\') and bias (with key \'beta\').
partitioners: Optional dict containing partitioners to partition
the scale (with key \'gamma\') and bias (with key \'beta\'). As a default,
no partitioners are used.
regularizers: Optional dict containing regularizers for the scale (with
key \'gamma\') and bias (with key \'beta\'). As a default, no regularizers
are used.
name: name of the module.
Raises:
KeyError: If `initializers`, `partitioners` or `regularizers` contain
any keys other than `gamma` or `beta`.
TypeError: If any of the given initializers, partitioners or regularizers
are not callable.'
| def __init__(self, eps=1e-05, initializers=None, partitioners=None, regularizers=None, name='layer_norm'):
| super(LayerNorm, self).__init__(name=name)
self._eps = eps
self._initializers = util.check_initializers(initializers, self.POSSIBLE_INITIALIZER_KEYS)
self._partitioners = util.check_partitioners(partitioners, self.POSSIBLE_INITIALIZER_KEYS)
self._regularizers = util.check_regularizers(regularizers, self.POSSIBLE_INITIALIZER_KEYS)
|
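A construction sketch for the module above; the initializer values are arbitrary and only the `gamma`/`beta` keys matter:
```python
import tensorflow as tf
import sonnet as snt

# Only the keys 'gamma' (scale) and 'beta' (bias) are accepted; any other
# key raises KeyError, and non-callable values raise TypeError.
layer_norm = snt.LayerNorm(
    eps=1e-5,
    initializers={
        'gamma': tf.constant_initializer(1.0),
        'beta': tf.zeros_initializer(),
    })

inputs = tf.placeholder(tf.float32, shape=[None, 128])
normalized = layer_norm(inputs)  # same shape as inputs
```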
'Connects the LayerNorm module into the graph.
Args:
inputs: a Tensor of shape `[batch_size, layer_dim]`.
Returns:
normalized: layer normalized outputs with same shape as inputs.
Raises:
base.NotSupportedError: If `inputs` has data type of `tf.float16`.'
| def _build(self, inputs):
| if (inputs.dtype == tf.float16):
raise base.NotSupportedError('LayerNorm does not support `tf.float16`, insufficient precision for calculating sufficient statistics.')
if (inputs.get_shape().ndims != 2):
raise base.NotSupportedError('Layer normalization expects inputs of rank 2. Got inputs of rank {}.'.format(inputs.get_shape().ndims))
hidden_size = inputs.get_shape()[1].value
if (self.GAMMA not in self._initializers):
self._initializers[self.GAMMA] = create_gamma_initializer()
self._gamma = tf.get_variable(self.GAMMA, shape=[hidden_size], dtype=inputs.dtype, initializer=self._initializers[self.GAMMA], partitioner=self._partitioners.get(self.GAMMA), regularizer=self._regularizers.get(self.GAMMA))
if (self.BETA not in self._initializers):
self._initializers[self.BETA] = create_beta_initializer()
self._beta = tf.get_variable(self.BETA, shape=[hidden_size], dtype=inputs.dtype, initializer=self._initializers[self.BETA], partitioner=self._partitioners.get(self.BETA), regularizer=self._regularizers.get(self.BETA))
(mean, var) = tf.nn.moments(inputs, [1], keep_dims=True)
normalized = tf.nn.batch_normalization(inputs, mean, var, self._beta, self._gamma, self._eps)
return normalized
|
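For reference, the computation in `_build` reduces to the following NumPy form (mean and variance over the feature axis, then scale and shift); this is an illustrative re-derivation, not code from the module:
```python
import numpy as np

def layer_norm_reference(x, gamma, beta, eps=1e-5):
    """NumPy reference for the computation in _build above.

    x: (batch_size, hidden_size); gamma, beta: (hidden_size,).
    """
    mean = x.mean(axis=1, keepdims=True)
    var = x.var(axis=1, keepdims=True)
    return gamma * (x - mean) / np.sqrt(var + eps) + beta

x = np.array([[1.0, 2.0, 3.0], [6.0, 4.0, 7.0]], dtype=np.float32)
out = layer_norm_reference(x, gamma=np.ones(3), beta=np.zeros(3))
print(out.mean(axis=1))  # ~[0, 0]
print(out.var(axis=1))   # ~[1, 1]
```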
'Check that inputs are approximately centered and scaled.'
| def testNormalization(self):
| inputs = tf.constant([[1.0, 2.0, 3.0], [6.0, 4.0, 7.0]], dtype=tf.float32)
ln = snt.LayerNorm()
outputs = ln(inputs)
init = tf.global_variables_initializer()
with self.test_session() as sess:
sess.run(init)
outputs_ = sess.run(outputs)
self.assertAllClose(outputs_.mean(axis=1), [0.0, 0.0], atol=0.0001)
self.assertAllClose(outputs_.var(axis=1), [1.0, 1.0], atol=0.0001)
|