repo_name (stringlengths 6-100) | path (stringlengths 4-294) | copies (stringlengths 1-5) | size (stringlengths 4-6) | content (stringlengths 606-896k) | license (stringclasses 15 values)
---|---|---|---|---|---|
justacec/bokeh | bokeh/server/tornado.py | 2 | 15041 | ''' Provides the Bokeh Server Tornado application.
'''
from __future__ import absolute_import, print_function
import logging
log = logging.getLogger(__name__)
import atexit
# NOTE: needs PyPI backport on Python 2 (https://pypi.python.org/pypi/futures)
from concurrent.futures import ProcessPoolExecutor
import os
from pprint import pformat
import signal
from tornado import gen
from tornado.ioloop import IOLoop, PeriodicCallback
from tornado.web import Application as TornadoApplication
from tornado.web import HTTPError
from tornado.web import StaticFileHandler
from bokeh.resources import Resources
from bokeh.settings import settings
from .urls import per_app_patterns, toplevel_patterns
from .connection import ServerConnection
from .application_context import ApplicationContext
from .views.static_handler import StaticHandler
def match_host(host, pattern):
""" Match host against pattern
>>> match_host('192.168.0.1:80', '192.168.0.1:80')
True
>>> match_host('192.168.0.1:80', '192.168.0.1')
True
>>> match_host('192.168.0.1:80', '192.168.0.1:8080')
False
>>> match_host('192.168.0.1', '192.168.0.2')
False
>>> match_host('192.168.0.1', '192.168.*.*')
True
>>> match_host('alice', 'alice')
True
>>> match_host('alice:80', 'alice')
True
>>> match_host('alice', 'bob')
False
>>> match_host('foo.example.com', 'foo.example.com.net')
False
>>> match_host('alice', '*')
True
>>> match_host('alice', '*:*')
True
>>> match_host('alice:80', '*')
True
>>> match_host('alice:80', '*:80')
True
>>> match_host('alice:8080', '*:80')
False
"""
if ':' in host:
host, host_port = host.rsplit(':', 1)
else:
host_port = None
if ':' in pattern:
pattern, pattern_port = pattern.rsplit(':', 1)
if pattern_port == '*':
pattern_port = None
else:
pattern_port = None
if pattern_port is not None and host_port != pattern_port:
return False
host = host.split('.')
pattern = pattern.split('.')
if len(pattern) > len(host):
return False
for h, p in zip(host, pattern):
if h == p or p == '*':
continue
else:
return False
return True
# factored out to be easier to test
def check_whitelist(request_host, whitelist):
''' Check a given request host against a whitelist.
'''
if ':' not in request_host:
request_host = request_host + ':80'
if request_host in whitelist:
return True
return any(match_host(request_host, host) for host in whitelist)
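# Editorial sketch (not part of the original module): how check_whitelist and
# match_host compose. A bare request host is normalized to port 80 before it
# is compared against the whitelist patterns, so for example:
#
#     check_whitelist('example.com', ['example.com:80'])       # -> True
#     check_whitelist('example.com:8080', ['*:80'])            # -> False
#     check_whitelist('sub.example.com', ['*.example.com:*'])  # -> True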
def _whitelist(handler_class):
if hasattr(handler_class.prepare, 'patched'):
return
old_prepare = handler_class.prepare
def _prepare(self, *args, **kw):
if not check_whitelist(self.request.host, self.application._hosts):
log.info("Rejected connection from host '%s' because it is not in the --host whitelist" % self.request.host)
raise HTTPError(403)
return old_prepare(self, *args, **kw)
_prepare.patched = True
handler_class.prepare = _prepare
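# Editorial note on the pattern above: _whitelist monkey-patches the handler
# class in place and uses the `patched` marker on the wrapper to stay
# idempotent, so calling it twice on the same class is a no-op:
#
#     _whitelist(SomeHandler)   # wraps SomeHandler.prepare with the host check
#     _whitelist(SomeHandler)   # second call returns early (already patched)
#
# (`SomeHandler` is a hypothetical tornado.web.RequestHandler subclass.)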
class BokehTornado(TornadoApplication):
''' A Tornado Application used to implement the Bokeh Server.
The Server class is the main public interface; this class holds the
Tornado implementation details.
Args:
applications (dict of str : bokeh.application.Application) : map from paths to Application instances
The application is used to create documents for each session.
extra_patterns (seq[tuple]) : tuples of (str, http or websocket handler)
Use this argument to add additional endpoints to custom deployments
of the Bokeh Server.
prefix (str) : a URL prefix to use for all Bokeh server paths
hosts (list) : hosts that are valid values for the Host header
secret_key (str) : secret key for signing session IDs
sign_sessions (boolean) : whether to sign session IDs
generate_session_ids (boolean) : whether to generate a session ID when none is provided
extra_websocket_origins (list) : hosts that can connect to the websocket
These are in addition to ``hosts``.
keep_alive_milliseconds (int) : number of milliseconds between keep-alive pings
Set to 0 to disable pings. Pings keep the websocket open.
check_unused_sessions_milliseconds (int) : number of milliseconds between check for unused sessions
unused_session_lifetime_milliseconds (int) : number of milliseconds for unused session lifetime
stats_log_frequency_milliseconds (int) : number of milliseconds between logging stats
develop (boolean) : True for develop mode
'''
def __init__(self, applications, prefix, hosts,
extra_websocket_origins,
io_loop=None,
extra_patterns=None,
secret_key=settings.secret_key_bytes(),
sign_sessions=settings.sign_sessions(),
generate_session_ids=True,
# Heroku and nginx default to a 60s timeout, so keep this well under that
keep_alive_milliseconds=37000,
# how often to check for unused sessions
check_unused_sessions_milliseconds=17000,
# how long unused sessions last
unused_session_lifetime_milliseconds=15000,
# how often to log stats
stats_log_frequency_milliseconds=15000,
develop=False):
self._prefix = prefix
if io_loop is None:
io_loop = IOLoop.current()
self._loop = io_loop
if keep_alive_milliseconds < 0:
# 0 means "disable"
raise ValueError("keep_alive_milliseconds must be >= 0")
if check_unused_sessions_milliseconds <= 0:
raise ValueError("check_unused_sessions_milliseconds must be > 0")
if unused_session_lifetime_milliseconds <= 0:
raise ValueError("check_unused_sessions_milliseconds must be > 0")
if stats_log_frequency_milliseconds <= 0:
raise ValueError("stats_log_frequency_milliseconds must be > 0")
self._hosts = set(hosts)
self._websocket_origins = self._hosts | set(extra_websocket_origins)
self._resources = {}
self._develop = develop
self._secret_key = secret_key
self._sign_sessions = sign_sessions
self._generate_session_ids = generate_session_ids
log.debug("Allowed Host headers: %r", list(self._hosts))
log.debug("These host origins can connect to the websocket: %r", list(self._websocket_origins))
# Wrap applications in ApplicationContext
self._applications = dict()
for k,v in applications.items():
self._applications[k] = ApplicationContext(v, self._develop, self._loop)
extra_patterns = extra_patterns or []
all_patterns = []
for key, app in applications.items():
app_patterns = []
for p in per_app_patterns:
if key == "/":
route = p[0]
else:
route = key + p[0]
route = self._prefix + route
app_patterns.append((route, p[1], { "application_context" : self._applications[key] }))
websocket_path = None
for r in app_patterns:
if r[0].endswith("/ws"):
websocket_path = r[0]
if not websocket_path:
raise RuntimeError("Couldn't find websocket path")
for r in app_patterns:
r[2]["bokeh_websocket_path"] = websocket_path
all_patterns.extend(app_patterns)
# add a per-app static path if requested by the application
if app.static_path is not None:
if key == "/":
route = "/static/(.*)"
else:
route = key + "/static/(.*)"
route = self._prefix + route
all_patterns.append((route, StaticFileHandler, { "path" : app.static_path }))
for p in extra_patterns + toplevel_patterns:
prefixed_pat = (self._prefix+p[0],) + p[1:]
all_patterns.append(prefixed_pat)
for pat in all_patterns:
_whitelist(pat[1])
log.debug("Patterns are:")
for line in pformat(all_patterns, width=60).split("\n"):
log.debug(" " + line)
super(BokehTornado, self).__init__(all_patterns)
self._clients = set()
self._executor = ProcessPoolExecutor(max_workers=4)
self._loop.add_callback(self._start_async)
self._stats_job = PeriodicCallback(self.log_stats,
stats_log_frequency_milliseconds,
io_loop=self._loop)
self._unused_session_linger_milliseconds = unused_session_lifetime_milliseconds
self._cleanup_job = PeriodicCallback(self.cleanup_sessions,
check_unused_sessions_milliseconds,
io_loop=self._loop)
if keep_alive_milliseconds > 0:
self._ping_job = PeriodicCallback(self.keep_alive, keep_alive_milliseconds, io_loop=self._loop)
else:
self._ping_job = None
@property
def io_loop(self):
return self._loop
@property
def websocket_origins(self):
return self._websocket_origins
@property
def secret_key(self):
return self._secret_key
@property
def sign_sessions(self):
return self._sign_sessions
@property
def generate_session_ids(self):
return self._generate_session_ids
def root_url_for_request(self, request):
return request.protocol + "://" + request.host + self._prefix + "/"
def websocket_url_for_request(self, request, websocket_path):
# websocket_path comes from the handler, and already has any
# prefix included, no need to add here
protocol = "ws"
if request.protocol == "https":
protocol = "wss"
return protocol + "://" + request.host + websocket_path
def resources(self, request):
root_url = self.root_url_for_request(request)
if root_url not in self._resources:
self._resources[root_url] = Resources(mode="server",
root_url=root_url,
path_versioner=StaticHandler.append_version)
return self._resources[root_url]
def start(self, start_loop=True):
''' Start the Bokeh Server application main loop.
Args:
start_loop (boolean): False to not actually start the event loop (used in tests)
Returns:
None
Notes:
Keyboard interrupts or sigterm will cause the server to shut down.
'''
self._stats_job.start()
self._cleanup_job.start()
if self._ping_job is not None:
self._ping_job.start()
for context in self._applications.values():
context.run_load_hook()
if start_loop:
try:
self._loop.start()
except KeyboardInterrupt:
print("\nInterrupted, shutting down")
def stop(self):
''' Stop the Bokeh Server application.
Returns:
None
'''
# TODO we should probably close all connections and shut
# down all sessions either here or in unlisten() ... but
# it isn't that important since in real life it's rare to
# do a clean shutdown (vs. a kill-by-signal) anyhow.
for context in self._applications.values():
context.run_unload_hook()
self._stats_job.stop()
self._cleanup_job.stop()
if self._ping_job is not None:
self._ping_job.stop()
self._loop.stop()
@property
def executor(self):
return self._executor
def new_connection(self, protocol, socket, application_context, session):
connection = ServerConnection(protocol, socket, application_context, session)
self._clients.add(connection)
return connection
def client_lost(self, connection):
self._clients.discard(connection)
connection.detach_session()
def get_session(self, app_path, session_id):
if app_path not in self._applications:
raise ValueError("Application %s does not exist on this server" % app_path)
return self._applications[app_path].get_session(session_id)
def get_sessions(self, app_path):
if app_path not in self._applications:
raise ValueError("Application %s does not exist on this server" % app_path)
return list(self._applications[app_path].sessions)
@gen.coroutine
def cleanup_sessions(self):
for app in self._applications.values():
yield app.cleanup_sessions(self._unused_session_linger_milliseconds)
raise gen.Return(None)
def log_stats(self):
if log.getEffectiveLevel() > logging.DEBUG:
# avoid the work below if we aren't going to log anything
return
log.debug("[pid %d] %d clients connected", os.getpid(), len(self._clients))
for app_path, app in self._applications.items():
sessions = list(app.sessions)
unused_count = 0
for s in sessions:
if s.connection_count == 0:
unused_count += 1
log.debug("[pid %d] %s has %d sessions with %d unused",
os.getpid(), app_path, len(sessions), unused_count)
def keep_alive(self):
for c in self._clients:
c.send_ping()
@gen.coroutine
def run_in_background(self, _func, *args, **kwargs):
"""
Run a synchronous function in the background without disrupting
the main thread. Useful for long-running jobs.
"""
res = yield self._executor.submit(_func, *args, **kwargs)
raise gen.Return(res)
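# Hedged usage sketch (editor's illustration; `compute_summary` and `data`
# are hypothetical and must be picklable for ProcessPoolExecutor): a coroutine
# can offload blocking work to the pool and resume when it finishes, without
# stalling the IOLoop:
#
#     @gen.coroutine
#     def build_report(app, data):
#         summary = yield app.run_in_background(compute_summary, data)
#         raise gen.Return(summary)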
@gen.coroutine
def _start_async(self):
try:
atexit.register(self._atexit)
signal.signal(signal.SIGTERM, self._sigterm)
except Exception:
self.exit(1)
_atexit_ran = False
def _atexit(self):
if self._atexit_ran:
return
self._atexit_ran = True
self._stats_job.stop()
IOLoop.clear_current()
loop = IOLoop()
loop.make_current()
loop.run_sync(self._cleanup)
def _sigterm(self, signum, frame):
print("Received SIGTERM, shutting down")
self.stop()
self._atexit()
@gen.coroutine
def _cleanup(self):
log.debug("Shutdown: cleaning up")
self._executor.shutdown(wait=False)
self._clients.clear()
| bsd-3-clause |
minhphung171093/GreenERP_V7 | openerp/addons/report_webkit/webkit_report.py | 16 | 15315 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2010 Camptocamp SA (http://www.camptocamp.com)
# All Rights Reserved
#
# Author : Nicolas Bessi (Camptocamp)
# Contributor(s) : Florent Xicluna (Wingo SA)
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
##############################################################################
import subprocess
import os
import sys
from openerp import report
import tempfile
import time
import logging
from functools import partial
from mako.template import Template
from mako.lookup import TemplateLookup
from mako import exceptions
from openerp import netsvc
from openerp import pooler
from report_helper import WebKitHelper
from openerp.report.report_sxw import *
from openerp import addons
from openerp import tools
from openerp.tools.translate import _
from openerp.osv.osv import except_osv
_logger = logging.getLogger(__name__)
def mako_template(text):
"""Build a Mako template.
This template uses UTF-8 encoding
"""
tmp_lookup = TemplateLookup()  # needed to allow template inclusion and inheritance
return Template(text, input_encoding='utf-8', output_encoding='utf-8', lookup=tmp_lookup)
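# Illustrative sketch (the template text below is made up for the example):
# the returned Template renders with keyword arguments and, because of
# output_encoding='utf-8', produces UTF-8 encoded output:
#
#     tpl = mako_template(u"<p>${_('Total')}: ${amount}</p>")
#     tpl.render(_=lambda s: s, amount=42)   # -> '<p>Total: 42</p>'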
class WebKitParser(report_sxw):
"""Custom class that use webkit to render HTML reports
Code partially taken from report openoffice. Thanks guys :)
"""
def __init__(self, name, table, rml=False, parser=False,
header=True, store=False):
self.localcontext = {}
report_sxw.__init__(self, name, table, rml, parser,
header, store)
def get_lib(self, cursor, uid):
"""Return the lib wkhtml path"""
proxy = self.pool.get('ir.config_parameter')
webkit_path = proxy.get_param(cursor, uid, 'webkit_path')
if not webkit_path:
try:
defpath = os.environ.get('PATH', os.defpath).split(os.pathsep)
if hasattr(sys, 'frozen'):
defpath.append(os.getcwd())
if tools.config['root_path']:
defpath.append(os.path.dirname(tools.config['root_path']))
webkit_path = tools.which('wkhtmltopdf', path=os.pathsep.join(defpath))
except IOError:
webkit_path = None
if webkit_path:
return webkit_path
raise except_osv(
_('Wkhtmltopdf library path is not set'),
_('Please install the executable on your system' \
' (sudo apt-get install wkhtmltopdf) or download it from here:' \
' http://code.google.com/p/wkhtmltopdf/downloads/list and set the' \
' path in the ir.config_parameter with the webkit_path key.' \
' Minimal version is 0.9.9')
)
def generate_pdf(self, comm_path, report_xml, header, footer, html_list, webkit_header=False):
"""Call webkit in order to generate pdf"""
if not webkit_header:
webkit_header = report_xml.webkit_header
fd, out_filename = tempfile.mkstemp(suffix=".pdf",
prefix="webkit.tmp.")
file_to_del = [out_filename]
if comm_path:
command = [comm_path]
else:
command = ['wkhtmltopdf']
command.append('--quiet')
# default to UTF-8 encoding. Use <meta charset="latin-1"> to override.
command.extend(['--encoding', 'utf-8'])
if header :
with tempfile.NamedTemporaryFile(suffix=".head.html",
delete=False) as head_file:
head_file.write(self._sanitize_html(header))
file_to_del.append(head_file.name)
command.extend(['--header-html', head_file.name])
if footer :
with tempfile.NamedTemporaryFile(suffix=".foot.html",
delete=False) as foot_file:
foot_file.write(self._sanitize_html(footer))
file_to_del.append(foot_file.name)
command.extend(['--footer-html', foot_file.name])
if webkit_header.margin_top :
command.extend(['--margin-top', str(webkit_header.margin_top).replace(',', '.')])
if webkit_header.margin_bottom :
command.extend(['--margin-bottom', str(webkit_header.margin_bottom).replace(',', '.')])
if webkit_header.margin_left :
command.extend(['--margin-left', str(webkit_header.margin_left).replace(',', '.')])
if webkit_header.margin_right :
command.extend(['--margin-right', str(webkit_header.margin_right).replace(',', '.')])
if webkit_header.orientation :
command.extend(['--orientation', str(webkit_header.orientation).replace(',', '.')])
if webkit_header.format :
command.extend(['--page-size', str(webkit_header.format).replace(',', '.')])
count = 0
for html in html_list :
with tempfile.NamedTemporaryFile(suffix="%d.body.html" %count,
delete=False) as html_file:
count += 1
html_file.write(self._sanitize_html(html))
file_to_del.append(html_file.name)
command.append(html_file.name)
command.append(out_filename)
stderr_fd, stderr_path = tempfile.mkstemp(text=True)
file_to_del.append(stderr_path)
try:
status = subprocess.call(command, stderr=stderr_fd)
os.close(stderr_fd) # ensure flush before reading
stderr_fd = None # avoid closing again in finally block
fobj = open(stderr_path, 'r')
error_message = fobj.read()
fobj.close()
if not error_message:
error_message = _('No diagnosis message was provided')
else:
error_message = _('The following diagnosis message was provided:\n') + error_message
if status :
raise except_osv(_('Webkit error' ),
_("The command 'wkhtmltopdf' failed with error code = %s. Message: %s") % (status, error_message))
with open(out_filename, 'rb') as pdf_file:
pdf = pdf_file.read()
os.close(fd)
finally:
if stderr_fd is not None:
os.close(stderr_fd)
for f_to_del in file_to_del:
try:
os.unlink(f_to_del)
except (OSError, IOError), exc:
_logger.error('cannot remove file %s: %s', f_to_del, exc)
return pdf
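# Editorial note: for a report with a header, a footer and one body, the
# command assembled above takes roughly this shape (temp-file names are
# illustrative):
#
#     wkhtmltopdf --quiet --encoding utf-8 \
#         --header-html /tmp/tmpXXXX.head.html \
#         --footer-html /tmp/tmpXXXX.foot.html \
#         --margin-top 10.0 --page-size A4 \
#         /tmp/tmpXXXX0.body.html /tmp/webkit.tmp.XXXX.pdf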
def translate_call(self, parser_instance, src):
"""Translate String."""
ir_translation = self.pool['ir.translation']
name = self.tmpl and 'addons/' + self.tmpl or None
res = ir_translation._get_source(parser_instance.cr, parser_instance.uid,
name, 'report', parser_instance.localcontext.get('lang', 'en_US'), src)
if res == src:
# no translation defined, fallback on None (backward compatibility)
res = ir_translation._get_source(parser_instance.cr, parser_instance.uid,
None, 'report', parser_instance.localcontext.get('lang', 'en_US'), src)
if not res :
return src
return res
# override needed to keep the attachments storing procedure
def create_single_pdf(self, cursor, uid, ids, data, report_xml, context=None):
"""generate the PDF"""
if context is None:
context={}
htmls = []
if report_xml.report_type != 'webkit':
return super(WebKitParser,self).create_single_pdf(cursor, uid, ids, data, report_xml, context=context)
parser_instance = self.parser(cursor,
uid,
self.name2,
context=context)
self.pool = pooler.get_pool(cursor.dbname)
objs = self.getObjects(cursor, uid, ids, context)
parser_instance.set_context(objs, data, ids, report_xml.report_type)
template = False
if report_xml.report_file :
# backward compatibility in case the path is in Windows format
report_path = report_xml.report_file.replace("\\", "/")
path = addons.get_module_resource(*report_path.split('/'))
if path and os.path.exists(path) :
template = file(path).read()
if not template and report_xml.report_webkit_data :
template = report_xml.report_webkit_data
if not template :
raise except_osv(_('Error!'), _('Webkit report template not found!'))
header = report_xml.webkit_header.html
footer = report_xml.webkit_header.footer_html
if not header and report_xml.header:
raise except_osv(
_('No header defined for this Webkit report!'),
_('Please set a header in company settings.')
)
if not report_xml.header :
header = ''
default_head = addons.get_module_resource('report_webkit', 'default_header.html')
with open(default_head,'r') as f:
header = f.read()
css = report_xml.webkit_header.css
if not css :
css = ''
translate_call = partial(self.translate_call, parser_instance)
# default_filters=['unicode', 'entity'] can be used to set a global filter
body_mako_tpl = mako_template(template)
helper = WebKitHelper(cursor, uid, report_xml.id, context)
if report_xml.precise_mode:
for obj in objs:
parser_instance.localcontext['objects'] = [obj]
try :
html = body_mako_tpl.render(helper=helper,
css=css,
_=translate_call,
**parser_instance.localcontext)
htmls.append(html)
except Exception:
msg = exceptions.text_error_template().render()
_logger.error(msg)
raise except_osv(_('Webkit render!'), msg)
else:
try :
html = body_mako_tpl.render(helper=helper,
css=css,
_=translate_call,
**parser_instance.localcontext)
htmls.append(html)
except Exception:
msg = exceptions.text_error_template().render()
_logger.error(msg)
raise except_osv(_('Webkit render!'), msg)
head_mako_tpl = mako_template(header)
try :
head = head_mako_tpl.render(helper=helper,
css=css,
_=translate_call,
_debug=False,
**parser_instance.localcontext)
except Exception:
raise except_osv(_('Webkit render!'),
exceptions.text_error_template().render())
foot = False
if footer :
foot_mako_tpl = mako_template(footer)
try :
foot = foot_mako_tpl.render(helper=helper,
css=css,
_=translate_call,
**parser_instance.localcontext)
except:
msg = exceptions.text_error_template().render()
_logger.error(msg)
raise except_osv(_('Webkit render!'), msg)
if report_xml.webkit_debug :
try :
deb = head_mako_tpl.render(helper=helper,
css=css,
_debug=tools.ustr("\n".join(htmls)),
_=translate_call,
**parser_instance.localcontext)
except Exception:
msg = exceptions.text_error_template().render()
_logger.error(msg)
raise except_osv(_('Webkit render!'), msg)
return (deb, 'html')
bin = self.get_lib(cursor, uid)
pdf = self.generate_pdf(bin, report_xml, head, foot, htmls)
return (pdf, 'pdf')
def create(self, cursor, uid, ids, data, context=None):
"""We override the create function in order to handle generator
Code taken from report openoffice. Thanks guys :) """
pool = pooler.get_pool(cursor.dbname)
ir_obj = pool.get('ir.actions.report.xml')
report_xml_ids = ir_obj.search(cursor, uid,
[('report_name', '=', self.name[7:])], context=context)
if report_xml_ids:
report_xml = ir_obj.browse(cursor,
uid,
report_xml_ids[0],
context=context)
report_xml.report_rml = None
report_xml.report_rml_content = None
report_xml.report_sxw_content_data = None
report_xml.report_sxw_content = None
report_xml.report_sxw = None
else:
return super(WebKitParser, self).create(cursor, uid, ids, data, context)
if report_xml.report_type != 'webkit':
return super(WebKitParser, self).create(cursor, uid, ids, data, context)
result = self.create_source_pdf(cursor, uid, ids, data, report_xml, context)
if not result:
return (False,False)
return result
def _sanitize_html(self, html):
"""wkhtmltopdf expects the html page to declare a doctype.
"""
if html and html[:9].upper() != "<!DOCTYPE":
html = "<!DOCTYPE html>\n" + html
return html
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
Hoekz/hackness-monster | venv/lib/python2.7/site-packages/werkzeug/routing.py | 174 | 66350 | # -*- coding: utf-8 -*-
"""
werkzeug.routing
~~~~~~~~~~~~~~~~
When it comes to combining multiple controller or view functions (however
you want to call them) you need a dispatcher. A simple way would be
applying regular expression tests on the ``PATH_INFO`` and calling
registered callback functions that return the value then.
This module implements a much more powerful system than simple regular
expression matching because it can also convert values in the URLs and
build URLs.
Here is a simple example that creates a URL map for an application with
two subdomains (www and kb) and some URL rules:
>>> m = Map([
... # Static URLs
... Rule('/', endpoint='static/index'),
... Rule('/about', endpoint='static/about'),
... Rule('/help', endpoint='static/help'),
... # Knowledge Base
... Subdomain('kb', [
... Rule('/', endpoint='kb/index'),
... Rule('/browse/', endpoint='kb/browse'),
... Rule('/browse/<int:id>/', endpoint='kb/browse'),
... Rule('/browse/<int:id>/<int:page>', endpoint='kb/browse')
... ])
... ], default_subdomain='www')
If the application doesn't use subdomains it's perfectly fine to not set
the default subdomain and not use the `Subdomain` rule factory. The endpoint
in the rules can be anything, for example import paths or unique
identifiers. The WSGI application can use those endpoints to get the
handler for that URL. It doesn't have to be a string at all but it's
recommended.
Now it's possible to create a URL adapter for one of the subdomains and
build URLs:
>>> c = m.bind('example.com')
>>> c.build("kb/browse", dict(id=42))
'http://kb.example.com/browse/42/'
>>> c.build("kb/browse", dict())
'http://kb.example.com/browse/'
>>> c.build("kb/browse", dict(id=42, page=3))
'http://kb.example.com/browse/42/3'
>>> c.build("static/about")
'/about'
>>> c.build("static/index", force_external=True)
'http://www.example.com/'
>>> c = m.bind('example.com', subdomain='kb')
>>> c.build("static/about")
'http://www.example.com/about'
The first argument to bind is the server name *without* the subdomain.
By default it will assume that the script is mounted on the root, but
often that's not the case so you can provide the real mount point as
second argument:
>>> c = m.bind('example.com', '/applications/example')
The third argument can be the subdomain, if not given the default
subdomain is used. For more details about binding have a look at the
documentation of the `MapAdapter`.
And here is how you can match URLs:
>>> c = m.bind('example.com')
>>> c.match("/")
('static/index', {})
>>> c.match("/about")
('static/about', {})
>>> c = m.bind('example.com', '/', 'kb')
>>> c.match("/")
('kb/index', {})
>>> c.match("/browse/42/23")
('kb/browse', {'id': 42, 'page': 23})
If matching fails you get a `NotFound` exception, if the rule thinks
it's a good idea to redirect (for example because the URL was defined
to have a slash at the end but the request was missing that slash) it
will raise a `RequestRedirect` exception. Both are subclasses of the
`HTTPException` so you can use those errors as responses in the
application.
If matching succeeded but the URL rule was incompatible with the given
method (for example there were only rules for `GET` and `HEAD` and
routing system tried to match a `POST` request) a `MethodNotAllowed`
exception is raised.
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import difflib
import re
import uuid
import posixpath
from pprint import pformat
from threading import Lock
from werkzeug.urls import url_encode, url_quote, url_join
from werkzeug.utils import redirect, format_string
from werkzeug.exceptions import HTTPException, NotFound, MethodNotAllowed, \
BadHost
from werkzeug._internal import _get_environ, _encode_idna
from werkzeug._compat import itervalues, iteritems, to_unicode, to_bytes, \
text_type, string_types, native_string_result, \
implements_to_string, wsgi_decoding_dance
from werkzeug.datastructures import ImmutableDict, MultiDict
_rule_re = re.compile(r'''
(?P<static>[^<]*) # static rule data
<
(?:
(?P<converter>[a-zA-Z_][a-zA-Z0-9_]*) # converter name
(?:\((?P<args>.*?)\))? # converter arguments
\: # variable delimiter
)?
(?P<variable>[a-zA-Z_][a-zA-Z0-9_]*) # variable name
>
''', re.VERBOSE)
_simple_rule_re = re.compile(r'<([^>]+)>')
_converter_args_re = re.compile(r'''
((?P<name>\w+)\s*=\s*)?
(?P<value>
True|False|
\d+.\d+|
\d+.|
\d+|
\w+|
[urUR]?(?P<stringval>"[^"]*?"|'[^']*')
)\s*,
''', re.VERBOSE | re.UNICODE)
_PYTHON_CONSTANTS = {
'None': None,
'True': True,
'False': False
}
def _pythonize(value):
if value in _PYTHON_CONSTANTS:
return _PYTHON_CONSTANTS[value]
for convert in int, float:
try:
return convert(value)
except ValueError:
pass
if value[:1] == value[-1:] and value[0] in '"\'':
value = value[1:-1]
return text_type(value)
def parse_converter_args(argstr):
argstr += ','
args = []
kwargs = {}
for item in _converter_args_re.finditer(argstr):
value = item.group('stringval')
if value is None:
value = item.group('value')
value = _pythonize(value)
if not item.group('name'):
args.append(value)
else:
name = item.group('name')
kwargs[name] = value
return tuple(args), kwargs
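# Illustrative sketch of the parsing above (results follow from the regular
# expression and _pythonize):
#
#     parse_converter_args("length=2")         # -> ((), {'length': 2})
#     parse_converter_args("4, 'a', min=0.5")  # -> ((4, u'a'), {'min': 0.5})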
def parse_rule(rule):
"""Parse a rule and return it as generator. Each iteration yields tuples
in the form ``(converter, arguments, variable)``. If the converter is
`None` it's a static url part, otherwise it's a dynamic one.
:internal:
"""
pos = 0
end = len(rule)
do_match = _rule_re.match
used_names = set()
while pos < end:
m = do_match(rule, pos)
if m is None:
break
data = m.groupdict()
if data['static']:
yield None, None, data['static']
variable = data['variable']
converter = data['converter'] or 'default'
if variable in used_names:
raise ValueError('variable name %r used twice.' % variable)
used_names.add(variable)
yield converter, data['args'] or None, variable
pos = m.end()
if pos < end:
remaining = rule[pos:]
if '>' in remaining or '<' in remaining:
raise ValueError('malformed url rule: %r' % rule)
yield None, None, remaining
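# Illustrative sketch of what the generator above yields for a typical rule
# (static parts have converter None, dynamic parts carry the converter name):
#
#     list(parse_rule('/page/<int:id>/edit'))
#     # -> [(None, None, '/page/'), ('int', None, 'id'), (None, None, '/edit')]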
class RoutingException(Exception):
"""Special exceptions that require the application to redirect, notifying
about missing urls, etc.
:internal:
"""
class RequestRedirect(HTTPException, RoutingException):
"""Raise if the map requests a redirect. This is for example the case if
`strict_slashes` is activated and a URL that requires a trailing slash was
requested without one.
The attribute `new_url` contains the absolute destination url.
"""
code = 301
def __init__(self, new_url):
RoutingException.__init__(self, new_url)
self.new_url = new_url
def get_response(self, environ):
return redirect(self.new_url, self.code)
class RequestSlash(RoutingException):
"""Internal exception."""
class RequestAliasRedirect(RoutingException):
"""This rule is an alias and wants to redirect to the canonical URL."""
def __init__(self, matched_values):
self.matched_values = matched_values
class BuildError(RoutingException, LookupError):
"""Raised if the build system cannot find a URL for an endpoint with the
values provided.
"""
def __init__(self, endpoint, values, method, adapter=None):
LookupError.__init__(self, endpoint, values, method)
self.endpoint = endpoint
self.values = values
self.method = method
self.suggested = self.closest_rule(adapter)
def closest_rule(self, adapter):
def score_rule(rule):
return sum([
0.98 * difflib.SequenceMatcher(
None, rule.endpoint, self.endpoint
).ratio(),
0.01 * bool(set(self.values or ()).issubset(rule.arguments)),
0.01 * bool(rule.methods and self.method in rule.methods)
])
if adapter and adapter.map._rules:
return max(adapter.map._rules, key=score_rule)
else:
return None
def __str__(self):
message = []
message.append("Could not build url for endpoint %r" % self.endpoint)
if self.method:
message.append(" (%r)" % self.method)
if self.values:
message.append(" with values %r" % sorted(self.values.keys()))
message.append(".")
if self.suggested:
if self.endpoint == self.suggested.endpoint:
if self.method and self.method not in self.suggested.methods:
message.append(" Did you mean to use methods %r?" % sorted(
self.suggested.methods
))
missing_values = self.suggested.arguments.union(
set(self.suggested.defaults or ())
) - set(self.values.keys())
if missing_values:
message.append(
" Did you forget to specify values %r?" %
sorted(missing_values)
)
else:
message.append(
" Did you mean %r instead?" % self.suggested.endpoint
)
return "".join(message)
class ValidationError(ValueError):
"""Validation error. If a rule converter raises this exception the rule
does not match the current URL and the next URL is tried.
"""
class RuleFactory(object):
"""As soon as you have more complex URL setups it's a good idea to use rule
factories to avoid repetitive tasks. Some of them are builtin, others can
be added by subclassing `RuleFactory` and overriding `get_rules`.
"""
def get_rules(self, map):
"""Subclasses of `RuleFactory` have to override this method and return
an iterable of rules."""
raise NotImplementedError()
class Subdomain(RuleFactory):
"""All URLs provided by this factory have the subdomain set to a
specific domain. For example if you want to use the subdomain for
the current language this can be a good setup::
url_map = Map([
Rule('/', endpoint='#select_language'),
Subdomain('<string(length=2):lang_code>', [
Rule('/', endpoint='index'),
Rule('/about', endpoint='about'),
Rule('/help', endpoint='help')
])
])
All the rules except for the ``'#select_language'`` endpoint will now
listen on a two letter long subdomain that holds the language code
for the current request.
"""
def __init__(self, subdomain, rules):
self.subdomain = subdomain
self.rules = rules
def get_rules(self, map):
for rulefactory in self.rules:
for rule in rulefactory.get_rules(map):
rule = rule.empty()
rule.subdomain = self.subdomain
yield rule
class Submount(RuleFactory):
"""Like `Subdomain` but prefixes the URL rule with a given string::
url_map = Map([
Rule('/', endpoint='index'),
Submount('/blog', [
Rule('/', endpoint='blog/index'),
Rule('/entry/<entry_slug>', endpoint='blog/show')
])
])
Now the rule ``'blog/show'`` matches ``/blog/entry/<entry_slug>``.
"""
def __init__(self, path, rules):
self.path = path.rstrip('/')
self.rules = rules
def get_rules(self, map):
for rulefactory in self.rules:
for rule in rulefactory.get_rules(map):
rule = rule.empty()
rule.rule = self.path + rule.rule
yield rule
class EndpointPrefix(RuleFactory):
"""Prefixes all endpoints (which must be strings for this factory) with
another string. This can be useful for sub applications::
url_map = Map([
Rule('/', endpoint='index'),
EndpointPrefix('blog/', [Submount('/blog', [
Rule('/', endpoint='index'),
Rule('/entry/<entry_slug>', endpoint='show')
])])
])
"""
def __init__(self, prefix, rules):
self.prefix = prefix
self.rules = rules
def get_rules(self, map):
for rulefactory in self.rules:
for rule in rulefactory.get_rules(map):
rule = rule.empty()
rule.endpoint = self.prefix + rule.endpoint
yield rule
class RuleTemplate(object):
"""Returns copies of the rules wrapped and expands string templates in
the endpoint, rule, defaults or subdomain sections.
Here is a small example of such a rule template::
from werkzeug.routing import Map, Rule, RuleTemplate
resource = RuleTemplate([
Rule('/$name/', endpoint='$name.list'),
Rule('/$name/<int:id>', endpoint='$name.show')
])
url_map = Map([resource(name='user'), resource(name='page')])
When a rule template is called the keyword arguments are used to
replace the placeholders in all the string parameters.
"""
def __init__(self, rules):
self.rules = list(rules)
def __call__(self, *args, **kwargs):
return RuleTemplateFactory(self.rules, dict(*args, **kwargs))
class RuleTemplateFactory(RuleFactory):
"""A factory that fills in template variables into rules. Used by
`RuleTemplate` internally.
:internal:
"""
def __init__(self, rules, context):
self.rules = rules
self.context = context
def get_rules(self, map):
for rulefactory in self.rules:
for rule in rulefactory.get_rules(map):
new_defaults = subdomain = None
if rule.defaults:
new_defaults = {}
for key, value in iteritems(rule.defaults):
if isinstance(value, string_types):
value = format_string(value, self.context)
new_defaults[key] = value
if rule.subdomain is not None:
subdomain = format_string(rule.subdomain, self.context)
new_endpoint = rule.endpoint
if isinstance(new_endpoint, string_types):
new_endpoint = format_string(new_endpoint, self.context)
yield Rule(
format_string(rule.rule, self.context),
new_defaults,
subdomain,
rule.methods,
rule.build_only,
new_endpoint,
rule.strict_slashes
)
@implements_to_string
class Rule(RuleFactory):
"""A Rule represents one URL pattern. There are some options for `Rule`
that change the way it behaves and are passed to the `Rule` constructor.
Note that besides the rule-string all arguments *must* be keyword arguments
in order to not break the application on Werkzeug upgrades.
`string`
Rule strings basically are just normal URL paths with placeholders in
the format ``<converter(arguments):name>`` where the converter and the
arguments are optional. If no converter is defined the `default`
converter is used which means `string` in the normal configuration.
URL rules that end with a slash are branch URLs, others are leaves.
If you have `strict_slashes` enabled (which is the default), all
branch URLs that are matched without a trailing slash will trigger a
redirect to the same URL with the missing slash appended.
The converters are defined on the `Map`.
`endpoint`
The endpoint for this rule. This can be anything. A reference to a
function, a string, a number etc. The preferred way is using a string
because the endpoint is used for URL generation.
`defaults`
An optional dict with defaults for other rules with the same endpoint.
This is a bit tricky but useful if you want to have unique URLs::
url_map = Map([
Rule('/all/', defaults={'page': 1}, endpoint='all_entries'),
Rule('/all/page/<int:page>', endpoint='all_entries')
])
If a user now visits ``http://example.com/all/page/1`` he will be
redirected to ``http://example.com/all/``. If `redirect_defaults` is
disabled on the `Map` instance this will only affect the URL
generation.
`subdomain`
The subdomain rule string for this rule. If not specified the rule
only matches for the `default_subdomain` of the map. If the map is
not bound to a subdomain this feature is disabled.
Can be useful if you want to have user profiles on different subdomains
and all subdomains are forwarded to your application::
url_map = Map([
Rule('/', subdomain='<username>', endpoint='user/homepage'),
Rule('/stats', subdomain='<username>', endpoint='user/stats')
])
`methods`
A sequence of http methods this rule applies to. If not specified, all
methods are allowed. For example this can be useful if you want different
endpoints for `POST` and `GET`. If methods are defined and the path
matches but the method matched against is not in this list or in the
list of another rule for that path the error raised is of the type
`MethodNotAllowed` rather than `NotFound`. If `GET` is present in the
list of methods and `HEAD` is not, `HEAD` is added automatically.
.. versionchanged:: 0.6.1
`HEAD` is now automatically added to the methods if `GET` is
present. The reason for this is that existing code often did not
work properly in servers not rewriting `HEAD` to `GET`
automatically and it was not documented how `HEAD` should be
treated. This was considered a bug in Werkzeug because of that.
`strict_slashes`
Override the `Map` setting for `strict_slashes` only for this rule. If
not specified the `Map` setting is used.
`build_only`
Set this to True and the rule will never match but will create a URL
that can be built. This is useful if you have resources on a subdomain
or folder that are not handled by the WSGI application (like static data)
`redirect_to`
If given this must be either a string or callable. In case of a
callable it's called with the url adapter that triggered the match and
the values of the URL as keyword arguments and has to return the target
for the redirect, otherwise it has to be a string with placeholders in
rule syntax::
def foo_with_slug(adapter, id):
# ask the database for the slug for the old id. this of
# course has nothing to do with werkzeug.
return 'foo/' + Foo.get_slug_for_id(id)
url_map = Map([
Rule('/foo/<slug>', endpoint='foo'),
Rule('/some/old/url/<slug>', redirect_to='foo/<slug>'),
Rule('/other/old/url/<int:id>', redirect_to=foo_with_slug)
])
When the rule is matched the routing system will raise a
`RequestRedirect` exception with the target for the redirect.
Keep in mind that the URL will be joined against the URL root of the
script so don't use a leading slash on the target URL unless you
really mean root of that domain.
`alias`
If enabled this rule serves as an alias for another rule with the same
endpoint and arguments.
`host`
If provided and the URL map has host matching enabled this can be
used to provide a match rule for the whole host. This also means
that the subdomain feature is disabled.
.. versionadded:: 0.7
The `alias` and `host` parameters were added.
"""
def __init__(self, string, defaults=None, subdomain=None, methods=None,
build_only=False, endpoint=None, strict_slashes=None,
redirect_to=None, alias=False, host=None):
if not string.startswith('/'):
raise ValueError('urls must start with a leading slash')
self.rule = string
self.is_leaf = not string.endswith('/')
self.map = None
self.strict_slashes = strict_slashes
self.subdomain = subdomain
self.host = host
self.defaults = defaults
self.build_only = build_only
self.alias = alias
if methods is None:
self.methods = None
else:
self.methods = set([x.upper() for x in methods])
if 'HEAD' not in self.methods and 'GET' in self.methods:
self.methods.add('HEAD')
self.endpoint = endpoint
self.redirect_to = redirect_to
if defaults:
self.arguments = set(map(str, defaults))
else:
self.arguments = set()
self._trace = self._converters = self._regex = self._weights = None
def empty(self):
"""
Return an unbound copy of this rule.
This can be useful if you want to reuse an already bound URL for another
map. See ``get_empty_kwargs`` to override what keyword arguments are
provided to the new copy.
"""
return type(self)(self.rule, **self.get_empty_kwargs())
def get_empty_kwargs(self):
"""
Provides kwargs for instantiating empty copy with empty()
Use this method to provide custom keyword arguments to the subclass of
``Rule`` when calling ``some_rule.empty()``. Helpful when the subclass
has custom keyword arguments that are needed at instantiation.
Must return a ``dict`` that will be provided as kwargs to the new
instance of ``Rule``, following the initial ``self.rule`` value which
is always provided as the first, required positional argument.
"""
defaults = None
if self.defaults:
defaults = dict(self.defaults)
return dict(defaults=defaults, subdomain=self.subdomain,
methods=self.methods, build_only=self.build_only,
endpoint=self.endpoint, strict_slashes=self.strict_slashes,
redirect_to=self.redirect_to, alias=self.alias,
host=self.host)
def get_rules(self, map):
yield self
def refresh(self):
"""Rebinds and refreshes the URL. Call this if you modified the
rule in place.
:internal:
"""
self.bind(self.map, rebind=True)
def bind(self, map, rebind=False):
"""Bind the url to a map and create a regular expression based on
the information from the rule itself and the defaults from the map.
:internal:
"""
if self.map is not None and not rebind:
raise RuntimeError('url rule %r already bound to map %r' %
(self, self.map))
self.map = map
if self.strict_slashes is None:
self.strict_slashes = map.strict_slashes
if self.subdomain is None:
self.subdomain = map.default_subdomain
self.compile()
def get_converter(self, variable_name, converter_name, args, kwargs):
"""Looks up the converter for the given parameter.
.. versionadded:: 0.9
"""
if converter_name not in self.map.converters:
raise LookupError('the converter %r does not exist' % converter_name)
return self.map.converters[converter_name](self.map, *args, **kwargs)
def compile(self):
"""Compiles the regular expression and stores it."""
assert self.map is not None, 'rule not bound'
if self.map.host_matching:
domain_rule = self.host or ''
else:
domain_rule = self.subdomain or ''
self._trace = []
self._converters = {}
self._weights = []
regex_parts = []
def _build_regex(rule):
for converter, arguments, variable in parse_rule(rule):
if converter is None:
regex_parts.append(re.escape(variable))
self._trace.append((False, variable))
for part in variable.split('/'):
if part:
self._weights.append((0, -len(part)))
else:
if arguments:
c_args, c_kwargs = parse_converter_args(arguments)
else:
c_args = ()
c_kwargs = {}
convobj = self.get_converter(
variable, converter, c_args, c_kwargs)
regex_parts.append('(?P<%s>%s)' % (variable, convobj.regex))
self._converters[variable] = convobj
self._trace.append((True, variable))
self._weights.append((1, convobj.weight))
self.arguments.add(str(variable))
_build_regex(domain_rule)
regex_parts.append('\\|')
self._trace.append((False, '|'))
_build_regex(self.is_leaf and self.rule or self.rule.rstrip('/'))
if not self.is_leaf:
self._trace.append((False, '/'))
if self.build_only:
return
regex = r'^%s%s$' % (
u''.join(regex_parts),
(not self.is_leaf or not self.strict_slashes) and
'(?<!/)(?P<__suffix__>/?)' or ''
)
self._regex = re.compile(regex, re.UNICODE)
def match(self, path):
"""Check if the rule matches a given path. Path is a string in the
form ``"subdomain|/path(method)"`` and is assembled by the map. If
the map is doing host matching the subdomain part will be the host
instead.
If the rule matches a dict with the converted values is returned,
otherwise the return value is `None`.
:internal:
"""
if not self.build_only:
m = self._regex.search(path)
if m is not None:
groups = m.groupdict()
# we have a folder-like part of the url without a trailing
# slash and strict slashes enabled. raise an exception that
# tells the map to redirect to the same url but with a
# trailing slash
if self.strict_slashes and not self.is_leaf and \
not groups.pop('__suffix__'):
raise RequestSlash()
# if we are not in strict slashes mode we have to remove
# a __suffix__
elif not self.strict_slashes:
del groups['__suffix__']
result = {}
for name, value in iteritems(groups):
try:
value = self._converters[name].to_python(value)
except ValidationError:
return
result[str(name)] = value
if self.defaults:
result.update(self.defaults)
if self.alias and self.map.redirect_defaults:
raise RequestAliasRedirect(result)
return result
def build(self, values, append_unknown=True):
"""Assembles the relative url for that rule and the subdomain.
If building doesn't work for some reason `None` is returned.
:internal:
"""
tmp = []
add = tmp.append
processed = set(self.arguments)
for is_dynamic, data in self._trace:
if is_dynamic:
try:
add(self._converters[data].to_url(values[data]))
except ValidationError:
return
processed.add(data)
else:
add(url_quote(to_bytes(data, self.map.charset), safe='/:|+'))
domain_part, url = (u''.join(tmp)).split(u'|', 1)
if append_unknown:
query_vars = MultiDict(values)
for key in processed:
if key in query_vars:
del query_vars[key]
if query_vars:
url += u'?' + url_encode(query_vars, charset=self.map.charset,
sort=self.map.sort_parameters,
key=self.map.sort_key)
return domain_part, url
def provides_defaults_for(self, rule):
"""Check if this rule has defaults for a given rule.
:internal:
"""
return not self.build_only and self.defaults and \
self.endpoint == rule.endpoint and self != rule and \
self.arguments == rule.arguments
def suitable_for(self, values, method=None):
"""Check if the dict of values has enough data for url generation.
:internal:
"""
# if a method was given explicitly and that method is not supported
# by this rule, this rule is not suitable.
if method is not None and self.methods is not None \
and method not in self.methods:
return False
defaults = self.defaults or ()
# all arguments required must be either in the defaults dict or
# the value dictionary otherwise it's not suitable
for key in self.arguments:
if key not in defaults and key not in values:
return False
# in case defaults are given we ensure that either the value was
# skipped or the value is the same as the default value.
if defaults:
for key, value in iteritems(defaults):
if key in values and value != values[key]:
return False
return True
def match_compare_key(self):
"""The match compare key for sorting.
Current implementation:
1. rules without any arguments come first for performance
reasons only as we expect them to match faster and some
common ones usually don't have any arguments (index pages etc.)
2. The more complex rules come first so the second argument is the
negative length of the number of weights.
3. lastly we order by the actual weights.
:internal:
"""
return bool(self.arguments), -len(self._weights), self._weights
def build_compare_key(self):
"""The build compare key for sorting.
:internal:
"""
return self.alias and 1 or 0, -len(self.arguments), \
-len(self.defaults or ())
def __eq__(self, other):
return self.__class__ is other.__class__ and \
self._trace == other._trace
def __ne__(self, other):
return not self.__eq__(other)
def __str__(self):
return self.rule
@native_string_result
def __repr__(self):
if self.map is None:
return u'<%s (unbound)>' % self.__class__.__name__
tmp = []
for is_dynamic, data in self._trace:
if is_dynamic:
tmp.append(u'<%s>' % data)
else:
tmp.append(data)
return u'<%s %s%s -> %s>' % (
self.__class__.__name__,
repr((u''.join(tmp)).lstrip(u'|')).lstrip(u'u'),
self.methods is not None
and u' (%s)' % u', '.join(self.methods)
or u'',
self.endpoint
)
class BaseConverter(object):
"""Base class for all converters."""
regex = '[^/]+'
weight = 100
def __init__(self, map):
self.map = map
def to_python(self, value):
return value
def to_url(self, value):
return url_quote(value, charset=self.map.charset)
class UnicodeConverter(BaseConverter):
"""This converter is the default converter and accepts any string but
only one path segment. Thus the string cannot include a slash.
This is the default validator.
Example::
Rule('/pages/<page>'),
Rule('/<string(length=2):lang_code>')
:param map: the :class:`Map`.
:param minlength: the minimum length of the string. Must be greater
or equal 1.
:param maxlength: the maximum length of the string.
:param length: the exact length of the string.
"""
def __init__(self, map, minlength=1, maxlength=None, length=None):
BaseConverter.__init__(self, map)
if length is not None:
length = '{%d}' % int(length)
else:
if maxlength is None:
maxlength = ''
else:
maxlength = int(maxlength)
length = '{%s,%s}' % (
int(minlength),
maxlength
)
self.regex = '[^/]' + length
class AnyConverter(BaseConverter):
"""Matches one of the items provided. Items can either be Python
identifiers or strings::
Rule('/<any(about, help, imprint, class, "foo,bar"):page_name>')
:param map: the :class:`Map`.
:param items: this function accepts the possible items as positional
arguments.
"""
def __init__(self, map, *items):
BaseConverter.__init__(self, map)
self.regex = '(?:%s)' % '|'.join([re.escape(x) for x in items])
class PathConverter(BaseConverter):
"""Like the default :class:`UnicodeConverter`, but it also matches
slashes. This is useful for wikis and similar applications::
Rule('/<path:wikipage>')
Rule('/<path:wikipage>/edit')
:param map: the :class:`Map`.
"""
regex = '[^/].*?'
weight = 200
class NumberConverter(BaseConverter):
"""Baseclass for `IntegerConverter` and `FloatConverter`.
:internal:
"""
weight = 50
def __init__(self, map, fixed_digits=0, min=None, max=None):
BaseConverter.__init__(self, map)
self.fixed_digits = fixed_digits
self.min = min
self.max = max
def to_python(self, value):
if (self.fixed_digits and len(value) != self.fixed_digits):
raise ValidationError()
value = self.num_convert(value)
if (self.min is not None and value < self.min) or \
(self.max is not None and value > self.max):
raise ValidationError()
return value
def to_url(self, value):
value = self.num_convert(value)
if self.fixed_digits:
value = ('%%0%sd' % self.fixed_digits) % value
return str(value)
class IntegerConverter(NumberConverter):
"""This converter only accepts integer values::
Rule('/page/<int:page>')
This converter does not support negative values.
:param map: the :class:`Map`.
:param fixed_digits: the number of fixed digits in the URL. If you set
this to ``4`` for example, the application will
only match if the url looks like ``/0001/``. The
default is variable length.
:param min: the minimal value.
:param max: the maximal value.
"""
regex = r'\d+'
num_convert = int
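# Illustration (assuming `m` is an already-constructed Map): `fixed_digits`
# zero-pads the generated URL segment and rejects values of the wrong length
# when matching:
#
#     IntegerConverter(m, fixed_digits=4).to_url(7)          # -> '0007'
#     IntegerConverter(m, fixed_digits=4).to_python('0007')  # -> 7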
class FloatConverter(NumberConverter):
"""This converter only accepts floating point values::
Rule('/probability/<float:probability>')
This converter does not support negative values.
:param map: the :class:`Map`.
:param min: the minimal value.
:param max: the maximal value.
"""
regex = r'\d+\.\d+'
num_convert = float
def __init__(self, map, min=None, max=None):
NumberConverter.__init__(self, map, 0, min, max)
class UUIDConverter(BaseConverter):
"""This converter only accepts UUID strings::
Rule('/object/<uuid:identifier>')
.. versionadded:: 0.10
:param map: the :class:`Map`.
"""
regex = r'[A-Fa-f0-9]{8}-[A-Fa-f0-9]{4}-' \
r'[A-Fa-f0-9]{4}-[A-Fa-f0-9]{4}-[A-Fa-f0-9]{12}'
def to_python(self, value):
return uuid.UUID(value)
def to_url(self, value):
return str(value)
#: the default converter mapping for the map.
DEFAULT_CONVERTERS = {
'default': UnicodeConverter,
'string': UnicodeConverter,
'any': AnyConverter,
'path': PathConverter,
'int': IntegerConverter,
'float': FloatConverter,
'uuid': UUIDConverter,
}
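# Hedged sketch (a hypothetical converter, not shipped with this module):
# custom converters follow the BaseConverter contract (a `regex` used for
# matching plus to_python/to_url for conversion) and are registered per Map
# through the `converters` argument:
#
#     class BooleanConverter(BaseConverter):
#         regex = '(?:yes|no)'
#
#         def to_python(self, value):
#             return value == 'yes'
#
#         def to_url(self, value):
#             return 'yes' if value else 'no'
#
#     # Map([...], converters={'bool': BooleanConverter}) then allows rules
#     # such as Rule('/feature/<bool:enabled>').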
class Map(object):
"""The map class stores all the URL rules and some configuration
parameters. Some of the configuration values are only stored on the
`Map` instance since those affect all rules, others are just defaults
and can be overridden for each rule. Note that you have to specify all
arguments besides the `rules` as keyword arguments!
:param rules: sequence of url rules for this map.
:param default_subdomain: The default subdomain for rules without a
subdomain defined.
:param charset: charset of the url. defaults to ``"utf-8"``
:param strict_slashes: Take care of trailing slashes.
:param redirect_defaults: This will redirect to the default rule if it
wasn't visited that way. This helps creating
unique URLs.
:param converters: A dict of converters that adds additional converters
to the list of converters. If you redefine one
converter this will override the original one.
:param sort_parameters: If set to `True` the url parameters are sorted.
See `url_encode` for more details.
:param sort_key: The sort key function for `url_encode`.
:param encoding_errors: the error method to use for decoding
:param host_matching: if set to `True` it enables the host matching
feature and disables the subdomain one. If
enabled the `host` parameter to rules is used
instead of the `subdomain` one.
.. versionadded:: 0.5
`sort_parameters` and `sort_key` were added.
.. versionadded:: 0.7
`encoding_errors` and `host_matching` were added.
"""
#: .. versionadded:: 0.6
#: a dict of default converters to be used.
default_converters = ImmutableDict(DEFAULT_CONVERTERS)
def __init__(self, rules=None, default_subdomain='', charset='utf-8',
strict_slashes=True, redirect_defaults=True,
converters=None, sort_parameters=False, sort_key=None,
encoding_errors='replace', host_matching=False):
self._rules = []
self._rules_by_endpoint = {}
self._remap = True
self._remap_lock = Lock()
self.default_subdomain = default_subdomain
self.charset = charset
self.encoding_errors = encoding_errors
self.strict_slashes = strict_slashes
self.redirect_defaults = redirect_defaults
self.host_matching = host_matching
self.converters = self.default_converters.copy()
if converters:
self.converters.update(converters)
self.sort_parameters = sort_parameters
self.sort_key = sort_key
for rulefactory in rules or ():
self.add(rulefactory)
def is_endpoint_expecting(self, endpoint, *arguments):
"""Iterate over all rules and check if the endpoint expects
the arguments provided. This is for example useful if you have
some URLs that expect a language code and others that do not and
you want to wrap the builder a bit so that the current language
code is automatically added if not provided but endpoints expect
it.
:param endpoint: the endpoint to check.
:param arguments: this function accepts one or more arguments
as positional arguments. Each one of them is
checked.
"""
self.update()
arguments = set(arguments)
for rule in self._rules_by_endpoint[endpoint]:
if arguments.issubset(rule.arguments):
return True
return False
def iter_rules(self, endpoint=None):
"""Iterate over all rules or the rules of an endpoint.
:param endpoint: if provided only the rules for that endpoint
are returned.
:return: an iterator
"""
self.update()
if endpoint is not None:
return iter(self._rules_by_endpoint[endpoint])
return iter(self._rules)
def add(self, rulefactory):
"""Add a new rule or factory to the map and bind it. Requires that the
rule is not bound to another map.
:param rulefactory: a :class:`Rule` or :class:`RuleFactory`
"""
for rule in rulefactory.get_rules(self):
rule.bind(self)
self._rules.append(rule)
self._rules_by_endpoint.setdefault(rule.endpoint, []).append(rule)
self._remap = True
def bind(self, server_name, script_name=None, subdomain=None,
url_scheme='http', default_method='GET', path_info=None,
query_args=None):
"""Return a new :class:`MapAdapter` with the details specified to the
call. Note that `script_name` will default to ``'/'`` if not further
specified or `None`. The `server_name` at least is a requirement
because the HTTP RFC requires absolute URLs for redirects and so all
redirect exceptions raised by Werkzeug will contain the full canonical
URL.
If no path_info is passed to :meth:`match` it will use the default path
info passed to bind. While this doesn't really make sense for
manual bind calls, it's useful if you bind a map to a WSGI
environment which already contains the path info.
`subdomain` will default to the `default_subdomain` for this map if
        not defined. If there is no `default_subdomain` you cannot use the
subdomain feature.
.. versionadded:: 0.7
`query_args` added
.. versionadded:: 0.8
`query_args` can now also be a string.
"""
server_name = server_name.lower()
if self.host_matching:
if subdomain is not None:
raise RuntimeError('host matching enabled and a '
'subdomain was provided')
elif subdomain is None:
subdomain = self.default_subdomain
if script_name is None:
script_name = '/'
try:
server_name = _encode_idna(server_name)
except UnicodeError:
raise BadHost()
return MapAdapter(self, server_name, script_name, subdomain,
url_scheme, path_info, default_method, query_args)
def bind_to_environ(self, environ, server_name=None, subdomain=None):
"""Like :meth:`bind` but you can pass it an WSGI environment and it
will fetch the information from that dictionary. Note that because of
limitations in the protocol there is no way to get the current
subdomain and real `server_name` from the environment. If you don't
provide it, Werkzeug will use `SERVER_NAME` and `SERVER_PORT` (or
        `HTTP_HOST` if provided) as the used `server_name`, with the
        subdomain feature disabled.
If `subdomain` is `None` but an environment and a server name is
provided it will calculate the current subdomain automatically.
Example: `server_name` is ``'example.com'`` and the `SERVER_NAME`
in the wsgi `environ` is ``'staging.dev.example.com'`` the calculated
subdomain will be ``'staging.dev'``.
If the object passed as environ has an environ attribute, the value of
this attribute is used instead. This allows you to pass request
objects. Additionally `PATH_INFO` added as a default of the
:class:`MapAdapter` so that you don't have to pass the path info to
the match method.
.. versionchanged:: 0.5
previously this method accepted a bogus `calculate_subdomain`
parameter that did not have any effect. It was removed because
of that.
.. versionchanged:: 0.8
This will no longer raise a ValueError when an unexpected server
name was passed.
:param environ: a WSGI environment.
:param server_name: an optional server name hint (see above).
:param subdomain: optionally the current subdomain (see above).
"""
environ = _get_environ(environ)
if 'HTTP_HOST' in environ:
wsgi_server_name = environ['HTTP_HOST']
if environ['wsgi.url_scheme'] == 'http' \
and wsgi_server_name.endswith(':80'):
wsgi_server_name = wsgi_server_name[:-3]
elif environ['wsgi.url_scheme'] == 'https' \
and wsgi_server_name.endswith(':443'):
wsgi_server_name = wsgi_server_name[:-4]
else:
wsgi_server_name = environ['SERVER_NAME']
if (environ['wsgi.url_scheme'], environ['SERVER_PORT']) not \
in (('https', '443'), ('http', '80')):
wsgi_server_name += ':' + environ['SERVER_PORT']
wsgi_server_name = wsgi_server_name.lower()
if server_name is None:
server_name = wsgi_server_name
else:
server_name = server_name.lower()
if subdomain is None and not self.host_matching:
cur_server_name = wsgi_server_name.split('.')
real_server_name = server_name.split('.')
offset = -len(real_server_name)
if cur_server_name[offset:] != real_server_name:
# This can happen even with valid configs if the server was
                # accessed directly by IP address under some situations.
# Instead of raising an exception like in Werkzeug 0.7 or
# earlier we go by an invalid subdomain which will result
# in a 404 error on matching.
subdomain = '<invalid>'
else:
subdomain = '.'.join(filter(None, cur_server_name[:offset]))
def _get_wsgi_string(name):
val = environ.get(name)
if val is not None:
return wsgi_decoding_dance(val, self.charset)
script_name = _get_wsgi_string('SCRIPT_NAME')
path_info = _get_wsgi_string('PATH_INFO')
query_args = _get_wsgi_string('QUERY_STRING')
return Map.bind(self, server_name, script_name,
subdomain, environ['wsgi.url_scheme'],
environ['REQUEST_METHOD'], path_info,
query_args=query_args)
def update(self):
"""Called before matching and building to keep the compiled rules
in the correct order after things changed.
"""
if not self._remap:
return
with self._remap_lock:
if not self._remap:
return
self._rules.sort(key=lambda x: x.match_compare_key())
for rules in itervalues(self._rules_by_endpoint):
rules.sort(key=lambda x: x.build_compare_key())
self._remap = False
def __repr__(self):
rules = self.iter_rules()
return '%s(%s)' % (self.__class__.__name__, pformat(list(rules)))
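# ---------------------------------------------------------------------------
# Illustrative sketches (not part of the original Werkzeug code, never called
# by the library).  The rules, endpoints, hosts and environ values below are
# made-up placeholders; they only demonstrate two behaviours described in the
# docstrings above: `Map.is_endpoint_expecting` and the subdomain calculation
# performed by `Map.bind_to_environ`.
def _example_is_endpoint_expecting():
    url_map = Map([
        Rule('/<lang_code>/about', endpoint='about'),
        Rule('/robots.txt', endpoint='robots'),
    ])
    # 'about' takes a language code, 'robots' does not.
    assert url_map.is_endpoint_expecting('about', 'lang_code')
    assert not url_map.is_endpoint_expecting('robots', 'lang_code')
    return url_map
def _example_subdomain_from_environ():
    url_map = Map([Rule('/', endpoint='index')])
    environ = {
        'HTTP_HOST': 'staging.dev.example.com',
        'PATH_INFO': '/',
        'SCRIPT_NAME': '',
        'QUERY_STRING': '',
        'REQUEST_METHOD': 'GET',
        'wsgi.url_scheme': 'http',
    }
    # With server_name='example.com' the adapter derives the remaining host
    # parts as the subdomain, exactly as described in bind_to_environ.
    urls = url_map.bind_to_environ(environ, server_name='example.com')
    assert urls.subdomain == 'staging.dev'
    return urls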
class MapAdapter(object):
"""Returned by :meth:`Map.bind` or :meth:`Map.bind_to_environ` and does
the URL matching and building based on runtime information.
"""
def __init__(self, map, server_name, script_name, subdomain,
url_scheme, path_info, default_method, query_args=None):
self.map = map
self.server_name = to_unicode(server_name)
script_name = to_unicode(script_name)
if not script_name.endswith(u'/'):
script_name += u'/'
self.script_name = script_name
self.subdomain = to_unicode(subdomain)
self.url_scheme = to_unicode(url_scheme)
self.path_info = to_unicode(path_info)
self.default_method = to_unicode(default_method)
self.query_args = query_args
def dispatch(self, view_func, path_info=None, method=None,
catch_http_exceptions=False):
"""Does the complete dispatching process. `view_func` is called with
the endpoint and a dict with the values for the view. It should
look up the view function, call it, and return a response object
or WSGI application. http exceptions are not caught by default
so that applications can display nicer error messages by just
catching them by hand. If you want to stick with the default
error messages you can pass it ``catch_http_exceptions=True`` and
it will catch the http exceptions.
Here a small example for the dispatch usage::
from werkzeug.wrappers import Request, Response
from werkzeug.wsgi import responder
from werkzeug.routing import Map, Rule
def on_index(request):
return Response('Hello from the index')
url_map = Map([Rule('/', endpoint='index')])
views = {'index': on_index}
@responder
def application(environ, start_response):
request = Request(environ)
urls = url_map.bind_to_environ(environ)
return urls.dispatch(lambda e, v: views[e](request, **v),
catch_http_exceptions=True)
Keep in mind that this method might return exception objects, too, so
use :class:`Response.force_type` to get a response object.
:param view_func: a function that is called with the endpoint as
first argument and the value dict as second. Has
to dispatch to the actual view function with this
information. (see above)
:param path_info: the path info to use for matching. Overrides the
path info specified on binding.
:param method: the HTTP method used for matching. Overrides the
method specified on binding.
:param catch_http_exceptions: set to `True` to catch any of the
werkzeug :class:`HTTPException`\s.
"""
try:
try:
endpoint, args = self.match(path_info, method)
except RequestRedirect as e:
return e
return view_func(endpoint, args)
except HTTPException as e:
if catch_http_exceptions:
return e
raise
def match(self, path_info=None, method=None, return_rule=False,
query_args=None):
"""The usage is simple: you just pass the match method the current
path info as well as the method (which defaults to `GET`). The
following things can then happen:
- you receive a `NotFound` exception that indicates that no URL is
matching. A `NotFound` exception is also a WSGI application you
          can call to get a default 'page not found' page (happens to be the
same object as `werkzeug.exceptions.NotFound`)
- you receive a `MethodNotAllowed` exception that indicates that there
is a match for this URL but not for the current request method.
This is useful for RESTful applications.
- you receive a `RequestRedirect` exception with a `new_url`
          attribute. This exception is used to notify you about a redirect
          that Werkzeug requests from your WSGI application. This is for
          example the case if you request ``/foo`` although the correct URL
          is ``/foo/``. You can use the `RequestRedirect` instance as a
          response-like object, similar to all other subclasses of
          `HTTPException`.
- you get a tuple in the form ``(endpoint, arguments)`` if there is
a match (unless `return_rule` is True, in which case you get a tuple
in the form ``(rule, arguments)``)
If the path info is not passed to the match method the default path
info of the map is used (defaults to the root URL if not defined
explicitly).
All of the exceptions raised are subclasses of `HTTPException` so they
        can be used as WSGI responses. They will all render generic error or
redirect pages.
Here is a small example for matching:
>>> m = Map([
... Rule('/', endpoint='index'),
... Rule('/downloads/', endpoint='downloads/index'),
... Rule('/downloads/<int:id>', endpoint='downloads/show')
... ])
>>> urls = m.bind("example.com", "/")
>>> urls.match("/", "GET")
('index', {})
>>> urls.match("/downloads/42")
('downloads/show', {'id': 42})
And here is what happens on redirect and missing URLs:
>>> urls.match("/downloads")
Traceback (most recent call last):
...
RequestRedirect: http://example.com/downloads/
>>> urls.match("/missing")
Traceback (most recent call last):
...
NotFound: 404 Not Found
:param path_info: the path info to use for matching. Overrides the
path info specified on binding.
:param method: the HTTP method used for matching. Overrides the
method specified on binding.
:param return_rule: return the rule that matched instead of just the
endpoint (defaults to `False`).
:param query_args: optional query arguments that are used for
automatic redirects as string or dictionary. It's
currently not possible to use the query arguments
for URL matching.
.. versionadded:: 0.6
`return_rule` was added.
.. versionadded:: 0.7
`query_args` was added.
.. versionchanged:: 0.8
`query_args` can now also be a string.
"""
self.map.update()
if path_info is None:
path_info = self.path_info
else:
path_info = to_unicode(path_info, self.map.charset)
if query_args is None:
query_args = self.query_args
method = (method or self.default_method).upper()
path = u'%s|%s' % (
self.map.host_matching and self.server_name or self.subdomain,
path_info and '/%s' % path_info.lstrip('/')
)
have_match_for = set()
for rule in self.map._rules:
try:
rv = rule.match(path)
except RequestSlash:
raise RequestRedirect(self.make_redirect_url(
url_quote(path_info, self.map.charset,
safe='/:|+') + '/', query_args))
except RequestAliasRedirect as e:
raise RequestRedirect(self.make_alias_redirect_url(
path, rule.endpoint, e.matched_values, method, query_args))
if rv is None:
continue
if rule.methods is not None and method not in rule.methods:
have_match_for.update(rule.methods)
continue
if self.map.redirect_defaults:
redirect_url = self.get_default_redirect(rule, method, rv,
query_args)
if redirect_url is not None:
raise RequestRedirect(redirect_url)
if rule.redirect_to is not None:
if isinstance(rule.redirect_to, string_types):
def _handle_match(match):
value = rv[match.group(1)]
return rule._converters[match.group(1)].to_url(value)
redirect_url = _simple_rule_re.sub(_handle_match,
rule.redirect_to)
else:
redirect_url = rule.redirect_to(self, **rv)
raise RequestRedirect(str(url_join('%s://%s%s%s' % (
self.url_scheme or 'http',
self.subdomain and self.subdomain + '.' or '',
self.server_name,
self.script_name
), redirect_url)))
if return_rule:
return rule, rv
else:
return rule.endpoint, rv
if have_match_for:
raise MethodNotAllowed(valid_methods=list(have_match_for))
raise NotFound()
def test(self, path_info=None, method=None):
"""Test if a rule would match. Works like `match` but returns `True`
if the URL matches, or `False` if it does not exist.
:param path_info: the path info to use for matching. Overrides the
path info specified on binding.
:param method: the HTTP method used for matching. Overrides the
method specified on binding.
"""
try:
self.match(path_info, method)
except RequestRedirect:
pass
except HTTPException:
return False
return True
def allowed_methods(self, path_info=None):
"""Returns the valid methods that match for a given path.
.. versionadded:: 0.7
"""
try:
self.match(path_info, method='--')
except MethodNotAllowed as e:
return e.valid_methods
        except HTTPException:
            pass
return []
def get_host(self, domain_part):
"""Figures out the full host name for the given domain part. The
domain part is a subdomain in case host matching is disabled or
a full host name.
"""
if self.map.host_matching:
if domain_part is None:
return self.server_name
return to_unicode(domain_part, 'ascii')
subdomain = domain_part
if subdomain is None:
subdomain = self.subdomain
else:
subdomain = to_unicode(subdomain, 'ascii')
return (subdomain and subdomain + u'.' or u'') + self.server_name
def get_default_redirect(self, rule, method, values, query_args):
"""A helper that returns the URL to redirect to if it finds one.
This is used for default redirecting only.
:internal:
"""
assert self.map.redirect_defaults
for r in self.map._rules_by_endpoint[rule.endpoint]:
# every rule that comes after this one, including ourself
# has a lower priority for the defaults. We order the ones
# with the highest priority up for building.
if r is rule:
break
if r.provides_defaults_for(rule) and \
r.suitable_for(values, method):
values.update(r.defaults)
domain_part, path = r.build(values)
return self.make_redirect_url(
path, query_args, domain_part=domain_part)
def encode_query_args(self, query_args):
if not isinstance(query_args, string_types):
query_args = url_encode(query_args, self.map.charset)
return query_args
def make_redirect_url(self, path_info, query_args=None, domain_part=None):
"""Creates a redirect URL.
:internal:
"""
suffix = ''
if query_args:
suffix = '?' + self.encode_query_args(query_args)
return str('%s://%s/%s%s' % (
self.url_scheme or 'http',
self.get_host(domain_part),
posixpath.join(self.script_name[:-1].lstrip('/'),
path_info.lstrip('/')),
suffix
))
def make_alias_redirect_url(self, path, endpoint, values, method, query_args):
"""Internally called to make an alias redirect URL."""
url = self.build(endpoint, values, method, append_unknown=False,
force_external=True)
if query_args:
url += '?' + self.encode_query_args(query_args)
assert url != path, 'detected invalid alias setting. No canonical ' \
'URL found'
return url
def _partial_build(self, endpoint, values, method, append_unknown):
"""Helper for :meth:`build`. Returns subdomain and path for the
rule that accepts this endpoint, values and method.
:internal:
"""
# in case the method is none, try with the default method first
if method is None:
rv = self._partial_build(endpoint, values, self.default_method,
append_unknown)
if rv is not None:
return rv
# default method did not match or a specific method is passed,
# check all and go with first result.
for rule in self.map._rules_by_endpoint.get(endpoint, ()):
if rule.suitable_for(values, method):
rv = rule.build(values, append_unknown)
if rv is not None:
return rv
def build(self, endpoint, values=None, method=None, force_external=False,
append_unknown=True):
"""Building URLs works pretty much the other way round. Instead of
`match` you call `build` and pass it the endpoint and a dict of
arguments for the placeholders.
The `build` function also accepts an argument called `force_external`
which, if you set it to `True` will force external URLs. Per default
external URLs (include the server name) will only be used if the
target URL is on a different subdomain.
>>> m = Map([
... Rule('/', endpoint='index'),
... Rule('/downloads/', endpoint='downloads/index'),
... Rule('/downloads/<int:id>', endpoint='downloads/show')
... ])
>>> urls = m.bind("example.com", "/")
>>> urls.build("index", {})
'/'
>>> urls.build("downloads/show", {'id': 42})
'/downloads/42'
>>> urls.build("downloads/show", {'id': 42}, force_external=True)
'http://example.com/downloads/42'
Because URLs cannot contain non ASCII data you will always get
bytestrings back. Non ASCII characters are urlencoded with the
charset defined on the map instance.
Additional values are converted to unicode and appended to the URL as
URL querystring parameters:
>>> urls.build("index", {'q': 'My Searchstring'})
'/?q=My+Searchstring'
When processing those additional values, lists are furthermore
interpreted as multiple values (as per
:py:class:`werkzeug.datastructures.MultiDict`):
>>> urls.build("index", {'q': ['a', 'b', 'c']})
'/?q=a&q=b&q=c'
If a rule does not exist when building a `BuildError` exception is
raised.
The build method accepts an argument called `method` which allows you
to specify the method you want to have an URL built for if you have
different methods for the same endpoint specified.
.. versionadded:: 0.6
the `append_unknown` parameter was added.
:param endpoint: the endpoint of the URL to build.
:param values: the values for the URL to build. Unhandled values are
appended to the URL as query parameters.
:param method: the HTTP method for the rule if there are different
URLs for different methods on the same endpoint.
:param force_external: enforce full canonical external URLs. If the URL
scheme is not provided, this will generate
a protocol-relative URL.
:param append_unknown: unknown parameters are appended to the generated
URL as query string argument. Disable this
if you want the builder to ignore those.
"""
self.map.update()
if values:
if isinstance(values, MultiDict):
valueiter = iteritems(values, multi=True)
else:
valueiter = iteritems(values)
values = dict((k, v) for k, v in valueiter if v is not None)
else:
values = {}
rv = self._partial_build(endpoint, values, method, append_unknown)
if rv is None:
raise BuildError(endpoint, values, method, self)
domain_part, path = rv
host = self.get_host(domain_part)
# shortcut this.
if not force_external and (
(self.map.host_matching and host == self.server_name) or
(not self.map.host_matching and domain_part == self.subdomain)
):
return str(url_join(self.script_name, './' + path.lstrip('/')))
return str('%s//%s%s/%s' % (
self.url_scheme + ':' if self.url_scheme else '',
host,
self.script_name[:-1],
path.lstrip('/')
))
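# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original Werkzeug code, never called by
# it): the `MapAdapter.test` and `MapAdapter.allowed_methods` helpers
# documented above carry no doctests, so this made-up rule shows their
# behaviour.
def _example_test_and_allowed_methods():
    url_map = Map([Rule('/items', endpoint='items', methods=['GET', 'POST'])])
    urls = url_map.bind('example.com')
    assert urls.test('/items', 'GET')          # a rule matches this URL/method
    assert not urls.test('/missing')           # no rule matches at all
    assert 'POST' in urls.allowed_methods('/items')
    return urls.allowed_methods('/items')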
| mit |
Qalthos/ansible | test/units/plugins/connection/test_winrm.py | 47 | 15343 | # -*- coding: utf-8 -*-
# (c) 2018, Jordan Borean <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import pytest
from io import StringIO
from units.compat.mock import patch, MagicMock
from ansible.errors import AnsibleConnectionFailure
from ansible.module_utils._text import to_bytes
from ansible.playbook.play_context import PlayContext
from ansible.plugins.loader import connection_loader
from ansible.plugins.connection import winrm
pytest.importorskip("winrm")
class TestConnectionWinRM(object):
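    # Each entry below is a 5-tuple consumed by test_set_options():
    #   (PlayContext attribute overrides, host/task vars passed as
    #    var_options, direct plugin options, expected attribute values on the
    #    built connection, simulated value of winrm.HAVE_KERBEROS).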
OPTIONS_DATA = (
# default options
(
{},
{'_extras': {}},
{},
{
'_kerb_managed': False,
'_kinit_cmd': 'kinit',
'_winrm_connection_timeout': None,
'_winrm_host': 'inventory_hostname',
'_winrm_kwargs': {'username': None, 'password': ''},
'_winrm_pass': '',
'_winrm_path': '/wsman',
'_winrm_port': 5986,
'_winrm_scheme': 'https',
'_winrm_transport': ['ssl'],
'_winrm_user': None
},
False
),
# http through port
(
{},
{'_extras': {}, 'ansible_port': 5985},
{},
{
'_winrm_kwargs': {'username': None, 'password': ''},
'_winrm_port': 5985,
'_winrm_scheme': 'http',
'_winrm_transport': ['plaintext'],
},
False
),
# kerberos user with kerb present
(
{},
{'_extras': {}, 'ansible_user': '[email protected]'},
{},
{
'_kerb_managed': False,
'_kinit_cmd': 'kinit',
'_winrm_kwargs': {'username': '[email protected]',
'password': ''},
'_winrm_pass': '',
'_winrm_transport': ['kerberos', 'ssl'],
'_winrm_user': '[email protected]'
},
True
),
# kerberos user without kerb present
(
{},
{'_extras': {}, 'ansible_user': '[email protected]'},
{},
{
'_kerb_managed': False,
'_kinit_cmd': 'kinit',
'_winrm_kwargs': {'username': '[email protected]',
'password': ''},
'_winrm_pass': '',
'_winrm_transport': ['ssl'],
'_winrm_user': '[email protected]'
},
False
),
# kerberos user with managed ticket (implicit)
(
{'password': 'pass'},
{'_extras': {}, 'ansible_user': '[email protected]'},
{},
{
'_kerb_managed': True,
'_kinit_cmd': 'kinit',
'_winrm_kwargs': {'username': '[email protected]',
'password': 'pass'},
'_winrm_pass': 'pass',
'_winrm_transport': ['kerberos', 'ssl'],
'_winrm_user': '[email protected]'
},
True
),
# kerb with managed ticket (explicit)
(
{'password': 'pass'},
{'_extras': {}, 'ansible_user': '[email protected]',
'ansible_winrm_kinit_mode': 'managed'},
{},
{
'_kerb_managed': True,
},
True
),
# kerb with unmanaged ticket (explicit))
(
{'password': 'pass'},
{'_extras': {}, 'ansible_user': '[email protected]',
'ansible_winrm_kinit_mode': 'manual'},
{},
{
'_kerb_managed': False,
},
True
),
# transport override (single)
(
{},
{'_extras': {}, 'ansible_user': '[email protected]',
'ansible_winrm_transport': 'ntlm'},
{},
{
'_winrm_kwargs': {'username': '[email protected]',
'password': ''},
'_winrm_pass': '',
'_winrm_transport': ['ntlm'],
},
False
),
# transport override (list)
(
{},
{'_extras': {}, 'ansible_user': '[email protected]',
'ansible_winrm_transport': ['ntlm', 'certificate']},
{},
{
'_winrm_kwargs': {'username': '[email protected]',
'password': ''},
'_winrm_pass': '',
'_winrm_transport': ['ntlm', 'certificate'],
},
False
),
# winrm extras
(
{},
{'_extras': {'ansible_winrm_server_cert_validation': 'ignore',
'ansible_winrm_service': 'WSMAN'}},
{},
{
'_winrm_kwargs': {'username': None, 'password': '',
'server_cert_validation': 'ignore',
'service': 'WSMAN'},
},
False
),
# direct override
(
{},
{'_extras': {}, 'ansible_winrm_connection_timeout': 5},
{'connection_timeout': 10},
{
'_winrm_connection_timeout': 10,
},
False
),
# user comes from option not play context
(
{'username': 'user1'},
{'_extras': {}, 'ansible_user': 'user2'},
{},
{
'_winrm_user': 'user2',
'_winrm_kwargs': {'username': 'user2', 'password': ''}
},
False
)
)
# pylint bug: https://github.com/PyCQA/pylint/issues/511
# pylint: disable=undefined-variable
@pytest.mark.parametrize('play, options, direct, expected, kerb',
((p, o, d, e, k) for p, o, d, e, k in OPTIONS_DATA))
def test_set_options(self, play, options, direct, expected, kerb):
winrm.HAVE_KERBEROS = kerb
pc = PlayContext()
for attr, value in play.items():
setattr(pc, attr, value)
new_stdin = StringIO()
conn = connection_loader.get('winrm', pc, new_stdin)
conn.set_options(var_options=options, direct=direct)
conn._build_winrm_kwargs()
for attr, expected in expected.items():
actual = getattr(conn, attr)
assert actual == expected, \
"winrm attr '%s', actual '%s' != expected '%s'"\
% (attr, actual, expected)
class TestWinRMKerbAuth(object):
@pytest.mark.parametrize('options, expected', [
[{"_extras": {}},
(["kinit", "user@domain"],)],
[{"_extras": {}, 'ansible_winrm_kinit_cmd': 'kinit2'},
(["kinit2", "user@domain"],)],
[{"_extras": {'ansible_winrm_kerberos_delegation': True}},
(["kinit", "-f", "user@domain"],)],
])
def test_kinit_success_subprocess(self, monkeypatch, options, expected):
def mock_communicate(input=None, timeout=None):
return b"", b""
mock_popen = MagicMock()
mock_popen.return_value.communicate = mock_communicate
mock_popen.return_value.returncode = 0
monkeypatch.setattr("subprocess.Popen", mock_popen)
winrm.HAS_PEXPECT = False
pc = PlayContext()
new_stdin = StringIO()
conn = connection_loader.get('winrm', pc, new_stdin)
conn.set_options(var_options=options)
conn._build_winrm_kwargs()
conn._kerb_auth("user@domain", "pass")
mock_calls = mock_popen.mock_calls
assert len(mock_calls) == 1
assert mock_calls[0][1] == expected
actual_env = mock_calls[0][2]['env']
assert list(actual_env.keys()) == ['KRB5CCNAME']
assert actual_env['KRB5CCNAME'].startswith("FILE:/")
@pytest.mark.parametrize('options, expected', [
[{"_extras": {}},
("kinit", ["user@domain"],)],
[{"_extras": {}, 'ansible_winrm_kinit_cmd': 'kinit2'},
("kinit2", ["user@domain"],)],
[{"_extras": {'ansible_winrm_kerberos_delegation': True}},
("kinit", ["-f", "user@domain"],)],
])
def test_kinit_success_pexpect(self, monkeypatch, options, expected):
pytest.importorskip("pexpect")
mock_pexpect = MagicMock()
mock_pexpect.return_value.exitstatus = 0
monkeypatch.setattr("pexpect.spawn", mock_pexpect)
winrm.HAS_PEXPECT = True
pc = PlayContext()
new_stdin = StringIO()
conn = connection_loader.get('winrm', pc, new_stdin)
conn.set_options(var_options=options)
conn._build_winrm_kwargs()
conn._kerb_auth("user@domain", "pass")
mock_calls = mock_pexpect.mock_calls
assert mock_calls[0][1] == expected
actual_env = mock_calls[0][2]['env']
assert list(actual_env.keys()) == ['KRB5CCNAME']
assert actual_env['KRB5CCNAME'].startswith("FILE:/")
assert mock_calls[0][2]['echo'] is False
assert mock_calls[1][0] == "().expect"
assert mock_calls[1][1] == (".*:",)
assert mock_calls[2][0] == "().sendline"
assert mock_calls[2][1] == ("pass",)
assert mock_calls[3][0] == "().read"
assert mock_calls[4][0] == "().wait"
def test_kinit_with_missing_executable_subprocess(self, monkeypatch):
expected_err = "[Errno 2] No such file or directory: " \
"'/fake/kinit': '/fake/kinit'"
mock_popen = MagicMock(side_effect=OSError(expected_err))
monkeypatch.setattr("subprocess.Popen", mock_popen)
winrm.HAS_PEXPECT = False
pc = PlayContext()
new_stdin = StringIO()
conn = connection_loader.get('winrm', pc, new_stdin)
options = {"_extras": {}, "ansible_winrm_kinit_cmd": "/fake/kinit"}
conn.set_options(var_options=options)
conn._build_winrm_kwargs()
with pytest.raises(AnsibleConnectionFailure) as err:
conn._kerb_auth("user@domain", "pass")
assert str(err.value) == "Kerberos auth failure when calling " \
"kinit cmd '/fake/kinit': %s" % expected_err
def test_kinit_with_missing_executable_pexpect(self, monkeypatch):
pexpect = pytest.importorskip("pexpect")
expected_err = "The command was not found or was not " \
"executable: /fake/kinit"
mock_pexpect = \
MagicMock(side_effect=pexpect.ExceptionPexpect(expected_err))
monkeypatch.setattr("pexpect.spawn", mock_pexpect)
winrm.HAS_PEXPECT = True
pc = PlayContext()
new_stdin = StringIO()
conn = connection_loader.get('winrm', pc, new_stdin)
options = {"_extras": {}, "ansible_winrm_kinit_cmd": "/fake/kinit"}
conn.set_options(var_options=options)
conn._build_winrm_kwargs()
with pytest.raises(AnsibleConnectionFailure) as err:
conn._kerb_auth("user@domain", "pass")
assert str(err.value) == "Kerberos auth failure when calling " \
"kinit cmd '/fake/kinit': %s" % expected_err
def test_kinit_error_subprocess(self, monkeypatch):
expected_err = "kinit: krb5_parse_name: " \
"Configuration file does not specify default realm"
def mock_communicate(input=None, timeout=None):
return b"", to_bytes(expected_err)
mock_popen = MagicMock()
mock_popen.return_value.communicate = mock_communicate
mock_popen.return_value.returncode = 1
monkeypatch.setattr("subprocess.Popen", mock_popen)
winrm.HAS_PEXPECT = False
pc = PlayContext()
new_stdin = StringIO()
conn = connection_loader.get('winrm', pc, new_stdin)
conn.set_options(var_options={"_extras": {}})
conn._build_winrm_kwargs()
with pytest.raises(AnsibleConnectionFailure) as err:
conn._kerb_auth("invaliduser", "pass")
assert str(err.value) == \
"Kerberos auth failure for principal invaliduser with " \
"subprocess: %s" % (expected_err)
def test_kinit_error_pexpect(self, monkeypatch):
pytest.importorskip("pexpect")
expected_err = "Configuration file does not specify default realm"
mock_pexpect = MagicMock()
mock_pexpect.return_value.expect = MagicMock(side_effect=OSError)
mock_pexpect.return_value.read.return_value = to_bytes(expected_err)
mock_pexpect.return_value.exitstatus = 1
monkeypatch.setattr("pexpect.spawn", mock_pexpect)
winrm.HAS_PEXPECT = True
pc = PlayContext()
new_stdin = StringIO()
conn = connection_loader.get('winrm', pc, new_stdin)
conn.set_options(var_options={"_extras": {}})
conn._build_winrm_kwargs()
with pytest.raises(AnsibleConnectionFailure) as err:
conn._kerb_auth("invaliduser", "pass")
assert str(err.value) == \
"Kerberos auth failure for principal invaliduser with " \
"pexpect: %s" % (expected_err)
def test_kinit_error_pass_in_output_subprocess(self, monkeypatch):
def mock_communicate(input=None, timeout=None):
return b"", b"Error with kinit\n" + input
mock_popen = MagicMock()
mock_popen.return_value.communicate = mock_communicate
mock_popen.return_value.returncode = 1
monkeypatch.setattr("subprocess.Popen", mock_popen)
winrm.HAS_PEXPECT = False
pc = PlayContext()
new_stdin = StringIO()
conn = connection_loader.get('winrm', pc, new_stdin)
conn.set_options(var_options={"_extras": {}})
conn._build_winrm_kwargs()
with pytest.raises(AnsibleConnectionFailure) as err:
conn._kerb_auth("username", "password")
assert str(err.value) == \
"Kerberos auth failure for principal username with subprocess: " \
"Error with kinit\n<redacted>"
def test_kinit_error_pass_in_output_pexpect(self, monkeypatch):
pytest.importorskip("pexpect")
mock_pexpect = MagicMock()
mock_pexpect.return_value.expect = MagicMock()
mock_pexpect.return_value.read.return_value = \
b"Error with kinit\npassword\n"
mock_pexpect.return_value.exitstatus = 1
monkeypatch.setattr("pexpect.spawn", mock_pexpect)
winrm.HAS_PEXPECT = True
        pc = PlayContext()
new_stdin = StringIO()
conn = connection_loader.get('winrm', pc, new_stdin)
conn.set_options(var_options={"_extras": {}})
conn._build_winrm_kwargs()
with pytest.raises(AnsibleConnectionFailure) as err:
conn._kerb_auth("username", "password")
assert str(err.value) == \
"Kerberos auth failure for principal username with pexpect: " \
"Error with kinit\n<redacted>"
| gpl-3.0 |
JCROM-Android/jcrom_external_chromium_org | third_party/tlslite/tlslite/utils/rijndael.py | 359 | 11341 | """
A pure python (slow) implementation of rijndael with a decent interface
To include -
from rijndael import rijndael
To do a key setup -
r = rijndael(key, block_size = 16)
key must be a string of length 16, 24, or 32
blocksize must be 16, 24, or 32. Default is 16
To use -
ciphertext = r.encrypt(plaintext)
plaintext = r.decrypt(ciphertext)
If any strings are of the wrong length a ValueError is thrown
"""
# ported from the Java reference code by Bram Cohen, [email protected], April 2001
# this code is public domain, unless someone makes
# an intellectual property claim against the reference
# code, in which case it can be made public domain by
# deleting all the comments and renaming all the variables
import copy
import string
#-----------------------
#TREV - ADDED BECAUSE THERE'S WARNINGS ABOUT INT OVERFLOW BEHAVIOR CHANGING IN
#2.4.....
import os
if os.name != "java":
import exceptions
if hasattr(exceptions, "FutureWarning"):
import warnings
warnings.filterwarnings("ignore", category=FutureWarning, append=1)
#-----------------------
shifts = [[[0, 0], [1, 3], [2, 2], [3, 1]],
[[0, 0], [1, 5], [2, 4], [3, 3]],
[[0, 0], [1, 7], [3, 5], [4, 4]]]
# [keysize][block_size]
num_rounds = {16: {16: 10, 24: 12, 32: 14}, 24: {16: 12, 24: 12, 32: 14}, 32: {16: 14, 24: 14, 32: 14}}
A = [[1, 1, 1, 1, 1, 0, 0, 0],
[0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0],
[0, 0, 0, 1, 1, 1, 1, 1],
[1, 0, 0, 0, 1, 1, 1, 1],
[1, 1, 0, 0, 0, 1, 1, 1],
[1, 1, 1, 0, 0, 0, 1, 1],
[1, 1, 1, 1, 0, 0, 0, 1]]
# produce log and alog tables, needed for multiplying in the
# field GF(2^m) (generator = 3)
alog = [1]
for i in xrange(255):
j = (alog[-1] << 1) ^ alog[-1]
if j & 0x100 != 0:
j ^= 0x11B
alog.append(j)
log = [0] * 256
for i in xrange(1, 255):
log[alog[i]] = i
# multiply two elements of GF(2^m)
def mul(a, b):
if a == 0 or b == 0:
return 0
return alog[(log[a & 0xFF] + log[b & 0xFF]) % 255]
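# For reference, mul() is multiplication in GF(2^8) modulo 0x11B, so for
# example mul(0x57, 0x83) == 0xC1 (the worked example from FIPS-197).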
# substitution box based on F^{-1}(x)
box = [[0] * 8 for i in xrange(256)]
box[1][7] = 1
for i in xrange(2, 256):
j = alog[255 - log[i]]
for t in xrange(8):
box[i][t] = (j >> (7 - t)) & 0x01
B = [0, 1, 1, 0, 0, 0, 1, 1]
# affine transform: box[i] <- B + A*box[i]
cox = [[0] * 8 for i in xrange(256)]
for i in xrange(256):
for t in xrange(8):
cox[i][t] = B[t]
for j in xrange(8):
cox[i][t] ^= A[t][j] * box[i][j]
# S-boxes and inverse S-boxes
S = [0] * 256
Si = [0] * 256
for i in xrange(256):
S[i] = cox[i][0] << 7
for t in xrange(1, 8):
S[i] ^= cox[i][t] << (7-t)
Si[S[i] & 0xFF] = i
# T-boxes
G = [[2, 1, 1, 3],
[3, 2, 1, 1],
[1, 3, 2, 1],
[1, 1, 3, 2]]
AA = [[0] * 8 for i in xrange(4)]
for i in xrange(4):
for j in xrange(4):
AA[i][j] = G[i][j]
AA[i][i+4] = 1
for i in xrange(4):
pivot = AA[i][i]
if pivot == 0:
t = i + 1
while AA[t][i] == 0 and t < 4:
t += 1
assert t != 4, 'G matrix must be invertible'
for j in xrange(8):
AA[i][j], AA[t][j] = AA[t][j], AA[i][j]
pivot = AA[i][i]
for j in xrange(8):
if AA[i][j] != 0:
AA[i][j] = alog[(255 + log[AA[i][j] & 0xFF] - log[pivot & 0xFF]) % 255]
for t in xrange(4):
if i != t:
for j in xrange(i+1, 8):
AA[t][j] ^= mul(AA[i][j], AA[t][i])
AA[t][i] = 0
iG = [[0] * 4 for i in xrange(4)]
for i in xrange(4):
for j in xrange(4):
iG[i][j] = AA[i][j + 4]
def mul4(a, bs):
if a == 0:
return 0
r = 0
for b in bs:
r <<= 8
if b != 0:
r = r | mul(a, b)
return r
T1 = []
T2 = []
T3 = []
T4 = []
T5 = []
T6 = []
T7 = []
T8 = []
U1 = []
U2 = []
U3 = []
U4 = []
for t in xrange(256):
s = S[t]
T1.append(mul4(s, G[0]))
T2.append(mul4(s, G[1]))
T3.append(mul4(s, G[2]))
T4.append(mul4(s, G[3]))
s = Si[t]
T5.append(mul4(s, iG[0]))
T6.append(mul4(s, iG[1]))
T7.append(mul4(s, iG[2]))
T8.append(mul4(s, iG[3]))
U1.append(mul4(t, iG[0]))
U2.append(mul4(t, iG[1]))
U3.append(mul4(t, iG[2]))
U4.append(mul4(t, iG[3]))
# round constants
rcon = [1]
r = 1
for t in xrange(1, 30):
r = mul(2, r)
rcon.append(r)
del A
del AA
del pivot
del B
del G
del box
del log
del alog
del i
del j
del r
del s
del t
del mul
del mul4
del cox
del iG
class rijndael:
def __init__(self, key, block_size = 16):
if block_size != 16 and block_size != 24 and block_size != 32:
raise ValueError('Invalid block size: ' + str(block_size))
if len(key) != 16 and len(key) != 24 and len(key) != 32:
raise ValueError('Invalid key size: ' + str(len(key)))
self.block_size = block_size
ROUNDS = num_rounds[len(key)][block_size]
BC = block_size / 4
# encryption round keys
Ke = [[0] * BC for i in xrange(ROUNDS + 1)]
# decryption round keys
Kd = [[0] * BC for i in xrange(ROUNDS + 1)]
ROUND_KEY_COUNT = (ROUNDS + 1) * BC
KC = len(key) / 4
# copy user material bytes into temporary ints
tk = []
for i in xrange(0, KC):
tk.append((ord(key[i * 4]) << 24) | (ord(key[i * 4 + 1]) << 16) |
(ord(key[i * 4 + 2]) << 8) | ord(key[i * 4 + 3]))
# copy values into round key arrays
t = 0
j = 0
while j < KC and t < ROUND_KEY_COUNT:
Ke[t / BC][t % BC] = tk[j]
Kd[ROUNDS - (t / BC)][t % BC] = tk[j]
j += 1
t += 1
tt = 0
rconpointer = 0
while t < ROUND_KEY_COUNT:
# extrapolate using phi (the round key evolution function)
tt = tk[KC - 1]
tk[0] ^= (S[(tt >> 16) & 0xFF] & 0xFF) << 24 ^ \
(S[(tt >> 8) & 0xFF] & 0xFF) << 16 ^ \
(S[ tt & 0xFF] & 0xFF) << 8 ^ \
(S[(tt >> 24) & 0xFF] & 0xFF) ^ \
(rcon[rconpointer] & 0xFF) << 24
rconpointer += 1
if KC != 8:
for i in xrange(1, KC):
tk[i] ^= tk[i-1]
else:
for i in xrange(1, KC / 2):
tk[i] ^= tk[i-1]
tt = tk[KC / 2 - 1]
tk[KC / 2] ^= (S[ tt & 0xFF] & 0xFF) ^ \
(S[(tt >> 8) & 0xFF] & 0xFF) << 8 ^ \
(S[(tt >> 16) & 0xFF] & 0xFF) << 16 ^ \
(S[(tt >> 24) & 0xFF] & 0xFF) << 24
for i in xrange(KC / 2 + 1, KC):
tk[i] ^= tk[i-1]
# copy values into round key arrays
j = 0
while j < KC and t < ROUND_KEY_COUNT:
Ke[t / BC][t % BC] = tk[j]
Kd[ROUNDS - (t / BC)][t % BC] = tk[j]
j += 1
t += 1
# inverse MixColumn where needed
for r in xrange(1, ROUNDS):
for j in xrange(BC):
tt = Kd[r][j]
Kd[r][j] = U1[(tt >> 24) & 0xFF] ^ \
U2[(tt >> 16) & 0xFF] ^ \
U3[(tt >> 8) & 0xFF] ^ \
U4[ tt & 0xFF]
self.Ke = Ke
self.Kd = Kd
def encrypt(self, plaintext):
if len(plaintext) != self.block_size:
raise ValueError('wrong block length, expected ' + str(self.block_size) + ' got ' + str(len(plaintext)))
Ke = self.Ke
BC = self.block_size / 4
ROUNDS = len(Ke) - 1
if BC == 4:
SC = 0
elif BC == 6:
SC = 1
else:
SC = 2
s1 = shifts[SC][1][0]
s2 = shifts[SC][2][0]
s3 = shifts[SC][3][0]
a = [0] * BC
# temporary work array
t = []
# plaintext to ints + key
for i in xrange(BC):
t.append((ord(plaintext[i * 4 ]) << 24 |
ord(plaintext[i * 4 + 1]) << 16 |
ord(plaintext[i * 4 + 2]) << 8 |
ord(plaintext[i * 4 + 3]) ) ^ Ke[0][i])
# apply round transforms
for r in xrange(1, ROUNDS):
for i in xrange(BC):
a[i] = (T1[(t[ i ] >> 24) & 0xFF] ^
T2[(t[(i + s1) % BC] >> 16) & 0xFF] ^
T3[(t[(i + s2) % BC] >> 8) & 0xFF] ^
T4[ t[(i + s3) % BC] & 0xFF] ) ^ Ke[r][i]
t = copy.copy(a)
# last round is special
result = []
for i in xrange(BC):
tt = Ke[ROUNDS][i]
result.append((S[(t[ i ] >> 24) & 0xFF] ^ (tt >> 24)) & 0xFF)
result.append((S[(t[(i + s1) % BC] >> 16) & 0xFF] ^ (tt >> 16)) & 0xFF)
result.append((S[(t[(i + s2) % BC] >> 8) & 0xFF] ^ (tt >> 8)) & 0xFF)
result.append((S[ t[(i + s3) % BC] & 0xFF] ^ tt ) & 0xFF)
return string.join(map(chr, result), '')
def decrypt(self, ciphertext):
if len(ciphertext) != self.block_size:
            raise ValueError('wrong block length, expected ' + str(self.block_size) + ' got ' + str(len(ciphertext)))
Kd = self.Kd
BC = self.block_size / 4
ROUNDS = len(Kd) - 1
if BC == 4:
SC = 0
elif BC == 6:
SC = 1
else:
SC = 2
s1 = shifts[SC][1][1]
s2 = shifts[SC][2][1]
s3 = shifts[SC][3][1]
a = [0] * BC
# temporary work array
t = [0] * BC
# ciphertext to ints + key
for i in xrange(BC):
t[i] = (ord(ciphertext[i * 4 ]) << 24 |
ord(ciphertext[i * 4 + 1]) << 16 |
ord(ciphertext[i * 4 + 2]) << 8 |
ord(ciphertext[i * 4 + 3]) ) ^ Kd[0][i]
# apply round transforms
for r in xrange(1, ROUNDS):
for i in xrange(BC):
a[i] = (T5[(t[ i ] >> 24) & 0xFF] ^
T6[(t[(i + s1) % BC] >> 16) & 0xFF] ^
T7[(t[(i + s2) % BC] >> 8) & 0xFF] ^
T8[ t[(i + s3) % BC] & 0xFF] ) ^ Kd[r][i]
t = copy.copy(a)
# last round is special
result = []
for i in xrange(BC):
tt = Kd[ROUNDS][i]
result.append((Si[(t[ i ] >> 24) & 0xFF] ^ (tt >> 24)) & 0xFF)
result.append((Si[(t[(i + s1) % BC] >> 16) & 0xFF] ^ (tt >> 16)) & 0xFF)
result.append((Si[(t[(i + s2) % BC] >> 8) & 0xFF] ^ (tt >> 8)) & 0xFF)
result.append((Si[ t[(i + s3) % BC] & 0xFF] ^ tt ) & 0xFF)
return string.join(map(chr, result), '')
def encrypt(key, block):
return rijndael(key, len(block)).encrypt(block)
def decrypt(key, block):
return rijndael(key, len(block)).decrypt(block)
def test():
def t(kl, bl):
b = 'b' * bl
r = rijndael('a' * kl, bl)
assert r.decrypt(r.encrypt(b)) == b
t(16, 16)
t(16, 24)
t(16, 32)
t(24, 16)
t(24, 24)
t(24, 32)
t(32, 16)
t(32, 24)
t(32, 32)
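# Illustrative usage sketch (not part of the original tlslite code, never
# called by it): round-trips one block through the module-level wrappers
# defined above.  Key and plaintext values are arbitrary placeholders.
def _example_roundtrip():
    key = 'k' * 16        # 128-bit key
    block = 'p' * 16      # one 16-byte block
    ciphertext = encrypt(key, block)
    assert decrypt(key, ciphertext) == block
    return ciphertext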
| bsd-3-clause |
great-expectations/great_expectations | tests/datasource/test_batch_generators.py | 1 | 6706 | import os
from great_expectations.datasource.batch_kwargs_generator import (
DatabricksTableBatchKwargsGenerator,
GlobReaderBatchKwargsGenerator,
SubdirReaderBatchKwargsGenerator,
)
try:
from unittest import mock
except ImportError:
    import mock  # fall back to the PyPI "mock" backport on Python 2
def test_file_kwargs_generator(
data_context_parameterized_expectation_suite, filesystem_csv
):
base_dir = filesystem_csv
datasource = data_context_parameterized_expectation_suite.add_datasource(
"default",
module_name="great_expectations.datasource",
class_name="PandasDatasource",
batch_kwargs_generators={
"subdir_reader": {
"class_name": "SubdirReaderBatchKwargsGenerator",
"base_directory": str(base_dir),
}
},
)
generator = datasource.get_batch_kwargs_generator("subdir_reader")
known_data_asset_names = datasource.get_available_data_asset_names()
# Use set to avoid order dependency
assert set(known_data_asset_names["subdir_reader"]["names"]) == {
("f1", "file"),
("f2", "file"),
("f3", "directory"),
}
f1_batches = [
batch_kwargs["path"]
for batch_kwargs in generator.get_iterator(data_asset_name="f1")
]
assert len(f1_batches) == 1
expected_batches = [{"path": os.path.join(base_dir, "f1.csv")}]
for batch in expected_batches:
assert batch["path"] in f1_batches
f3_batches = [
batch_kwargs["path"]
for batch_kwargs in generator.get_iterator(data_asset_name="f3")
]
assert len(f3_batches) == 2
expected_batches = [
{"path": os.path.join(base_dir, "f3", "f3_20190101.csv")},
{"path": os.path.join(base_dir, "f3", "f3_20190102.csv")},
]
for batch in expected_batches:
assert batch["path"] in f3_batches
def test_glob_reader_generator(basic_pandas_datasource, tmp_path_factory):
"""Provides an example of how glob generator works: we specify our own
names for data_assets, and an associated glob; the generator
will take care of providing batches consisting of one file per
batch corresponding to the glob."""
basedir = str(tmp_path_factory.mktemp("test_glob_reader_generator"))
with open(os.path.join(basedir, "f1.blarg"), "w") as outfile:
outfile.write("\n\n\n")
with open(os.path.join(basedir, "f2.csv"), "w") as outfile:
outfile.write("\n\n\n")
with open(os.path.join(basedir, "f3.blarg"), "w") as outfile:
outfile.write("\n\n\n")
with open(os.path.join(basedir, "f4.blarg"), "w") as outfile:
outfile.write("\n\n\n")
with open(os.path.join(basedir, "f5.blarg"), "w") as outfile:
outfile.write("\n\n\n")
with open(os.path.join(basedir, "f6.blarg"), "w") as outfile:
outfile.write("\n\n\n")
with open(os.path.join(basedir, "f7.xls"), "w") as outfile:
outfile.write("\n\n\n")
with open(os.path.join(basedir, "f8.parquet"), "w") as outfile:
outfile.write("\n\n\n")
with open(os.path.join(basedir, "f9.xls"), "w") as outfile:
outfile.write("\n\n\n")
with open(os.path.join(basedir, "f0.json"), "w") as outfile:
outfile.write("\n\n\n")
g2 = GlobReaderBatchKwargsGenerator(
base_directory=basedir,
datasource=basic_pandas_datasource,
asset_globs={"blargs": {"glob": "*.blarg"}, "fs": {"glob": "f*"}},
)
g2_assets = g2.get_available_data_asset_names()
# Use set in test to avoid order issues
assert set(g2_assets["names"]) == {("blargs", "path"), ("fs", "path")}
blargs_kwargs = [x["path"] for x in g2.get_iterator(data_asset_name="blargs")]
real_blargs = [
os.path.join(basedir, "f1.blarg"),
os.path.join(basedir, "f3.blarg"),
os.path.join(basedir, "f4.blarg"),
os.path.join(basedir, "f5.blarg"),
os.path.join(basedir, "f6.blarg"),
]
for kwargs in real_blargs:
assert kwargs in blargs_kwargs
assert len(blargs_kwargs) == len(real_blargs)
def test_file_kwargs_generator_extensions(tmp_path_factory):
"""csv, xls, parquet, json should be recognized file extensions"""
basedir = str(tmp_path_factory.mktemp("test_file_kwargs_generator_extensions"))
# Do not include: invalid extension
with open(os.path.join(basedir, "f1.blarg"), "w") as outfile:
outfile.write("\n\n\n")
# Include
with open(os.path.join(basedir, "f2.csv"), "w") as outfile:
outfile.write("\n\n\n")
# Do not include: valid subdir, but no valid files in it
os.mkdir(os.path.join(basedir, "f3"))
with open(os.path.join(basedir, "f3", "f3_1.blarg"), "w") as outfile:
outfile.write("\n\n\n")
with open(os.path.join(basedir, "f3", "f3_2.blarg"), "w") as outfile:
outfile.write("\n\n\n")
# Include: valid subdir with valid files
os.mkdir(os.path.join(basedir, "f4"))
with open(os.path.join(basedir, "f4", "f4_1.csv"), "w") as outfile:
outfile.write("\n\n\n")
with open(os.path.join(basedir, "f4", "f4_2.csv"), "w") as outfile:
outfile.write("\n\n\n")
# Do not include: valid extension, but dot prefix
with open(os.path.join(basedir, ".f5.csv"), "w") as outfile:
outfile.write("\n\n\n")
# Include: valid extensions
with open(os.path.join(basedir, "f6.tsv"), "w") as outfile:
outfile.write("\n\n\n")
with open(os.path.join(basedir, "f7.xls"), "w") as outfile:
outfile.write("\n\n\n")
with open(os.path.join(basedir, "f8.parquet"), "w") as outfile:
outfile.write("\n\n\n")
with open(os.path.join(basedir, "f9.xls"), "w") as outfile:
outfile.write("\n\n\n")
with open(os.path.join(basedir, "f0.json"), "w") as outfile:
outfile.write("\n\n\n")
g1 = SubdirReaderBatchKwargsGenerator(datasource="foo", base_directory=basedir)
g1_assets = g1.get_available_data_asset_names()
# Use set in test to avoid order issues
assert set(g1_assets["names"]) == {
("f7", "file"),
("f4", "directory"),
("f6", "file"),
("f0", "file"),
("f2", "file"),
("f9", "file"),
("f8", "file"),
}
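def _sketch_subdir_reader(base_directory="/tmp/my_csvs"):
    """Illustrative sketch only, mirroring the test above rather than adding
    coverage: a SubdirReaderBatchKwargsGenerator treats top-level files with
    recognized extensions (and subdirectories containing them) as data
    assets.  The directory path and datasource name are hypothetical
    placeholders."""
    generator = SubdirReaderBatchKwargsGenerator(
        datasource="my_datasource", base_directory=base_directory
    )
    return generator.get_available_data_asset_names()["names"]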
def test_databricks_generator(basic_sparkdf_datasource):
generator = DatabricksTableBatchKwargsGenerator(datasource=basic_sparkdf_datasource)
available_assets = generator.get_available_data_asset_names()
# We have no tables available
assert available_assets == {"names": []}
databricks_kwargs_iterator = generator.get_iterator(data_asset_name="foo")
kwargs = [batch_kwargs for batch_kwargs in databricks_kwargs_iterator]
assert "select * from" in kwargs[0]["query"].lower()
| apache-2.0 |
thumbimigwe/echorizr | lib/python2.7/site-packages/django/template/loaders/cached.py | 19 | 7094 | """
Wrapper class that takes a list of template loaders as an argument and attempts
to load templates from them in order, caching the result.
"""
import hashlib
import warnings
from django.template import Origin, Template, TemplateDoesNotExist
from django.template.backends.django import copy_exception
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.encoding import force_bytes
from django.utils.inspect import func_supports_parameter
from .base import Loader as BaseLoader
class Loader(BaseLoader):
def __init__(self, engine, loaders):
self.template_cache = {}
self.find_template_cache = {} # RemovedInDjango20Warning
self.get_template_cache = {}
self.loaders = engine.get_template_loaders(loaders)
super(Loader, self).__init__(engine)
def get_contents(self, origin):
return origin.loader.get_contents(origin)
def get_template(self, template_name, template_dirs=None, skip=None):
"""
Perform the caching that gives this loader its name. Often many of the
templates attempted will be missing, so memory use is of concern here.
To keep it in check, caching behavior is a little complicated when a
template is not found. See ticket #26306 for more details.
With template debugging disabled, cache the TemplateDoesNotExist class
for every missing template and raise a new instance of it after
fetching it from the cache.
With template debugging enabled, a unique TemplateDoesNotExist object
is cached for each missing template to preserve debug data. When
raising an exception, Python sets __traceback__, __context__, and
__cause__ attributes on it. Those attributes can contain references to
all sorts of objects up the call chain and caching them creates a
memory leak. Thus, unraised copies of the exceptions are cached and
copies of those copies are raised after they're fetched from the cache.
"""
key = self.cache_key(template_name, template_dirs, skip)
cached = self.get_template_cache.get(key)
if cached:
if isinstance(cached, type) and issubclass(cached, TemplateDoesNotExist):
raise cached(template_name)
elif isinstance(cached, TemplateDoesNotExist):
raise copy_exception(cached)
return cached
try:
template = super(Loader, self).get_template(
template_name, template_dirs, skip,
)
except TemplateDoesNotExist as e:
self.get_template_cache[key] = copy_exception(e) if self.engine.debug else TemplateDoesNotExist
raise
else:
self.get_template_cache[key] = template
return template
def get_template_sources(self, template_name, template_dirs=None):
for loader in self.loaders:
args = [template_name]
# RemovedInDjango20Warning: Add template_dirs for compatibility
# with old loaders
if func_supports_parameter(loader.get_template_sources, 'template_dirs'):
args.append(template_dirs)
for origin in loader.get_template_sources(*args):
yield origin
def cache_key(self, template_name, template_dirs, skip=None):
"""
Generate a cache key for the template name, dirs, and skip.
If skip is provided, only origins that match template_name are included
in the cache key. This ensures each template is only parsed and cached
once if contained in different extend chains like:
x -> a -> a
y -> a -> a
z -> a -> a
"""
dirs_prefix = ''
skip_prefix = ''
if skip:
matching = [origin.name for origin in skip if origin.template_name == template_name]
if matching:
skip_prefix = self.generate_hash(matching)
if template_dirs:
dirs_prefix = self.generate_hash(template_dirs)
return ("%s-%s-%s" % (template_name, skip_prefix, dirs_prefix)).strip('-')
def generate_hash(self, values):
return hashlib.sha1(force_bytes('|'.join(values))).hexdigest()
@property
def supports_recursion(self):
"""
RemovedInDjango20Warning: This is an internal property used by the
ExtendsNode during the deprecation of non-recursive loaders.
"""
return all(hasattr(loader, 'get_contents') for loader in self.loaders)
def find_template(self, name, dirs=None):
"""
RemovedInDjango20Warning: An internal method to lookup the template
name in all the configured loaders.
"""
key = self.cache_key(name, dirs)
try:
result = self.find_template_cache[key]
except KeyError:
result = None
for loader in self.loaders:
try:
template, display_name = loader(name, dirs)
except TemplateDoesNotExist:
pass
else:
origin = Origin(
name=display_name,
template_name=name,
loader=loader,
)
result = template, origin
break
self.find_template_cache[key] = result
if result:
return result
else:
self.template_cache[key] = TemplateDoesNotExist
raise TemplateDoesNotExist(name)
def load_template(self, template_name, template_dirs=None):
warnings.warn(
'The load_template() method is deprecated. Use get_template() '
'instead.', RemovedInDjango20Warning,
)
key = self.cache_key(template_name, template_dirs)
template_tuple = self.template_cache.get(key)
# A cached previous failure:
if template_tuple is TemplateDoesNotExist:
raise TemplateDoesNotExist(template_name)
elif template_tuple is None:
template, origin = self.find_template(template_name, template_dirs)
if not hasattr(template, 'render'):
try:
template = Template(template, origin, template_name, self.engine)
except TemplateDoesNotExist:
# If compiling the template we found raises TemplateDoesNotExist,
# back off to returning the source and display name for the template
# we were asked to load. This allows for correct identification (later)
# of the actual template that does not exist.
self.template_cache[key] = (template, origin)
self.template_cache[key] = (template, None)
return self.template_cache[key]
def reset(self):
"Empty the template cache."
self.template_cache.clear()
self.find_template_cache.clear() # RemovedInDjango20Warning
self.get_template_cache.clear()
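# Illustrative only (not used by this module): the cached loader is normally
# enabled by wrapping the regular loaders in the TEMPLATES setting's OPTIONS,
# roughly along these lines; the dotted paths are the standard Django loaders.
_EXAMPLE_CACHED_LOADER_OPTIONS = {
    'loaders': [
        ('django.template.loaders.cached.Loader', [
            'django.template.loaders.filesystem.Loader',
            'django.template.loaders.app_directories.Loader',
        ]),
    ],
}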
| mit |
glls/Cinnamon | files/usr/share/cinnamon/cinnamon-settings/modules/cs_workspaces.py | 3 | 2070 | #!/usr/bin/python3
from SettingsWidgets import SidePage
from xapp.GSettingsWidgets import *
class Module:
name = "workspaces"
category = "prefs"
comment = _("Manage workspace preferences")
def __init__(self, content_box):
keywords = _("workspace, osd, expo, monitor")
sidePage = SidePage(_("Workspaces"), "cs-workspaces", keywords, content_box, module=self)
self.sidePage = sidePage
def shouldLoad(self):
return True
def on_module_selected(self):
if not self.loaded:
print("Loading Workspaces module")
page = SettingsPage()
self.sidePage.add_widget(page)
settings = page.add_section(_("Workspace Options"))
switch = GSettingsSwitch(_("Enable workspace OSD"), "org.cinnamon", "workspace-osd-visible")
settings.add_row(switch)
switch = GSettingsSwitch(_("Allow cycling through workspaces"), "org.cinnamon.muffin", "workspace-cycle")
settings.add_row(switch)
switch = GSettingsSwitch(_("Only use workspaces on primary monitor (requires Cinnamon restart)"), "org.cinnamon.muffin", "workspaces-only-on-primary")
settings.add_row(switch)
switch = GSettingsSwitch(_("Display Expo view as a grid"), "org.cinnamon", "workspace-expo-view-as-grid")
settings.add_row(switch)
# Edge Flip doesn't work well, so it's there in gsettings, but we don't show it to users yet
# switch = GSettingsSwitch(_("Enable Edge Flip"), "org.cinnamon", "enable-edge-flip")
# settings.add_row(switch)
# spin = GSettingsSpinButton(_("Edge Flip delay"), "org.cinnamon", "edge-flip-delay", mini=1, maxi=3000, units=_("ms"))
# settings.add_reveal_row(spin, "org.cinnamon", "enable-edge-flip")
switch = GSettingsSwitch(_("Invert the left and right arrow key directions used to shift workspaces during a window drag"), "org.cinnamon.muffin", "invert-workspace-flip-direction")
settings.add_row(switch)
| gpl-2.0 |
smunix/ns-3-rfid | src/wifi/bindings/modulegen__gcc_LP64.py | 2 | 742364 | from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
import pybindgen.settings
import warnings
class ErrorHandler(pybindgen.settings.ErrorHandler):
def handle_error(self, wrapper, exception, traceback_):
warnings.warn("exception %r in wrapper %s" % (exception, wrapper))
return True
pybindgen.settings.error_handler = ErrorHandler()
import sys
def module_init():
root_module = Module('ns.wifi', cpp_namespace='::ns3')
return root_module
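def _example_emit(out_path='ns3module.cc'):
    # Hypothetical driver sketch (not part of the generated bindings and not
    # called by them): shows how module_init() and the register_types()
    # function below are typically chained before asking pybindgen to emit
    # C++ source.  The output path is a placeholder, and the real generator
    # also performs further registration steps not shown here.
    root_module = module_init()
    register_types(root_module)
    with open(out_path, 'w') as out:
        root_module.generate(FileCodeSink(out))
    return out_path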
def register_types(module):
root_module = module.get_root()
## qos-utils.h (module 'wifi'): ns3::AcIndex [enumeration]
module.add_enum('AcIndex', ['AC_BE', 'AC_BK', 'AC_VI', 'AC_VO', 'AC_BE_NQOS', 'AC_UNDEF'])
## wifi-mac-header.h (module 'wifi'): ns3::WifiMacType [enumeration]
module.add_enum('WifiMacType', ['WIFI_MAC_CTL_RTS', 'WIFI_MAC_CTL_CTS', 'WIFI_MAC_CTL_ACK', 'WIFI_MAC_CTL_BACKREQ', 'WIFI_MAC_CTL_BACKRESP', 'WIFI_MAC_MGT_BEACON', 'WIFI_MAC_MGT_ASSOCIATION_REQUEST', 'WIFI_MAC_MGT_ASSOCIATION_RESPONSE', 'WIFI_MAC_MGT_DISASSOCIATION', 'WIFI_MAC_MGT_REASSOCIATION_REQUEST', 'WIFI_MAC_MGT_REASSOCIATION_RESPONSE', 'WIFI_MAC_MGT_PROBE_REQUEST', 'WIFI_MAC_MGT_PROBE_RESPONSE', 'WIFI_MAC_MGT_AUTHENTICATION', 'WIFI_MAC_MGT_DEAUTHENTICATION', 'WIFI_MAC_MGT_ACTION', 'WIFI_MAC_MGT_ACTION_NO_ACK', 'WIFI_MAC_MGT_MULTIHOP_ACTION', 'WIFI_MAC_DATA', 'WIFI_MAC_DATA_CFACK', 'WIFI_MAC_DATA_CFPOLL', 'WIFI_MAC_DATA_CFACK_CFPOLL', 'WIFI_MAC_DATA_NULL', 'WIFI_MAC_DATA_NULL_CFACK', 'WIFI_MAC_DATA_NULL_CFPOLL', 'WIFI_MAC_DATA_NULL_CFACK_CFPOLL', 'WIFI_MAC_QOSDATA', 'WIFI_MAC_QOSDATA_CFACK', 'WIFI_MAC_QOSDATA_CFPOLL', 'WIFI_MAC_QOSDATA_CFACK_CFPOLL', 'WIFI_MAC_QOSDATA_NULL', 'WIFI_MAC_QOSDATA_NULL_CFPOLL', 'WIFI_MAC_QOSDATA_NULL_CFACK_CFPOLL'])
## wifi-preamble.h (module 'wifi'): ns3::WifiPreamble [enumeration]
module.add_enum('WifiPreamble', ['WIFI_PREAMBLE_LONG', 'WIFI_PREAMBLE_SHORT'])
## wifi-mode.h (module 'wifi'): ns3::WifiModulationClass [enumeration]
module.add_enum('WifiModulationClass', ['WIFI_MOD_CLASS_UNKNOWN', 'WIFI_MOD_CLASS_IR', 'WIFI_MOD_CLASS_FHSS', 'WIFI_MOD_CLASS_DSSS', 'WIFI_MOD_CLASS_ERP_PBCC', 'WIFI_MOD_CLASS_DSSS_OFDM', 'WIFI_MOD_CLASS_ERP_OFDM', 'WIFI_MOD_CLASS_OFDM', 'WIFI_MOD_CLASS_HT'])
## wifi-phy-standard.h (module 'wifi'): ns3::WifiPhyStandard [enumeration]
module.add_enum('WifiPhyStandard', ['WIFI_PHY_STANDARD_80211a', 'WIFI_PHY_STANDARD_80211b', 'WIFI_PHY_STANDARD_80211g', 'WIFI_PHY_STANDARD_80211_10MHZ', 'WIFI_PHY_STANDARD_80211_5MHZ', 'WIFI_PHY_STANDARD_holland', 'WIFI_PHY_STANDARD_80211p_CCH', 'WIFI_PHY_STANDARD_80211p_SCH'])
## qos-tag.h (module 'wifi'): ns3::UserPriority [enumeration]
module.add_enum('UserPriority', ['UP_BK', 'UP_BE', 'UP_EE', 'UP_CL', 'UP_VI', 'UP_VO', 'UP_NC'])
## wifi-mode.h (module 'wifi'): ns3::WifiCodeRate [enumeration]
module.add_enum('WifiCodeRate', ['WIFI_CODE_RATE_UNDEFINED', 'WIFI_CODE_RATE_3_4', 'WIFI_CODE_RATE_2_3', 'WIFI_CODE_RATE_1_2'])
## edca-txop-n.h (module 'wifi'): ns3::TypeOfStation [enumeration]
module.add_enum('TypeOfStation', ['STA', 'AP', 'ADHOC_STA', 'MESH'])
## ctrl-headers.h (module 'wifi'): ns3::BlockAckType [enumeration]
module.add_enum('BlockAckType', ['BASIC_BLOCK_ACK', 'COMPRESSED_BLOCK_ACK', 'MULTI_TID_BLOCK_ACK'])
## address.h (module 'network'): ns3::Address [class]
module.add_class('Address', import_from_module='ns.network')
## address.h (module 'network'): ns3::Address::MaxSize_e [enumeration]
module.add_enum('MaxSize_e', ['MAX_SIZE'], outer_class=root_module['ns3::Address'], import_from_module='ns.network')
## trace-helper.h (module 'network'): ns3::AsciiTraceHelper [class]
module.add_class('AsciiTraceHelper', import_from_module='ns.network')
## trace-helper.h (module 'network'): ns3::AsciiTraceHelperForDevice [class]
module.add_class('AsciiTraceHelperForDevice', allow_subclassing=True, import_from_module='ns.network')
## athstats-helper.h (module 'wifi'): ns3::AthstatsHelper [class]
module.add_class('AthstatsHelper')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList [class]
module.add_class('AttributeConstructionList', import_from_module='ns.core')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item [struct]
module.add_class('Item', import_from_module='ns.core', outer_class=root_module['ns3::AttributeConstructionList'])
## block-ack-manager.h (module 'wifi'): ns3::Bar [struct]
module.add_class('Bar')
## block-ack-agreement.h (module 'wifi'): ns3::BlockAckAgreement [class]
module.add_class('BlockAckAgreement')
## block-ack-cache.h (module 'wifi'): ns3::BlockAckCache [class]
module.add_class('BlockAckCache')
## block-ack-manager.h (module 'wifi'): ns3::BlockAckManager [class]
module.add_class('BlockAckManager')
## buffer.h (module 'network'): ns3::Buffer [class]
module.add_class('Buffer', import_from_module='ns.network')
## buffer.h (module 'network'): ns3::Buffer::Iterator [class]
module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::Buffer'])
## packet.h (module 'network'): ns3::ByteTagIterator [class]
module.add_class('ByteTagIterator', import_from_module='ns.network')
## packet.h (module 'network'): ns3::ByteTagIterator::Item [class]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagIterator'])
## byte-tag-list.h (module 'network'): ns3::ByteTagList [class]
module.add_class('ByteTagList', import_from_module='ns.network')
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator [class]
module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList'])
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item [struct]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList::Iterator'])
## callback.h (module 'core'): ns3::CallbackBase [class]
module.add_class('CallbackBase', import_from_module='ns.core')
## capability-information.h (module 'wifi'): ns3::CapabilityInformation [class]
module.add_class('CapabilityInformation')
## dcf-manager.h (module 'wifi'): ns3::DcfManager [class]
module.add_class('DcfManager')
## dcf-manager.h (module 'wifi'): ns3::DcfState [class]
module.add_class('DcfState', allow_subclassing=True)
## dsss-error-rate-model.h (module 'wifi'): ns3::DsssErrorRateModel [class]
module.add_class('DsssErrorRateModel')
## event-id.h (module 'core'): ns3::EventId [class]
module.add_class('EventId', import_from_module='ns.core')
## interference-helper.h (module 'wifi'): ns3::InterferenceHelper [class]
module.add_class('InterferenceHelper')
## interference-helper.h (module 'wifi'): ns3::InterferenceHelper::SnrPer [struct]
module.add_class('SnrPer', outer_class=root_module['ns3::InterferenceHelper'])
## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
module.add_class('Ipv4Address', import_from_module='ns.network')
## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
root_module['ns3::Ipv4Address'].implicitly_converts_to(root_module['ns3::Address'])
## ipv4-address.h (module 'network'): ns3::Ipv4Mask [class]
module.add_class('Ipv4Mask', import_from_module='ns.network')
## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
module.add_class('Ipv6Address', import_from_module='ns.network')
## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
root_module['ns3::Ipv6Address'].implicitly_converts_to(root_module['ns3::Address'])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix [class]
module.add_class('Ipv6Prefix', import_from_module='ns.network')
## mac48-address.h (module 'network'): ns3::Mac48Address [class]
module.add_class('Mac48Address', import_from_module='ns.network')
## mac48-address.h (module 'network'): ns3::Mac48Address [class]
root_module['ns3::Mac48Address'].implicitly_converts_to(root_module['ns3::Address'])
## mac-low.h (module 'wifi'): ns3::MacLowBlockAckEventListener [class]
module.add_class('MacLowBlockAckEventListener', allow_subclassing=True)
## mac-low.h (module 'wifi'): ns3::MacLowDcfListener [class]
module.add_class('MacLowDcfListener', allow_subclassing=True)
## mac-low.h (module 'wifi'): ns3::MacLowTransmissionListener [class]
module.add_class('MacLowTransmissionListener', allow_subclassing=True)
## mac-low.h (module 'wifi'): ns3::MacLowTransmissionParameters [class]
module.add_class('MacLowTransmissionParameters')
## mac-rx-middle.h (module 'wifi'): ns3::MacRxMiddle [class]
module.add_class('MacRxMiddle')
## net-device-container.h (module 'network'): ns3::NetDeviceContainer [class]
module.add_class('NetDeviceContainer', import_from_module='ns.network')
## node-container.h (module 'network'): ns3::NodeContainer [class]
module.add_class('NodeContainer', import_from_module='ns.network')
## object-base.h (module 'core'): ns3::ObjectBase [class]
module.add_class('ObjectBase', allow_subclassing=True, import_from_module='ns.core')
## object.h (module 'core'): ns3::ObjectDeleter [struct]
module.add_class('ObjectDeleter', import_from_module='ns.core')
## object-factory.h (module 'core'): ns3::ObjectFactory [class]
module.add_class('ObjectFactory', import_from_module='ns.core')
## originator-block-ack-agreement.h (module 'wifi'): ns3::OriginatorBlockAckAgreement [class]
module.add_class('OriginatorBlockAckAgreement', parent=root_module['ns3::BlockAckAgreement'])
## originator-block-ack-agreement.h (module 'wifi'): ns3::OriginatorBlockAckAgreement::State [enumeration]
module.add_enum('State', ['PENDING', 'ESTABLISHED', 'INACTIVE', 'UNSUCCESSFUL'], outer_class=root_module['ns3::OriginatorBlockAckAgreement'])
## packet-metadata.h (module 'network'): ns3::PacketMetadata [class]
module.add_class('PacketMetadata', import_from_module='ns.network')
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item [struct]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata'])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item [enumeration]
module.add_enum('', ['PAYLOAD', 'HEADER', 'TRAILER'], outer_class=root_module['ns3::PacketMetadata::Item'], import_from_module='ns.network')
## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator [class]
module.add_class('ItemIterator', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata'])
## packet.h (module 'network'): ns3::PacketTagIterator [class]
module.add_class('PacketTagIterator', import_from_module='ns.network')
## packet.h (module 'network'): ns3::PacketTagIterator::Item [class]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagIterator'])
## packet-tag-list.h (module 'network'): ns3::PacketTagList [class]
module.add_class('PacketTagList', import_from_module='ns.network')
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData [struct]
module.add_class('TagData', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagList'])
## pcap-file.h (module 'network'): ns3::PcapFile [class]
module.add_class('PcapFile', import_from_module='ns.network')
## trace-helper.h (module 'network'): ns3::PcapHelper [class]
module.add_class('PcapHelper', import_from_module='ns.network')
## trace-helper.h (module 'network'): ns3::PcapHelper [enumeration]
module.add_enum('', ['DLT_NULL', 'DLT_EN10MB', 'DLT_PPP', 'DLT_RAW', 'DLT_IEEE802_11', 'DLT_PRISM_HEADER', 'DLT_IEEE802_11_RADIO'], outer_class=root_module['ns3::PcapHelper'], import_from_module='ns.network')
## trace-helper.h (module 'network'): ns3::PcapHelperForDevice [class]
module.add_class('PcapHelperForDevice', allow_subclassing=True, import_from_module='ns.network')
## random-variable.h (module 'core'): ns3::RandomVariable [class]
module.add_class('RandomVariable', import_from_module='ns.core')
## minstrel-wifi-manager.h (module 'wifi'): ns3::RateInfo [struct]
module.add_class('RateInfo')
## random-variable.h (module 'core'): ns3::SeedManager [class]
module.add_class('SeedManager', import_from_module='ns.core')
## random-variable.h (module 'core'): ns3::SequentialVariable [class]
module.add_class('SequentialVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
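    ## The memory_policy passed to the SimpleRefCount instantiations registered
    ## below makes pybindgen drive these wrappers through ns-3's intrusive
    ## reference counting (Ref/Unref/GetReferenceCount) instead of deleting them directly.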
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Object', 'ns3::ObjectBase', 'ns3::ObjectDeleter'], parent=root_module['ns3::ObjectBase'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simulator.h (module 'core'): ns3::Simulator [class]
module.add_class('Simulator', destructor_visibility='private', import_from_module='ns.core')
## status-code.h (module 'wifi'): ns3::StatusCode [class]
module.add_class('StatusCode')
## tag.h (module 'network'): ns3::Tag [class]
module.add_class('Tag', import_from_module='ns.network', parent=root_module['ns3::ObjectBase'])
## tag-buffer.h (module 'network'): ns3::TagBuffer [class]
module.add_class('TagBuffer', import_from_module='ns.network')
## random-variable.h (module 'core'): ns3::TriangularVariable [class]
module.add_class('TriangularVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## type-id.h (module 'core'): ns3::TypeId [class]
module.add_class('TypeId', import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::AttributeFlag [enumeration]
module.add_enum('AttributeFlag', ['ATTR_GET', 'ATTR_SET', 'ATTR_CONSTRUCT', 'ATTR_SGC'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation [struct]
module.add_class('AttributeInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation [struct]
module.add_class('TraceSourceInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
## random-variable.h (module 'core'): ns3::UniformVariable [class]
module.add_class('UniformVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## random-variable.h (module 'core'): ns3::WeibullVariable [class]
module.add_class('WeibullVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## wifi-helper.h (module 'wifi'): ns3::WifiHelper [class]
module.add_class('WifiHelper')
## wifi-helper.h (module 'wifi'): ns3::WifiMacHelper [class]
module.add_class('WifiMacHelper', allow_subclassing=True)
## wifi-mode.h (module 'wifi'): ns3::WifiMode [class]
module.add_class('WifiMode')
## wifi-mode.h (module 'wifi'): ns3::WifiModeFactory [class]
module.add_class('WifiModeFactory')
## wifi-helper.h (module 'wifi'): ns3::WifiPhyHelper [class]
module.add_class('WifiPhyHelper', allow_subclassing=True)
## wifi-phy.h (module 'wifi'): ns3::WifiPhyListener [class]
module.add_class('WifiPhyListener', allow_subclassing=True)
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStation [struct]
module.add_class('WifiRemoteStation')
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStationInfo [class]
module.add_class('WifiRemoteStationInfo')
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStationState [struct]
module.add_class('WifiRemoteStationState')
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStationState [enumeration]
module.add_enum('', ['BRAND_NEW', 'DISASSOC', 'WAIT_ASSOC_TX_OK', 'GOT_ASSOC_TX_OK'], outer_class=root_module['ns3::WifiRemoteStationState'])
## yans-wifi-helper.h (module 'wifi'): ns3::YansWifiChannelHelper [class]
module.add_class('YansWifiChannelHelper')
## yans-wifi-helper.h (module 'wifi'): ns3::YansWifiPhyHelper [class]
module.add_class('YansWifiPhyHelper', parent=[root_module['ns3::WifiPhyHelper'], root_module['ns3::PcapHelperForDevice'], root_module['ns3::AsciiTraceHelperForDevice']])
## yans-wifi-helper.h (module 'wifi'): ns3::YansWifiPhyHelper::SupportedPcapDataLinkTypes [enumeration]
module.add_enum('SupportedPcapDataLinkTypes', ['DLT_IEEE802_11', 'DLT_PRISM_HEADER', 'DLT_IEEE802_11_RADIO'], outer_class=root_module['ns3::YansWifiPhyHelper'])
## random-variable.h (module 'core'): ns3::ZetaVariable [class]
module.add_class('ZetaVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## random-variable.h (module 'core'): ns3::ZipfVariable [class]
module.add_class('ZipfVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## empty.h (module 'core'): ns3::empty [class]
module.add_class('empty', import_from_module='ns.core')
## int64x64-double.h (module 'core'): ns3::int64x64_t [class]
module.add_class('int64x64_t', import_from_module='ns.core')
## chunk.h (module 'network'): ns3::Chunk [class]
module.add_class('Chunk', import_from_module='ns.network', parent=root_module['ns3::ObjectBase'])
## random-variable.h (module 'core'): ns3::ConstantVariable [class]
module.add_class('ConstantVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## random-variable.h (module 'core'): ns3::DeterministicVariable [class]
module.add_class('DeterministicVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## random-variable.h (module 'core'): ns3::EmpiricalVariable [class]
module.add_class('EmpiricalVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## random-variable.h (module 'core'): ns3::ErlangVariable [class]
module.add_class('ErlangVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## random-variable.h (module 'core'): ns3::ExponentialVariable [class]
module.add_class('ExponentialVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## random-variable.h (module 'core'): ns3::GammaVariable [class]
module.add_class('GammaVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## header.h (module 'network'): ns3::Header [class]
module.add_class('Header', import_from_module='ns.network', parent=root_module['ns3::Chunk'])
## random-variable.h (module 'core'): ns3::IntEmpiricalVariable [class]
module.add_class('IntEmpiricalVariable', import_from_module='ns.core', parent=root_module['ns3::EmpiricalVariable'])
## random-variable.h (module 'core'): ns3::LogNormalVariable [class]
module.add_class('LogNormalVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## mgt-headers.h (module 'wifi'): ns3::MgtAddBaRequestHeader [class]
module.add_class('MgtAddBaRequestHeader', parent=root_module['ns3::Header'])
## mgt-headers.h (module 'wifi'): ns3::MgtAddBaResponseHeader [class]
module.add_class('MgtAddBaResponseHeader', parent=root_module['ns3::Header'])
## mgt-headers.h (module 'wifi'): ns3::MgtAssocRequestHeader [class]
module.add_class('MgtAssocRequestHeader', parent=root_module['ns3::Header'])
## mgt-headers.h (module 'wifi'): ns3::MgtAssocResponseHeader [class]
module.add_class('MgtAssocResponseHeader', parent=root_module['ns3::Header'])
## mgt-headers.h (module 'wifi'): ns3::MgtDelBaHeader [class]
module.add_class('MgtDelBaHeader', parent=root_module['ns3::Header'])
## mgt-headers.h (module 'wifi'): ns3::MgtProbeRequestHeader [class]
module.add_class('MgtProbeRequestHeader', parent=root_module['ns3::Header'])
## mgt-headers.h (module 'wifi'): ns3::MgtProbeResponseHeader [class]
module.add_class('MgtProbeResponseHeader', parent=root_module['ns3::Header'])
## random-variable.h (module 'core'): ns3::NormalVariable [class]
module.add_class('NormalVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## nqos-wifi-mac-helper.h (module 'wifi'): ns3::NqosWifiMacHelper [class]
module.add_class('NqosWifiMacHelper', parent=root_module['ns3::WifiMacHelper'])
## object.h (module 'core'): ns3::Object [class]
module.add_class('Object', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
## object.h (module 'core'): ns3::Object::AggregateIterator [class]
module.add_class('AggregateIterator', import_from_module='ns.core', outer_class=root_module['ns3::Object'])
## random-variable.h (module 'core'): ns3::ParetoVariable [class]
module.add_class('ParetoVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## pcap-file-wrapper.h (module 'network'): ns3::PcapFileWrapper [class]
module.add_class('PcapFileWrapper', import_from_module='ns.network', parent=root_module['ns3::Object'])
## propagation-delay-model.h (module 'propagation'): ns3::PropagationDelayModel [class]
module.add_class('PropagationDelayModel', import_from_module='ns.propagation', parent=root_module['ns3::Object'])
## propagation-loss-model.h (module 'propagation'): ns3::PropagationLossModel [class]
module.add_class('PropagationLossModel', import_from_module='ns.propagation', parent=root_module['ns3::Object'])
## qos-tag.h (module 'wifi'): ns3::QosTag [class]
module.add_class('QosTag', parent=root_module['ns3::Tag'])
## qos-wifi-mac-helper.h (module 'wifi'): ns3::QosWifiMacHelper [class]
module.add_class('QosWifiMacHelper', parent=root_module['ns3::WifiMacHelper'])
## propagation-delay-model.h (module 'propagation'): ns3::RandomPropagationDelayModel [class]
module.add_class('RandomPropagationDelayModel', import_from_module='ns.propagation', parent=root_module['ns3::PropagationDelayModel'])
## propagation-loss-model.h (module 'propagation'): ns3::RandomPropagationLossModel [class]
module.add_class('RandomPropagationLossModel', import_from_module='ns.propagation', parent=root_module['ns3::PropagationLossModel'])
## propagation-loss-model.h (module 'propagation'): ns3::RangePropagationLossModel [class]
module.add_class('RangePropagationLossModel', import_from_module='ns.propagation', parent=root_module['ns3::PropagationLossModel'])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeChecker', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeChecker>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeValue', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeValue>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase', 'ns3::empty', 'ns3::DefaultDeleter<ns3::CallbackImplBase>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::EventImpl', 'ns3::empty', 'ns3::DefaultDeleter<ns3::EventImpl>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::InterferenceHelper::Event, ns3::empty, ns3::DefaultDeleter<ns3::InterferenceHelper::Event> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::InterferenceHelper::Event', 'ns3::empty', 'ns3::DefaultDeleter<ns3::InterferenceHelper::Event>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::NixVector', 'ns3::empty', 'ns3::DefaultDeleter<ns3::NixVector>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::OutputStreamWrapper', 'ns3::empty', 'ns3::DefaultDeleter<ns3::OutputStreamWrapper>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Packet', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Packet>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::TraceSourceAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::WifiInformationElement, ns3::empty, ns3::DefaultDeleter<ns3::WifiInformationElement> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::WifiInformationElement', 'ns3::empty', 'ns3::DefaultDeleter<ns3::WifiInformationElement>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## propagation-loss-model.h (module 'propagation'): ns3::ThreeLogDistancePropagationLossModel [class]
module.add_class('ThreeLogDistancePropagationLossModel', import_from_module='ns.propagation', parent=root_module['ns3::PropagationLossModel'])
## nstime.h (module 'core'): ns3::Time [class]
module.add_class('Time', import_from_module='ns.core')
## nstime.h (module 'core'): ns3::Time::Unit [enumeration]
module.add_enum('Unit', ['S', 'MS', 'US', 'NS', 'PS', 'FS', 'LAST'], outer_class=root_module['ns3::Time'], import_from_module='ns.core')
## nstime.h (module 'core'): ns3::Time [class]
root_module['ns3::Time'].implicitly_converts_to(root_module['ns3::int64x64_t'])
## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor [class]
module.add_class('TraceSourceAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
## trailer.h (module 'network'): ns3::Trailer [class]
module.add_class('Trailer', import_from_module='ns.network', parent=root_module['ns3::Chunk'])
## propagation-loss-model.h (module 'propagation'): ns3::TwoRayGroundPropagationLossModel [class]
module.add_class('TwoRayGroundPropagationLossModel', import_from_module='ns.propagation', parent=root_module['ns3::PropagationLossModel'])
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader [class]
module.add_class('WifiActionHeader', parent=root_module['ns3::Header'])
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader::CategoryValue [enumeration]
module.add_enum('CategoryValue', ['BLOCK_ACK', 'MESH_PEERING_MGT', 'MESH_LINK_METRIC', 'MESH_PATH_SELECTION', 'MESH_INTERWORKING', 'MESH_RESOURCE_COORDINATION', 'MESH_PROXY_FORWARDING'], outer_class=root_module['ns3::WifiActionHeader'])
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader::PeerLinkMgtActionValue [enumeration]
module.add_enum('PeerLinkMgtActionValue', ['PEER_LINK_OPEN', 'PEER_LINK_CONFIRM', 'PEER_LINK_CLOSE'], outer_class=root_module['ns3::WifiActionHeader'])
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader::LinkMetricActionValue [enumeration]
module.add_enum('LinkMetricActionValue', ['LINK_METRIC_REQUEST', 'LINK_METRIC_REPORT'], outer_class=root_module['ns3::WifiActionHeader'])
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader::PathSelectionActionValue [enumeration]
module.add_enum('PathSelectionActionValue', ['PATH_SELECTION'], outer_class=root_module['ns3::WifiActionHeader'])
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader::InterworkActionValue [enumeration]
module.add_enum('InterworkActionValue', ['PORTAL_ANNOUNCEMENT'], outer_class=root_module['ns3::WifiActionHeader'])
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader::ResourceCoordinationActionValue [enumeration]
module.add_enum('ResourceCoordinationActionValue', ['CONGESTION_CONTROL_NOTIFICATION', 'MDA_SETUP_REQUEST', 'MDA_SETUP_REPLY', 'MDAOP_ADVERTISMENT_REQUEST', 'MDAOP_ADVERTISMENTS', 'MDAOP_SET_TEARDOWN', 'BEACON_TIMING_REQUEST', 'BEACON_TIMING_RESPONSE', 'TBTT_ADJUSTMENT_REQUEST', 'MESH_CHANNEL_SWITCH_ANNOUNCEMENT'], outer_class=root_module['ns3::WifiActionHeader'])
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader::BlockAckActionValue [enumeration]
module.add_enum('BlockAckActionValue', ['BLOCK_ACK_ADDBA_REQUEST', 'BLOCK_ACK_ADDBA_RESPONSE', 'BLOCK_ACK_DELBA'], outer_class=root_module['ns3::WifiActionHeader'])
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader::ActionValue [union]
module.add_class('ActionValue', outer_class=root_module['ns3::WifiActionHeader'])
## wifi-information-element.h (module 'wifi'): ns3::WifiInformationElement [class]
module.add_class('WifiInformationElement', parent=root_module['ns3::SimpleRefCount< ns3::WifiInformationElement, ns3::empty, ns3::DefaultDeleter<ns3::WifiInformationElement> >'])
## wifi-information-element-vector.h (module 'wifi'): ns3::WifiInformationElementVector [class]
module.add_class('WifiInformationElementVector', parent=root_module['ns3::Header'])
## wifi-mac.h (module 'wifi'): ns3::WifiMac [class]
module.add_class('WifiMac', parent=root_module['ns3::Object'])
## wifi-mac-header.h (module 'wifi'): ns3::WifiMacHeader [class]
module.add_class('WifiMacHeader', parent=root_module['ns3::Header'])
## wifi-mac-header.h (module 'wifi'): ns3::WifiMacHeader::QosAckPolicy [enumeration]
module.add_enum('QosAckPolicy', ['NORMAL_ACK', 'NO_ACK', 'NO_EXPLICIT_ACK', 'BLOCK_ACK'], outer_class=root_module['ns3::WifiMacHeader'])
## wifi-mac-header.h (module 'wifi'): ns3::WifiMacHeader::AddressType [enumeration]
module.add_enum('AddressType', ['ADDR1', 'ADDR2', 'ADDR3', 'ADDR4'], outer_class=root_module['ns3::WifiMacHeader'])
## wifi-mac-queue.h (module 'wifi'): ns3::WifiMacQueue [class]
module.add_class('WifiMacQueue', parent=root_module['ns3::Object'])
## wifi-phy.h (module 'wifi'): ns3::WifiPhy [class]
module.add_class('WifiPhy', parent=root_module['ns3::Object'])
## wifi-phy.h (module 'wifi'): ns3::WifiPhy::State [enumeration]
module.add_enum('State', ['IDLE', 'CCA_BUSY', 'TX', 'RX', 'SWITCHING'], outer_class=root_module['ns3::WifiPhy'])
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStationManager [class]
module.add_class('WifiRemoteStationManager', parent=root_module['ns3::Object'])
## yans-wifi-phy.h (module 'wifi'): ns3::YansWifiPhy [class]
module.add_class('YansWifiPhy', parent=root_module['ns3::WifiPhy'])
## aarf-wifi-manager.h (module 'wifi'): ns3::AarfWifiManager [class]
module.add_class('AarfWifiManager', parent=root_module['ns3::WifiRemoteStationManager'])
## aarfcd-wifi-manager.h (module 'wifi'): ns3::AarfcdWifiManager [class]
module.add_class('AarfcdWifiManager', parent=root_module['ns3::WifiRemoteStationManager'])
## amrr-wifi-manager.h (module 'wifi'): ns3::AmrrWifiManager [class]
module.add_class('AmrrWifiManager', parent=root_module['ns3::WifiRemoteStationManager'])
## amsdu-subframe-header.h (module 'wifi'): ns3::AmsduSubframeHeader [class]
module.add_class('AmsduSubframeHeader', parent=root_module['ns3::Header'])
## arf-wifi-manager.h (module 'wifi'): ns3::ArfWifiManager [class]
module.add_class('ArfWifiManager', parent=root_module['ns3::WifiRemoteStationManager'])
## athstats-helper.h (module 'wifi'): ns3::AthstatsWifiTraceSink [class]
module.add_class('AthstatsWifiTraceSink', parent=root_module['ns3::Object'])
## attribute.h (module 'core'): ns3::AttributeAccessor [class]
module.add_class('AttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
## attribute.h (module 'core'): ns3::AttributeChecker [class]
module.add_class('AttributeChecker', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
## attribute.h (module 'core'): ns3::AttributeValue [class]
module.add_class('AttributeValue', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
## callback.h (module 'core'): ns3::CallbackChecker [class]
module.add_class('CallbackChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## callback.h (module 'core'): ns3::CallbackImplBase [class]
module.add_class('CallbackImplBase', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
## callback.h (module 'core'): ns3::CallbackValue [class]
module.add_class('CallbackValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## cara-wifi-manager.h (module 'wifi'): ns3::CaraWifiManager [class]
module.add_class('CaraWifiManager', parent=root_module['ns3::WifiRemoteStationManager'])
## channel.h (module 'network'): ns3::Channel [class]
module.add_class('Channel', import_from_module='ns.network', parent=root_module['ns3::Object'])
## constant-rate-wifi-manager.h (module 'wifi'): ns3::ConstantRateWifiManager [class]
module.add_class('ConstantRateWifiManager', parent=root_module['ns3::WifiRemoteStationManager'])
## propagation-delay-model.h (module 'propagation'): ns3::ConstantSpeedPropagationDelayModel [class]
module.add_class('ConstantSpeedPropagationDelayModel', import_from_module='ns.propagation', parent=root_module['ns3::PropagationDelayModel'])
## cost231-propagation-loss-model.h (module 'propagation'): ns3::Cost231PropagationLossModel [class]
module.add_class('Cost231PropagationLossModel', import_from_module='ns.propagation', parent=root_module['ns3::PropagationLossModel'])
## cost231-propagation-loss-model.h (module 'propagation'): ns3::Cost231PropagationLossModel::Environment [enumeration]
module.add_enum('Environment', ['SubUrban', 'MediumCity', 'Metropolitan'], outer_class=root_module['ns3::Cost231PropagationLossModel'], import_from_module='ns.propagation')
## ctrl-headers.h (module 'wifi'): ns3::CtrlBAckRequestHeader [class]
module.add_class('CtrlBAckRequestHeader', parent=root_module['ns3::Header'])
## ctrl-headers.h (module 'wifi'): ns3::CtrlBAckResponseHeader [class]
module.add_class('CtrlBAckResponseHeader', parent=root_module['ns3::Header'])
## dcf.h (module 'wifi'): ns3::Dcf [class]
module.add_class('Dcf', parent=root_module['ns3::Object'])
## double.h (module 'core'): ns3::DoubleValue [class]
module.add_class('DoubleValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## edca-txop-n.h (module 'wifi'): ns3::EdcaTxopN [class]
module.add_class('EdcaTxopN', parent=root_module['ns3::Dcf'])
## attribute.h (module 'core'): ns3::EmptyAttributeValue [class]
module.add_class('EmptyAttributeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## error-rate-model.h (module 'wifi'): ns3::ErrorRateModel [class]
module.add_class('ErrorRateModel', parent=root_module['ns3::Object'])
## event-impl.h (module 'core'): ns3::EventImpl [class]
module.add_class('EventImpl', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >'])
## supported-rates.h (module 'wifi'): ns3::ExtendedSupportedRatesIE [class]
module.add_class('ExtendedSupportedRatesIE', parent=root_module['ns3::WifiInformationElement'])
## propagation-loss-model.h (module 'propagation'): ns3::FixedRssLossModel [class]
module.add_class('FixedRssLossModel', import_from_module='ns.propagation', parent=root_module['ns3::PropagationLossModel'])
## propagation-loss-model.h (module 'propagation'): ns3::FriisPropagationLossModel [class]
module.add_class('FriisPropagationLossModel', import_from_module='ns.propagation', parent=root_module['ns3::PropagationLossModel'])
## ideal-wifi-manager.h (module 'wifi'): ns3::IdealWifiManager [class]
module.add_class('IdealWifiManager', parent=root_module['ns3::WifiRemoteStationManager'])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker [class]
module.add_class('Ipv4AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue [class]
module.add_class('Ipv4AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker [class]
module.add_class('Ipv4MaskChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue [class]
module.add_class('Ipv4MaskValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker [class]
module.add_class('Ipv6AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue [class]
module.add_class('Ipv6AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker [class]
module.add_class('Ipv6PrefixChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue [class]
module.add_class('Ipv6PrefixValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## jakes-propagation-loss-model.h (module 'propagation'): ns3::JakesPropagationLossModel [class]
module.add_class('JakesPropagationLossModel', import_from_module='ns.propagation', parent=root_module['ns3::PropagationLossModel'])
## propagation-loss-model.h (module 'propagation'): ns3::LogDistancePropagationLossModel [class]
module.add_class('LogDistancePropagationLossModel', import_from_module='ns.propagation', parent=root_module['ns3::PropagationLossModel'])
## mac48-address.h (module 'network'): ns3::Mac48AddressChecker [class]
module.add_class('Mac48AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## mac48-address.h (module 'network'): ns3::Mac48AddressValue [class]
module.add_class('Mac48AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## mac-low.h (module 'wifi'): ns3::MacLow [class]
module.add_class('MacLow', parent=root_module['ns3::Object'])
## propagation-loss-model.h (module 'propagation'): ns3::MatrixPropagationLossModel [class]
module.add_class('MatrixPropagationLossModel', import_from_module='ns.propagation', parent=root_module['ns3::PropagationLossModel'])
## mgt-headers.h (module 'wifi'): ns3::MgtBeaconHeader [class]
module.add_class('MgtBeaconHeader', parent=root_module['ns3::MgtProbeResponseHeader'])
## minstrel-wifi-manager.h (module 'wifi'): ns3::MinstrelWifiManager [class]
module.add_class('MinstrelWifiManager', parent=root_module['ns3::WifiRemoteStationManager'])
## msdu-aggregator.h (module 'wifi'): ns3::MsduAggregator [class]
module.add_class('MsduAggregator', parent=root_module['ns3::Object'])
## propagation-loss-model.h (module 'propagation'): ns3::NakagamiPropagationLossModel [class]
module.add_class('NakagamiPropagationLossModel', import_from_module='ns.propagation', parent=root_module['ns3::PropagationLossModel'])
## net-device.h (module 'network'): ns3::NetDevice [class]
module.add_class('NetDevice', import_from_module='ns.network', parent=root_module['ns3::Object'])
## net-device.h (module 'network'): ns3::NetDevice::PacketType [enumeration]
module.add_enum('PacketType', ['PACKET_HOST', 'NS3_PACKET_HOST', 'PACKET_BROADCAST', 'NS3_PACKET_BROADCAST', 'PACKET_MULTICAST', 'NS3_PACKET_MULTICAST', 'PACKET_OTHERHOST', 'NS3_PACKET_OTHERHOST'], outer_class=root_module['ns3::NetDevice'], import_from_module='ns.network')
## nist-error-rate-model.h (module 'wifi'): ns3::NistErrorRateModel [class]
module.add_class('NistErrorRateModel', parent=root_module['ns3::ErrorRateModel'])
## nix-vector.h (module 'network'): ns3::NixVector [class]
module.add_class('NixVector', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >'])
## node.h (module 'network'): ns3::Node [class]
module.add_class('Node', import_from_module='ns.network', parent=root_module['ns3::Object'])
## object-factory.h (module 'core'): ns3::ObjectFactoryChecker [class]
module.add_class('ObjectFactoryChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## object-factory.h (module 'core'): ns3::ObjectFactoryValue [class]
module.add_class('ObjectFactoryValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## onoe-wifi-manager.h (module 'wifi'): ns3::OnoeWifiManager [class]
module.add_class('OnoeWifiManager', parent=root_module['ns3::WifiRemoteStationManager'])
## output-stream-wrapper.h (module 'network'): ns3::OutputStreamWrapper [class]
module.add_class('OutputStreamWrapper', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >'])
## packet.h (module 'network'): ns3::Packet [class]
module.add_class('Packet', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >'])
## random-variable.h (module 'core'): ns3::RandomVariableChecker [class]
module.add_class('RandomVariableChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## random-variable.h (module 'core'): ns3::RandomVariableValue [class]
module.add_class('RandomVariableValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## regular-wifi-mac.h (module 'wifi'): ns3::RegularWifiMac [class]
module.add_class('RegularWifiMac', parent=root_module['ns3::WifiMac'])
## rraa-wifi-manager.h (module 'wifi'): ns3::RraaWifiManager [class]
module.add_class('RraaWifiManager', parent=root_module['ns3::WifiRemoteStationManager'])
## ssid.h (module 'wifi'): ns3::Ssid [class]
module.add_class('Ssid', parent=root_module['ns3::WifiInformationElement'])
## ssid.h (module 'wifi'): ns3::SsidChecker [class]
module.add_class('SsidChecker', parent=root_module['ns3::AttributeChecker'])
## ssid.h (module 'wifi'): ns3::SsidValue [class]
module.add_class('SsidValue', parent=root_module['ns3::AttributeValue'])
## sta-wifi-mac.h (module 'wifi'): ns3::StaWifiMac [class]
module.add_class('StaWifiMac', parent=root_module['ns3::RegularWifiMac'])
## supported-rates.h (module 'wifi'): ns3::SupportedRates [class]
module.add_class('SupportedRates', parent=root_module['ns3::WifiInformationElement'])
## nstime.h (module 'core'): ns3::TimeChecker [class]
module.add_class('TimeChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## nstime.h (module 'core'): ns3::TimeValue [class]
module.add_class('TimeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## type-id.h (module 'core'): ns3::TypeIdChecker [class]
module.add_class('TypeIdChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## type-id.h (module 'core'): ns3::TypeIdValue [class]
module.add_class('TypeIdValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## uinteger.h (module 'core'): ns3::UintegerValue [class]
module.add_class('UintegerValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## wifi-channel.h (module 'wifi'): ns3::WifiChannel [class]
module.add_class('WifiChannel', parent=root_module['ns3::Channel'])
## wifi-mode.h (module 'wifi'): ns3::WifiModeChecker [class]
module.add_class('WifiModeChecker', parent=root_module['ns3::AttributeChecker'])
## wifi-mode.h (module 'wifi'): ns3::WifiModeValue [class]
module.add_class('WifiModeValue', parent=root_module['ns3::AttributeValue'])
## wifi-net-device.h (module 'wifi'): ns3::WifiNetDevice [class]
module.add_class('WifiNetDevice', parent=root_module['ns3::NetDevice'])
## yans-error-rate-model.h (module 'wifi'): ns3::YansErrorRateModel [class]
module.add_class('YansErrorRateModel', parent=root_module['ns3::ErrorRateModel'])
## yans-wifi-channel.h (module 'wifi'): ns3::YansWifiChannel [class]
module.add_class('YansWifiChannel', parent=root_module['ns3::WifiChannel'])
## address.h (module 'network'): ns3::AddressChecker [class]
module.add_class('AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## address.h (module 'network'): ns3::AddressValue [class]
module.add_class('AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## adhoc-wifi-mac.h (module 'wifi'): ns3::AdhocWifiMac [class]
module.add_class('AdhocWifiMac', parent=root_module['ns3::RegularWifiMac'])
## ap-wifi-mac.h (module 'wifi'): ns3::ApWifiMac [class]
module.add_class('ApWifiMac', parent=root_module['ns3::RegularWifiMac'])
## dca-txop.h (module 'wifi'): ns3::DcaTxop [class]
module.add_class('DcaTxop', parent=root_module['ns3::Dcf'])
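    ## The calls below register the STL containers and C++ typedefs used by the
    ## wifi API so that pybindgen knows how to convert them; add_type_alias() only
    ## records an alias name for an existing type, it does not create a new wrapper.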
module.add_container('ns3::WifiModeList', 'ns3::WifiMode', container_type='vector')
module.add_container('std::list< std::pair< ns3::Ptr< ns3::Packet >, ns3::AmsduSubframeHeader > >', 'std::pair< ns3::Ptr< ns3::Packet >, ns3::AmsduSubframeHeader >', container_type='list')
typehandlers.add_type_alias('uint8_t', 'ns3::WifiInformationElementId')
typehandlers.add_type_alias('uint8_t*', 'ns3::WifiInformationElementId*')
typehandlers.add_type_alias('uint8_t&', 'ns3::WifiInformationElementId&')
typehandlers.add_type_alias('__gnu_cxx::__normal_iterator< ns3::WifiMode const *, std::vector< ns3::WifiMode, std::allocator< ns3::WifiMode > > >', 'ns3::WifiModeListIterator')
typehandlers.add_type_alias('__gnu_cxx::__normal_iterator< ns3::WifiMode const *, std::vector< ns3::WifiMode, std::allocator< ns3::WifiMode > > >*', 'ns3::WifiModeListIterator*')
typehandlers.add_type_alias('__gnu_cxx::__normal_iterator< ns3::WifiMode const *, std::vector< ns3::WifiMode, std::allocator< ns3::WifiMode > > >&', 'ns3::WifiModeListIterator&')
typehandlers.add_type_alias('std::vector< ns3::RateInfo, std::allocator< ns3::RateInfo > >', 'ns3::MinstrelRate')
typehandlers.add_type_alias('std::vector< ns3::RateInfo, std::allocator< ns3::RateInfo > >*', 'ns3::MinstrelRate*')
typehandlers.add_type_alias('std::vector< ns3::RateInfo, std::allocator< ns3::RateInfo > >&', 'ns3::MinstrelRate&')
typehandlers.add_type_alias('std::vector< ns3::WifiMode, std::allocator< ns3::WifiMode > >', 'ns3::WifiModeList')
typehandlers.add_type_alias('std::vector< ns3::WifiMode, std::allocator< ns3::WifiMode > >*', 'ns3::WifiModeList*')
typehandlers.add_type_alias('std::vector< ns3::WifiMode, std::allocator< ns3::WifiMode > >&', 'ns3::WifiModeList&')
typehandlers.add_type_alias('std::vector< std::vector< unsigned int, std::allocator< unsigned int > >, std::allocator< std::vector< unsigned int, std::allocator< unsigned int > > > >', 'ns3::SampleRate')
typehandlers.add_type_alias('std::vector< std::vector< unsigned int, std::allocator< unsigned int > >, std::allocator< std::vector< unsigned int, std::allocator< unsigned int > > > >*', 'ns3::SampleRate*')
typehandlers.add_type_alias('std::vector< std::vector< unsigned int, std::allocator< unsigned int > >, std::allocator< std::vector< unsigned int, std::allocator< unsigned int > > > >&', 'ns3::SampleRate&')
    ## Register a nested module for the namespace FatalImpl
    nested_module = module.add_cpp_namespace('FatalImpl')
    register_types_ns3_FatalImpl(nested_module)

    ## Register a nested module for the namespace internal
    nested_module = module.add_cpp_namespace('internal')
    register_types_ns3_internal(nested_module)


def register_types_ns3_FatalImpl(module):
    root_module = module.get_root()


def register_types_ns3_internal(module):
    root_module = module.get_root()
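
## register_types() above only declares the wrapper classes and enums so that
## cross-references between them resolve; register_methods() below then attaches
## constructors, methods and attributes to each registered wrapper.  These
## generated scripts are normally driven in roughly this order (a sketch of the
## usual modulegen flow; the exact entry points can differ between ns-3 releases):
##
##     root_module = module_init()
##     register_types(root_module)
##     register_methods(root_module)
##     register_functions(root_module)
##     root_module.generate(pybindgen.FileCodeSink(sys.stdout))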
def register_methods(root_module):
register_Ns3Address_methods(root_module, root_module['ns3::Address'])
register_Ns3AsciiTraceHelper_methods(root_module, root_module['ns3::AsciiTraceHelper'])
register_Ns3AsciiTraceHelperForDevice_methods(root_module, root_module['ns3::AsciiTraceHelperForDevice'])
register_Ns3AthstatsHelper_methods(root_module, root_module['ns3::AthstatsHelper'])
register_Ns3AttributeConstructionList_methods(root_module, root_module['ns3::AttributeConstructionList'])
register_Ns3AttributeConstructionListItem_methods(root_module, root_module['ns3::AttributeConstructionList::Item'])
register_Ns3Bar_methods(root_module, root_module['ns3::Bar'])
register_Ns3BlockAckAgreement_methods(root_module, root_module['ns3::BlockAckAgreement'])
register_Ns3BlockAckCache_methods(root_module, root_module['ns3::BlockAckCache'])
register_Ns3BlockAckManager_methods(root_module, root_module['ns3::BlockAckManager'])
register_Ns3Buffer_methods(root_module, root_module['ns3::Buffer'])
register_Ns3BufferIterator_methods(root_module, root_module['ns3::Buffer::Iterator'])
register_Ns3ByteTagIterator_methods(root_module, root_module['ns3::ByteTagIterator'])
register_Ns3ByteTagIteratorItem_methods(root_module, root_module['ns3::ByteTagIterator::Item'])
register_Ns3ByteTagList_methods(root_module, root_module['ns3::ByteTagList'])
register_Ns3ByteTagListIterator_methods(root_module, root_module['ns3::ByteTagList::Iterator'])
register_Ns3ByteTagListIteratorItem_methods(root_module, root_module['ns3::ByteTagList::Iterator::Item'])
register_Ns3CallbackBase_methods(root_module, root_module['ns3::CallbackBase'])
register_Ns3CapabilityInformation_methods(root_module, root_module['ns3::CapabilityInformation'])
register_Ns3DcfManager_methods(root_module, root_module['ns3::DcfManager'])
register_Ns3DcfState_methods(root_module, root_module['ns3::DcfState'])
register_Ns3DsssErrorRateModel_methods(root_module, root_module['ns3::DsssErrorRateModel'])
register_Ns3EventId_methods(root_module, root_module['ns3::EventId'])
register_Ns3InterferenceHelper_methods(root_module, root_module['ns3::InterferenceHelper'])
register_Ns3InterferenceHelperSnrPer_methods(root_module, root_module['ns3::InterferenceHelper::SnrPer'])
register_Ns3Ipv4Address_methods(root_module, root_module['ns3::Ipv4Address'])
register_Ns3Ipv4Mask_methods(root_module, root_module['ns3::Ipv4Mask'])
register_Ns3Ipv6Address_methods(root_module, root_module['ns3::Ipv6Address'])
register_Ns3Ipv6Prefix_methods(root_module, root_module['ns3::Ipv6Prefix'])
register_Ns3Mac48Address_methods(root_module, root_module['ns3::Mac48Address'])
register_Ns3MacLowBlockAckEventListener_methods(root_module, root_module['ns3::MacLowBlockAckEventListener'])
register_Ns3MacLowDcfListener_methods(root_module, root_module['ns3::MacLowDcfListener'])
register_Ns3MacLowTransmissionListener_methods(root_module, root_module['ns3::MacLowTransmissionListener'])
register_Ns3MacLowTransmissionParameters_methods(root_module, root_module['ns3::MacLowTransmissionParameters'])
register_Ns3MacRxMiddle_methods(root_module, root_module['ns3::MacRxMiddle'])
register_Ns3NetDeviceContainer_methods(root_module, root_module['ns3::NetDeviceContainer'])
register_Ns3NodeContainer_methods(root_module, root_module['ns3::NodeContainer'])
register_Ns3ObjectBase_methods(root_module, root_module['ns3::ObjectBase'])
register_Ns3ObjectDeleter_methods(root_module, root_module['ns3::ObjectDeleter'])
register_Ns3ObjectFactory_methods(root_module, root_module['ns3::ObjectFactory'])
register_Ns3OriginatorBlockAckAgreement_methods(root_module, root_module['ns3::OriginatorBlockAckAgreement'])
register_Ns3PacketMetadata_methods(root_module, root_module['ns3::PacketMetadata'])
register_Ns3PacketMetadataItem_methods(root_module, root_module['ns3::PacketMetadata::Item'])
register_Ns3PacketMetadataItemIterator_methods(root_module, root_module['ns3::PacketMetadata::ItemIterator'])
register_Ns3PacketTagIterator_methods(root_module, root_module['ns3::PacketTagIterator'])
register_Ns3PacketTagIteratorItem_methods(root_module, root_module['ns3::PacketTagIterator::Item'])
register_Ns3PacketTagList_methods(root_module, root_module['ns3::PacketTagList'])
register_Ns3PacketTagListTagData_methods(root_module, root_module['ns3::PacketTagList::TagData'])
register_Ns3PcapFile_methods(root_module, root_module['ns3::PcapFile'])
register_Ns3PcapHelper_methods(root_module, root_module['ns3::PcapHelper'])
register_Ns3PcapHelperForDevice_methods(root_module, root_module['ns3::PcapHelperForDevice'])
register_Ns3RandomVariable_methods(root_module, root_module['ns3::RandomVariable'])
register_Ns3RateInfo_methods(root_module, root_module['ns3::RateInfo'])
register_Ns3SeedManager_methods(root_module, root_module['ns3::SeedManager'])
register_Ns3SequentialVariable_methods(root_module, root_module['ns3::SequentialVariable'])
register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
register_Ns3Simulator_methods(root_module, root_module['ns3::Simulator'])
register_Ns3StatusCode_methods(root_module, root_module['ns3::StatusCode'])
register_Ns3Tag_methods(root_module, root_module['ns3::Tag'])
register_Ns3TagBuffer_methods(root_module, root_module['ns3::TagBuffer'])
register_Ns3TriangularVariable_methods(root_module, root_module['ns3::TriangularVariable'])
register_Ns3TypeId_methods(root_module, root_module['ns3::TypeId'])
register_Ns3TypeIdAttributeInformation_methods(root_module, root_module['ns3::TypeId::AttributeInformation'])
register_Ns3TypeIdTraceSourceInformation_methods(root_module, root_module['ns3::TypeId::TraceSourceInformation'])
register_Ns3UniformVariable_methods(root_module, root_module['ns3::UniformVariable'])
register_Ns3WeibullVariable_methods(root_module, root_module['ns3::WeibullVariable'])
register_Ns3WifiHelper_methods(root_module, root_module['ns3::WifiHelper'])
register_Ns3WifiMacHelper_methods(root_module, root_module['ns3::WifiMacHelper'])
register_Ns3WifiMode_methods(root_module, root_module['ns3::WifiMode'])
register_Ns3WifiModeFactory_methods(root_module, root_module['ns3::WifiModeFactory'])
register_Ns3WifiPhyHelper_methods(root_module, root_module['ns3::WifiPhyHelper'])
register_Ns3WifiPhyListener_methods(root_module, root_module['ns3::WifiPhyListener'])
register_Ns3WifiRemoteStation_methods(root_module, root_module['ns3::WifiRemoteStation'])
register_Ns3WifiRemoteStationInfo_methods(root_module, root_module['ns3::WifiRemoteStationInfo'])
register_Ns3WifiRemoteStationState_methods(root_module, root_module['ns3::WifiRemoteStationState'])
register_Ns3YansWifiChannelHelper_methods(root_module, root_module['ns3::YansWifiChannelHelper'])
register_Ns3YansWifiPhyHelper_methods(root_module, root_module['ns3::YansWifiPhyHelper'])
register_Ns3ZetaVariable_methods(root_module, root_module['ns3::ZetaVariable'])
register_Ns3ZipfVariable_methods(root_module, root_module['ns3::ZipfVariable'])
register_Ns3Empty_methods(root_module, root_module['ns3::empty'])
register_Ns3Int64x64_t_methods(root_module, root_module['ns3::int64x64_t'])
register_Ns3Chunk_methods(root_module, root_module['ns3::Chunk'])
register_Ns3ConstantVariable_methods(root_module, root_module['ns3::ConstantVariable'])
register_Ns3DeterministicVariable_methods(root_module, root_module['ns3::DeterministicVariable'])
register_Ns3EmpiricalVariable_methods(root_module, root_module['ns3::EmpiricalVariable'])
register_Ns3ErlangVariable_methods(root_module, root_module['ns3::ErlangVariable'])
register_Ns3ExponentialVariable_methods(root_module, root_module['ns3::ExponentialVariable'])
register_Ns3GammaVariable_methods(root_module, root_module['ns3::GammaVariable'])
register_Ns3Header_methods(root_module, root_module['ns3::Header'])
register_Ns3IntEmpiricalVariable_methods(root_module, root_module['ns3::IntEmpiricalVariable'])
register_Ns3LogNormalVariable_methods(root_module, root_module['ns3::LogNormalVariable'])
register_Ns3MgtAddBaRequestHeader_methods(root_module, root_module['ns3::MgtAddBaRequestHeader'])
register_Ns3MgtAddBaResponseHeader_methods(root_module, root_module['ns3::MgtAddBaResponseHeader'])
register_Ns3MgtAssocRequestHeader_methods(root_module, root_module['ns3::MgtAssocRequestHeader'])
register_Ns3MgtAssocResponseHeader_methods(root_module, root_module['ns3::MgtAssocResponseHeader'])
register_Ns3MgtDelBaHeader_methods(root_module, root_module['ns3::MgtDelBaHeader'])
register_Ns3MgtProbeRequestHeader_methods(root_module, root_module['ns3::MgtProbeRequestHeader'])
register_Ns3MgtProbeResponseHeader_methods(root_module, root_module['ns3::MgtProbeResponseHeader'])
register_Ns3NormalVariable_methods(root_module, root_module['ns3::NormalVariable'])
register_Ns3NqosWifiMacHelper_methods(root_module, root_module['ns3::NqosWifiMacHelper'])
register_Ns3Object_methods(root_module, root_module['ns3::Object'])
register_Ns3ObjectAggregateIterator_methods(root_module, root_module['ns3::Object::AggregateIterator'])
register_Ns3ParetoVariable_methods(root_module, root_module['ns3::ParetoVariable'])
register_Ns3PcapFileWrapper_methods(root_module, root_module['ns3::PcapFileWrapper'])
register_Ns3PropagationDelayModel_methods(root_module, root_module['ns3::PropagationDelayModel'])
register_Ns3PropagationLossModel_methods(root_module, root_module['ns3::PropagationLossModel'])
register_Ns3QosTag_methods(root_module, root_module['ns3::QosTag'])
register_Ns3QosWifiMacHelper_methods(root_module, root_module['ns3::QosWifiMacHelper'])
register_Ns3RandomPropagationDelayModel_methods(root_module, root_module['ns3::RandomPropagationDelayModel'])
register_Ns3RandomPropagationLossModel_methods(root_module, root_module['ns3::RandomPropagationLossModel'])
register_Ns3RangePropagationLossModel_methods(root_module, root_module['ns3::RangePropagationLossModel'])
register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
register_Ns3SimpleRefCount__Ns3EventImpl_Ns3Empty_Ns3DefaultDeleter__lt__ns3EventImpl__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >'])
register_Ns3SimpleRefCount__Ns3InterferenceHelperEvent_Ns3Empty_Ns3DefaultDeleter__lt__ns3InterferenceHelperEvent__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::InterferenceHelper::Event, ns3::empty, ns3::DefaultDeleter<ns3::InterferenceHelper::Event> >'])
register_Ns3SimpleRefCount__Ns3NixVector_Ns3Empty_Ns3DefaultDeleter__lt__ns3NixVector__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >'])
register_Ns3SimpleRefCount__Ns3OutputStreamWrapper_Ns3Empty_Ns3DefaultDeleter__lt__ns3OutputStreamWrapper__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >'])
register_Ns3SimpleRefCount__Ns3Packet_Ns3Empty_Ns3DefaultDeleter__lt__ns3Packet__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >'])
register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
register_Ns3SimpleRefCount__Ns3WifiInformationElement_Ns3Empty_Ns3DefaultDeleter__lt__ns3WifiInformationElement__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::WifiInformationElement, ns3::empty, ns3::DefaultDeleter<ns3::WifiInformationElement> >'])
register_Ns3ThreeLogDistancePropagationLossModel_methods(root_module, root_module['ns3::ThreeLogDistancePropagationLossModel'])
register_Ns3Time_methods(root_module, root_module['ns3::Time'])
register_Ns3TraceSourceAccessor_methods(root_module, root_module['ns3::TraceSourceAccessor'])
register_Ns3Trailer_methods(root_module, root_module['ns3::Trailer'])
register_Ns3TwoRayGroundPropagationLossModel_methods(root_module, root_module['ns3::TwoRayGroundPropagationLossModel'])
register_Ns3WifiActionHeader_methods(root_module, root_module['ns3::WifiActionHeader'])
register_Ns3WifiActionHeaderActionValue_methods(root_module, root_module['ns3::WifiActionHeader::ActionValue'])
register_Ns3WifiInformationElement_methods(root_module, root_module['ns3::WifiInformationElement'])
register_Ns3WifiInformationElementVector_methods(root_module, root_module['ns3::WifiInformationElementVector'])
register_Ns3WifiMac_methods(root_module, root_module['ns3::WifiMac'])
register_Ns3WifiMacHeader_methods(root_module, root_module['ns3::WifiMacHeader'])
register_Ns3WifiMacQueue_methods(root_module, root_module['ns3::WifiMacQueue'])
register_Ns3WifiPhy_methods(root_module, root_module['ns3::WifiPhy'])
register_Ns3WifiRemoteStationManager_methods(root_module, root_module['ns3::WifiRemoteStationManager'])
register_Ns3YansWifiPhy_methods(root_module, root_module['ns3::YansWifiPhy'])
register_Ns3AarfWifiManager_methods(root_module, root_module['ns3::AarfWifiManager'])
register_Ns3AarfcdWifiManager_methods(root_module, root_module['ns3::AarfcdWifiManager'])
register_Ns3AmrrWifiManager_methods(root_module, root_module['ns3::AmrrWifiManager'])
register_Ns3AmsduSubframeHeader_methods(root_module, root_module['ns3::AmsduSubframeHeader'])
register_Ns3ArfWifiManager_methods(root_module, root_module['ns3::ArfWifiManager'])
register_Ns3AthstatsWifiTraceSink_methods(root_module, root_module['ns3::AthstatsWifiTraceSink'])
register_Ns3AttributeAccessor_methods(root_module, root_module['ns3::AttributeAccessor'])
register_Ns3AttributeChecker_methods(root_module, root_module['ns3::AttributeChecker'])
register_Ns3AttributeValue_methods(root_module, root_module['ns3::AttributeValue'])
register_Ns3CallbackChecker_methods(root_module, root_module['ns3::CallbackChecker'])
register_Ns3CallbackImplBase_methods(root_module, root_module['ns3::CallbackImplBase'])
register_Ns3CallbackValue_methods(root_module, root_module['ns3::CallbackValue'])
register_Ns3CaraWifiManager_methods(root_module, root_module['ns3::CaraWifiManager'])
register_Ns3Channel_methods(root_module, root_module['ns3::Channel'])
register_Ns3ConstantRateWifiManager_methods(root_module, root_module['ns3::ConstantRateWifiManager'])
register_Ns3ConstantSpeedPropagationDelayModel_methods(root_module, root_module['ns3::ConstantSpeedPropagationDelayModel'])
register_Ns3Cost231PropagationLossModel_methods(root_module, root_module['ns3::Cost231PropagationLossModel'])
register_Ns3CtrlBAckRequestHeader_methods(root_module, root_module['ns3::CtrlBAckRequestHeader'])
register_Ns3CtrlBAckResponseHeader_methods(root_module, root_module['ns3::CtrlBAckResponseHeader'])
register_Ns3Dcf_methods(root_module, root_module['ns3::Dcf'])
register_Ns3DoubleValue_methods(root_module, root_module['ns3::DoubleValue'])
register_Ns3EdcaTxopN_methods(root_module, root_module['ns3::EdcaTxopN'])
register_Ns3EmptyAttributeValue_methods(root_module, root_module['ns3::EmptyAttributeValue'])
register_Ns3ErrorRateModel_methods(root_module, root_module['ns3::ErrorRateModel'])
register_Ns3EventImpl_methods(root_module, root_module['ns3::EventImpl'])
register_Ns3ExtendedSupportedRatesIE_methods(root_module, root_module['ns3::ExtendedSupportedRatesIE'])
register_Ns3FixedRssLossModel_methods(root_module, root_module['ns3::FixedRssLossModel'])
register_Ns3FriisPropagationLossModel_methods(root_module, root_module['ns3::FriisPropagationLossModel'])
register_Ns3IdealWifiManager_methods(root_module, root_module['ns3::IdealWifiManager'])
register_Ns3Ipv4AddressChecker_methods(root_module, root_module['ns3::Ipv4AddressChecker'])
register_Ns3Ipv4AddressValue_methods(root_module, root_module['ns3::Ipv4AddressValue'])
register_Ns3Ipv4MaskChecker_methods(root_module, root_module['ns3::Ipv4MaskChecker'])
register_Ns3Ipv4MaskValue_methods(root_module, root_module['ns3::Ipv4MaskValue'])
register_Ns3Ipv6AddressChecker_methods(root_module, root_module['ns3::Ipv6AddressChecker'])
register_Ns3Ipv6AddressValue_methods(root_module, root_module['ns3::Ipv6AddressValue'])
register_Ns3Ipv6PrefixChecker_methods(root_module, root_module['ns3::Ipv6PrefixChecker'])
register_Ns3Ipv6PrefixValue_methods(root_module, root_module['ns3::Ipv6PrefixValue'])
register_Ns3JakesPropagationLossModel_methods(root_module, root_module['ns3::JakesPropagationLossModel'])
register_Ns3LogDistancePropagationLossModel_methods(root_module, root_module['ns3::LogDistancePropagationLossModel'])
register_Ns3Mac48AddressChecker_methods(root_module, root_module['ns3::Mac48AddressChecker'])
register_Ns3Mac48AddressValue_methods(root_module, root_module['ns3::Mac48AddressValue'])
register_Ns3MacLow_methods(root_module, root_module['ns3::MacLow'])
register_Ns3MatrixPropagationLossModel_methods(root_module, root_module['ns3::MatrixPropagationLossModel'])
register_Ns3MgtBeaconHeader_methods(root_module, root_module['ns3::MgtBeaconHeader'])
register_Ns3MinstrelWifiManager_methods(root_module, root_module['ns3::MinstrelWifiManager'])
register_Ns3MsduAggregator_methods(root_module, root_module['ns3::MsduAggregator'])
register_Ns3NakagamiPropagationLossModel_methods(root_module, root_module['ns3::NakagamiPropagationLossModel'])
register_Ns3NetDevice_methods(root_module, root_module['ns3::NetDevice'])
register_Ns3NistErrorRateModel_methods(root_module, root_module['ns3::NistErrorRateModel'])
register_Ns3NixVector_methods(root_module, root_module['ns3::NixVector'])
register_Ns3Node_methods(root_module, root_module['ns3::Node'])
register_Ns3ObjectFactoryChecker_methods(root_module, root_module['ns3::ObjectFactoryChecker'])
register_Ns3ObjectFactoryValue_methods(root_module, root_module['ns3::ObjectFactoryValue'])
register_Ns3OnoeWifiManager_methods(root_module, root_module['ns3::OnoeWifiManager'])
register_Ns3OutputStreamWrapper_methods(root_module, root_module['ns3::OutputStreamWrapper'])
register_Ns3Packet_methods(root_module, root_module['ns3::Packet'])
register_Ns3RandomVariableChecker_methods(root_module, root_module['ns3::RandomVariableChecker'])
register_Ns3RandomVariableValue_methods(root_module, root_module['ns3::RandomVariableValue'])
register_Ns3RegularWifiMac_methods(root_module, root_module['ns3::RegularWifiMac'])
register_Ns3RraaWifiManager_methods(root_module, root_module['ns3::RraaWifiManager'])
register_Ns3Ssid_methods(root_module, root_module['ns3::Ssid'])
register_Ns3SsidChecker_methods(root_module, root_module['ns3::SsidChecker'])
register_Ns3SsidValue_methods(root_module, root_module['ns3::SsidValue'])
register_Ns3StaWifiMac_methods(root_module, root_module['ns3::StaWifiMac'])
register_Ns3SupportedRates_methods(root_module, root_module['ns3::SupportedRates'])
register_Ns3TimeChecker_methods(root_module, root_module['ns3::TimeChecker'])
register_Ns3TimeValue_methods(root_module, root_module['ns3::TimeValue'])
register_Ns3TypeIdChecker_methods(root_module, root_module['ns3::TypeIdChecker'])
register_Ns3TypeIdValue_methods(root_module, root_module['ns3::TypeIdValue'])
register_Ns3UintegerValue_methods(root_module, root_module['ns3::UintegerValue'])
register_Ns3WifiChannel_methods(root_module, root_module['ns3::WifiChannel'])
register_Ns3WifiModeChecker_methods(root_module, root_module['ns3::WifiModeChecker'])
register_Ns3WifiModeValue_methods(root_module, root_module['ns3::WifiModeValue'])
register_Ns3WifiNetDevice_methods(root_module, root_module['ns3::WifiNetDevice'])
register_Ns3YansErrorRateModel_methods(root_module, root_module['ns3::YansErrorRateModel'])
register_Ns3YansWifiChannel_methods(root_module, root_module['ns3::YansWifiChannel'])
register_Ns3AddressChecker_methods(root_module, root_module['ns3::AddressChecker'])
register_Ns3AddressValue_methods(root_module, root_module['ns3::AddressValue'])
register_Ns3AdhocWifiMac_methods(root_module, root_module['ns3::AdhocWifiMac'])
register_Ns3ApWifiMac_methods(root_module, root_module['ns3::ApWifiMac'])
register_Ns3DcaTxop_methods(root_module, root_module['ns3::DcaTxop'])
return
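# Each register_Ns3<Class>_methods() function below wires one scanned C++ class
# into the generated Python bindings: constructors, member functions, instance
# attributes and operators are declared on the PyBindGen class wrapper 'cls'.
# The '##' comments preceding each call record the originating header, the ns-3
# module and the C++ signature that was scanned, so every wrapper can be traced
# back to its declaration.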
def register_Ns3Address_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_binary_comparison_operator('<')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## address.h (module 'network'): ns3::Address::Address() [constructor]
cls.add_constructor([])
## address.h (module 'network'): ns3::Address::Address(uint8_t type, uint8_t const * buffer, uint8_t len) [constructor]
cls.add_constructor([param('uint8_t', 'type'), param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
## address.h (module 'network'): ns3::Address::Address(ns3::Address const & address) [copy constructor]
cls.add_constructor([param('ns3::Address const &', 'address')])
## address.h (module 'network'): bool ns3::Address::CheckCompatible(uint8_t type, uint8_t len) const [member function]
cls.add_method('CheckCompatible',
'bool',
[param('uint8_t', 'type'), param('uint8_t', 'len')],
is_const=True)
## address.h (module 'network'): uint32_t ns3::Address::CopyAllFrom(uint8_t const * buffer, uint8_t len) [member function]
cls.add_method('CopyAllFrom',
'uint32_t',
[param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
## address.h (module 'network'): uint32_t ns3::Address::CopyAllTo(uint8_t * buffer, uint8_t len) const [member function]
cls.add_method('CopyAllTo',
'uint32_t',
[param('uint8_t *', 'buffer'), param('uint8_t', 'len')],
is_const=True)
## address.h (module 'network'): uint32_t ns3::Address::CopyFrom(uint8_t const * buffer, uint8_t len) [member function]
cls.add_method('CopyFrom',
'uint32_t',
[param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
## address.h (module 'network'): uint32_t ns3::Address::CopyTo(uint8_t * buffer) const [member function]
cls.add_method('CopyTo',
'uint32_t',
[param('uint8_t *', 'buffer')],
is_const=True)
## address.h (module 'network'): void ns3::Address::Deserialize(ns3::TagBuffer buffer) [member function]
cls.add_method('Deserialize',
'void',
[param('ns3::TagBuffer', 'buffer')])
## address.h (module 'network'): uint8_t ns3::Address::GetLength() const [member function]
cls.add_method('GetLength',
'uint8_t',
[],
is_const=True)
## address.h (module 'network'): uint32_t ns3::Address::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## address.h (module 'network'): bool ns3::Address::IsInvalid() const [member function]
cls.add_method('IsInvalid',
'bool',
[],
is_const=True)
## address.h (module 'network'): bool ns3::Address::IsMatchingType(uint8_t type) const [member function]
cls.add_method('IsMatchingType',
'bool',
[param('uint8_t', 'type')],
is_const=True)
## address.h (module 'network'): static uint8_t ns3::Address::Register() [member function]
cls.add_method('Register',
'uint8_t',
[],
is_static=True)
## address.h (module 'network'): void ns3::Address::Serialize(ns3::TagBuffer buffer) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::TagBuffer', 'buffer')],
is_const=True)
return
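# The registrations above follow the usual PyBindGen calling convention:
# add_method() takes the exposed name, the return type as a string, a list of
# param('type', 'name') entries, and keyword flags such as is_const, is_static,
# is_virtual, is_pure_virtual or visibility. Illustrative sketch only (it
# mirrors the CopyTo() registration above and is not additional generated
# output):
#
#     cls.add_method('CopyTo',                        # Python-visible name
#                    'uint32_t',                      # C++ return type
#                    [param('uint8_t *', 'buffer')],  # parameter list
#                    is_const=True)                   # const member function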
def register_Ns3AsciiTraceHelper_methods(root_module, cls):
## trace-helper.h (module 'network'): ns3::AsciiTraceHelper::AsciiTraceHelper(ns3::AsciiTraceHelper const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AsciiTraceHelper const &', 'arg0')])
## trace-helper.h (module 'network'): ns3::AsciiTraceHelper::AsciiTraceHelper() [constructor]
cls.add_constructor([])
## trace-helper.h (module 'network'): ns3::Ptr<ns3::OutputStreamWrapper> ns3::AsciiTraceHelper::CreateFileStream(std::string filename, std::_Ios_Openmode filemode=std::ios_base::out) [member function]
cls.add_method('CreateFileStream',
'ns3::Ptr< ns3::OutputStreamWrapper >',
[param('std::string', 'filename'), param('std::_Ios_Openmode', 'filemode', default_value='std::ios_base::out')])
## trace-helper.h (module 'network'): static void ns3::AsciiTraceHelper::DefaultDequeueSinkWithContext(ns3::Ptr<ns3::OutputStreamWrapper> file, std::string context, ns3::Ptr<ns3::Packet const> p) [member function]
cls.add_method('DefaultDequeueSinkWithContext',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'file'), param('std::string', 'context'), param('ns3::Ptr< ns3::Packet const >', 'p')],
is_static=True)
## trace-helper.h (module 'network'): static void ns3::AsciiTraceHelper::DefaultDequeueSinkWithoutContext(ns3::Ptr<ns3::OutputStreamWrapper> file, ns3::Ptr<ns3::Packet const> p) [member function]
cls.add_method('DefaultDequeueSinkWithoutContext',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'file'), param('ns3::Ptr< ns3::Packet const >', 'p')],
is_static=True)
## trace-helper.h (module 'network'): static void ns3::AsciiTraceHelper::DefaultDropSinkWithContext(ns3::Ptr<ns3::OutputStreamWrapper> file, std::string context, ns3::Ptr<ns3::Packet const> p) [member function]
cls.add_method('DefaultDropSinkWithContext',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'file'), param('std::string', 'context'), param('ns3::Ptr< ns3::Packet const >', 'p')],
is_static=True)
## trace-helper.h (module 'network'): static void ns3::AsciiTraceHelper::DefaultDropSinkWithoutContext(ns3::Ptr<ns3::OutputStreamWrapper> file, ns3::Ptr<ns3::Packet const> p) [member function]
cls.add_method('DefaultDropSinkWithoutContext',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'file'), param('ns3::Ptr< ns3::Packet const >', 'p')],
is_static=True)
## trace-helper.h (module 'network'): static void ns3::AsciiTraceHelper::DefaultEnqueueSinkWithContext(ns3::Ptr<ns3::OutputStreamWrapper> file, std::string context, ns3::Ptr<ns3::Packet const> p) [member function]
cls.add_method('DefaultEnqueueSinkWithContext',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'file'), param('std::string', 'context'), param('ns3::Ptr< ns3::Packet const >', 'p')],
is_static=True)
## trace-helper.h (module 'network'): static void ns3::AsciiTraceHelper::DefaultEnqueueSinkWithoutContext(ns3::Ptr<ns3::OutputStreamWrapper> file, ns3::Ptr<ns3::Packet const> p) [member function]
cls.add_method('DefaultEnqueueSinkWithoutContext',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'file'), param('ns3::Ptr< ns3::Packet const >', 'p')],
is_static=True)
## trace-helper.h (module 'network'): static void ns3::AsciiTraceHelper::DefaultReceiveSinkWithContext(ns3::Ptr<ns3::OutputStreamWrapper> file, std::string context, ns3::Ptr<ns3::Packet const> p) [member function]
cls.add_method('DefaultReceiveSinkWithContext',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'file'), param('std::string', 'context'), param('ns3::Ptr< ns3::Packet const >', 'p')],
is_static=True)
## trace-helper.h (module 'network'): static void ns3::AsciiTraceHelper::DefaultReceiveSinkWithoutContext(ns3::Ptr<ns3::OutputStreamWrapper> file, ns3::Ptr<ns3::Packet const> p) [member function]
cls.add_method('DefaultReceiveSinkWithoutContext',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'file'), param('ns3::Ptr< ns3::Packet const >', 'p')],
is_static=True)
## trace-helper.h (module 'network'): std::string ns3::AsciiTraceHelper::GetFilenameFromDevice(std::string prefix, ns3::Ptr<ns3::NetDevice> device, bool useObjectNames=true) [member function]
cls.add_method('GetFilenameFromDevice',
'std::string',
[param('std::string', 'prefix'), param('ns3::Ptr< ns3::NetDevice >', 'device'), param('bool', 'useObjectNames', default_value='true')])
## trace-helper.h (module 'network'): std::string ns3::AsciiTraceHelper::GetFilenameFromInterfacePair(std::string prefix, ns3::Ptr<ns3::Object> object, uint32_t interface, bool useObjectNames=true) [member function]
cls.add_method('GetFilenameFromInterfacePair',
'std::string',
[param('std::string', 'prefix'), param('ns3::Ptr< ns3::Object >', 'object'), param('uint32_t', 'interface'), param('bool', 'useObjectNames', default_value='true')])
return
def register_Ns3AsciiTraceHelperForDevice_methods(root_module, cls):
## trace-helper.h (module 'network'): ns3::AsciiTraceHelperForDevice::AsciiTraceHelperForDevice(ns3::AsciiTraceHelperForDevice const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AsciiTraceHelperForDevice const &', 'arg0')])
## trace-helper.h (module 'network'): ns3::AsciiTraceHelperForDevice::AsciiTraceHelperForDevice() [constructor]
cls.add_constructor([])
## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAscii(std::string prefix, ns3::Ptr<ns3::NetDevice> nd, bool explicitFilename=false) [member function]
cls.add_method('EnableAscii',
'void',
[param('std::string', 'prefix'), param('ns3::Ptr< ns3::NetDevice >', 'nd'), param('bool', 'explicitFilename', default_value='false')])
## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAscii(ns3::Ptr<ns3::OutputStreamWrapper> stream, ns3::Ptr<ns3::NetDevice> nd) [member function]
cls.add_method('EnableAscii',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream'), param('ns3::Ptr< ns3::NetDevice >', 'nd')])
## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAscii(std::string prefix, std::string ndName, bool explicitFilename=false) [member function]
cls.add_method('EnableAscii',
'void',
[param('std::string', 'prefix'), param('std::string', 'ndName'), param('bool', 'explicitFilename', default_value='false')])
## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAscii(ns3::Ptr<ns3::OutputStreamWrapper> stream, std::string ndName) [member function]
cls.add_method('EnableAscii',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream'), param('std::string', 'ndName')])
## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAscii(std::string prefix, ns3::NetDeviceContainer d) [member function]
cls.add_method('EnableAscii',
'void',
[param('std::string', 'prefix'), param('ns3::NetDeviceContainer', 'd')])
## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAscii(ns3::Ptr<ns3::OutputStreamWrapper> stream, ns3::NetDeviceContainer d) [member function]
cls.add_method('EnableAscii',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream'), param('ns3::NetDeviceContainer', 'd')])
## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAscii(std::string prefix, ns3::NodeContainer n) [member function]
cls.add_method('EnableAscii',
'void',
[param('std::string', 'prefix'), param('ns3::NodeContainer', 'n')])
## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAscii(ns3::Ptr<ns3::OutputStreamWrapper> stream, ns3::NodeContainer n) [member function]
cls.add_method('EnableAscii',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream'), param('ns3::NodeContainer', 'n')])
## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAscii(std::string prefix, uint32_t nodeid, uint32_t deviceid, bool explicitFilename) [member function]
cls.add_method('EnableAscii',
'void',
[param('std::string', 'prefix'), param('uint32_t', 'nodeid'), param('uint32_t', 'deviceid'), param('bool', 'explicitFilename')])
## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAscii(ns3::Ptr<ns3::OutputStreamWrapper> stream, uint32_t nodeid, uint32_t deviceid) [member function]
cls.add_method('EnableAscii',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream'), param('uint32_t', 'nodeid'), param('uint32_t', 'deviceid')])
## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAsciiAll(std::string prefix) [member function]
cls.add_method('EnableAsciiAll',
'void',
[param('std::string', 'prefix')])
## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAsciiAll(ns3::Ptr<ns3::OutputStreamWrapper> stream) [member function]
cls.add_method('EnableAsciiAll',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream')])
## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAsciiInternal(ns3::Ptr<ns3::OutputStreamWrapper> stream, std::string prefix, ns3::Ptr<ns3::NetDevice> nd, bool explicitFilename) [member function]
cls.add_method('EnableAsciiInternal',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream'), param('std::string', 'prefix'), param('ns3::Ptr< ns3::NetDevice >', 'nd'), param('bool', 'explicitFilename')],
is_pure_virtual=True, is_virtual=True)
return
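# Note: EnableAsciiInternal() is registered with is_pure_virtual=True and
# is_virtual=True, so ns3::AsciiTraceHelperForDevice is exposed as an abstract
# base in the bindings; concrete device helpers are expected to supply the
# implementation.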
def register_Ns3AthstatsHelper_methods(root_module, cls):
## athstats-helper.h (module 'wifi'): ns3::AthstatsHelper::AthstatsHelper(ns3::AthstatsHelper const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AthstatsHelper const &', 'arg0')])
## athstats-helper.h (module 'wifi'): ns3::AthstatsHelper::AthstatsHelper() [constructor]
cls.add_constructor([])
## athstats-helper.h (module 'wifi'): void ns3::AthstatsHelper::EnableAthstats(std::string filename, uint32_t nodeid, uint32_t deviceid) [member function]
cls.add_method('EnableAthstats',
'void',
[param('std::string', 'filename'), param('uint32_t', 'nodeid'), param('uint32_t', 'deviceid')])
## athstats-helper.h (module 'wifi'): void ns3::AthstatsHelper::EnableAthstats(std::string filename, ns3::Ptr<ns3::NetDevice> nd) [member function]
cls.add_method('EnableAthstats',
'void',
[param('std::string', 'filename'), param('ns3::Ptr< ns3::NetDevice >', 'nd')])
## athstats-helper.h (module 'wifi'): void ns3::AthstatsHelper::EnableAthstats(std::string filename, ns3::NetDeviceContainer d) [member function]
cls.add_method('EnableAthstats',
'void',
[param('std::string', 'filename'), param('ns3::NetDeviceContainer', 'd')])
## athstats-helper.h (module 'wifi'): void ns3::AthstatsHelper::EnableAthstats(std::string filename, ns3::NodeContainer n) [member function]
cls.add_method('EnableAthstats',
'void',
[param('std::string', 'filename'), param('ns3::NodeContainer', 'n')])
return
def register_Ns3AttributeConstructionList_methods(root_module, cls):
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList(ns3::AttributeConstructionList const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeConstructionList const &', 'arg0')])
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList() [constructor]
cls.add_constructor([])
## attribute-construction-list.h (module 'core'): void ns3::AttributeConstructionList::Add(std::string name, ns3::Ptr<ns3::AttributeChecker const> checker, ns3::Ptr<ns3::AttributeValue> value) [member function]
cls.add_method('Add',
'void',
[param('std::string', 'name'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::Ptr< ns3::AttributeValue >', 'value')])
## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::Begin() const [member function]
cls.add_method('Begin',
'std::_List_const_iterator< ns3::AttributeConstructionList::Item >',
[],
is_const=True)
## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::End() const [member function]
cls.add_method('End',
'std::_List_const_iterator< ns3::AttributeConstructionList::Item >',
[],
is_const=True)
## attribute-construction-list.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeConstructionList::Find(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('Find',
'ns3::Ptr< ns3::AttributeValue >',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True)
return
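# Begin()/End() are recorded as returning std::_List_const_iterator<Item>, the
# implementation-specific spelling of std::list<Item>::const_iterator as seen
# by the C++ API scanner, rather than the portable typedef name.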
def register_Ns3AttributeConstructionListItem_methods(root_module, cls):
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item() [constructor]
cls.add_constructor([])
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item(ns3::AttributeConstructionList::Item const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeConstructionList::Item const &', 'arg0')])
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::checker [variable]
cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::name [variable]
cls.add_instance_attribute('name', 'std::string', is_const=False)
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::value [variable]
cls.add_instance_attribute('value', 'ns3::Ptr< ns3::AttributeValue >', is_const=False)
return
def register_Ns3Bar_methods(root_module, cls):
## block-ack-manager.h (module 'wifi'): ns3::Bar::Bar(ns3::Bar const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Bar const &', 'arg0')])
## block-ack-manager.h (module 'wifi'): ns3::Bar::Bar() [constructor]
cls.add_constructor([])
## block-ack-manager.h (module 'wifi'): ns3::Bar::Bar(ns3::Ptr<ns3::Packet const> packet, ns3::Mac48Address recipient, uint8_t tid, bool immediate) [constructor]
cls.add_constructor([param('ns3::Ptr< ns3::Packet const >', 'packet'), param('ns3::Mac48Address', 'recipient'), param('uint8_t', 'tid'), param('bool', 'immediate')])
## block-ack-manager.h (module 'wifi'): ns3::Bar::bar [variable]
cls.add_instance_attribute('bar', 'ns3::Ptr< ns3::Packet const >', is_const=False)
## block-ack-manager.h (module 'wifi'): ns3::Bar::immediate [variable]
cls.add_instance_attribute('immediate', 'bool', is_const=False)
## block-ack-manager.h (module 'wifi'): ns3::Bar::recipient [variable]
cls.add_instance_attribute('recipient', 'ns3::Mac48Address', is_const=False)
## block-ack-manager.h (module 'wifi'): ns3::Bar::tid [variable]
cls.add_instance_attribute('tid', 'uint8_t', is_const=False)
return
def register_Ns3BlockAckAgreement_methods(root_module, cls):
## block-ack-agreement.h (module 'wifi'): ns3::BlockAckAgreement::BlockAckAgreement(ns3::BlockAckAgreement const & arg0) [copy constructor]
cls.add_constructor([param('ns3::BlockAckAgreement const &', 'arg0')])
## block-ack-agreement.h (module 'wifi'): ns3::BlockAckAgreement::BlockAckAgreement() [constructor]
cls.add_constructor([])
## block-ack-agreement.h (module 'wifi'): ns3::BlockAckAgreement::BlockAckAgreement(ns3::Mac48Address peer, uint8_t tid) [constructor]
cls.add_constructor([param('ns3::Mac48Address', 'peer'), param('uint8_t', 'tid')])
## block-ack-agreement.h (module 'wifi'): uint16_t ns3::BlockAckAgreement::GetBufferSize() const [member function]
cls.add_method('GetBufferSize',
'uint16_t',
[],
is_const=True)
## block-ack-agreement.h (module 'wifi'): ns3::Mac48Address ns3::BlockAckAgreement::GetPeer() const [member function]
cls.add_method('GetPeer',
'ns3::Mac48Address',
[],
is_const=True)
## block-ack-agreement.h (module 'wifi'): uint16_t ns3::BlockAckAgreement::GetStartingSequence() const [member function]
cls.add_method('GetStartingSequence',
'uint16_t',
[],
is_const=True)
## block-ack-agreement.h (module 'wifi'): uint16_t ns3::BlockAckAgreement::GetStartingSequenceControl() const [member function]
cls.add_method('GetStartingSequenceControl',
'uint16_t',
[],
is_const=True)
## block-ack-agreement.h (module 'wifi'): uint8_t ns3::BlockAckAgreement::GetTid() const [member function]
cls.add_method('GetTid',
'uint8_t',
[],
is_const=True)
## block-ack-agreement.h (module 'wifi'): uint16_t ns3::BlockAckAgreement::GetTimeout() const [member function]
cls.add_method('GetTimeout',
'uint16_t',
[],
is_const=True)
## block-ack-agreement.h (module 'wifi'): bool ns3::BlockAckAgreement::IsAmsduSupported() const [member function]
cls.add_method('IsAmsduSupported',
'bool',
[],
is_const=True)
## block-ack-agreement.h (module 'wifi'): bool ns3::BlockAckAgreement::IsImmediateBlockAck() const [member function]
cls.add_method('IsImmediateBlockAck',
'bool',
[],
is_const=True)
## block-ack-agreement.h (module 'wifi'): void ns3::BlockAckAgreement::SetAmsduSupport(bool supported) [member function]
cls.add_method('SetAmsduSupport',
'void',
[param('bool', 'supported')])
## block-ack-agreement.h (module 'wifi'): void ns3::BlockAckAgreement::SetBufferSize(uint16_t bufferSize) [member function]
cls.add_method('SetBufferSize',
'void',
[param('uint16_t', 'bufferSize')])
## block-ack-agreement.h (module 'wifi'): void ns3::BlockAckAgreement::SetDelayedBlockAck() [member function]
cls.add_method('SetDelayedBlockAck',
'void',
[])
## block-ack-agreement.h (module 'wifi'): void ns3::BlockAckAgreement::SetImmediateBlockAck() [member function]
cls.add_method('SetImmediateBlockAck',
'void',
[])
## block-ack-agreement.h (module 'wifi'): void ns3::BlockAckAgreement::SetStartingSequence(uint16_t seq) [member function]
cls.add_method('SetStartingSequence',
'void',
[param('uint16_t', 'seq')])
## block-ack-agreement.h (module 'wifi'): void ns3::BlockAckAgreement::SetTimeout(uint16_t timeout) [member function]
cls.add_method('SetTimeout',
'void',
[param('uint16_t', 'timeout')])
return
def register_Ns3BlockAckCache_methods(root_module, cls):
## block-ack-cache.h (module 'wifi'): ns3::BlockAckCache::BlockAckCache() [constructor]
cls.add_constructor([])
## block-ack-cache.h (module 'wifi'): ns3::BlockAckCache::BlockAckCache(ns3::BlockAckCache const & arg0) [copy constructor]
cls.add_constructor([param('ns3::BlockAckCache const &', 'arg0')])
## block-ack-cache.h (module 'wifi'): void ns3::BlockAckCache::FillBlockAckBitmap(ns3::CtrlBAckResponseHeader * blockAckHeader) [member function]
cls.add_method('FillBlockAckBitmap',
'void',
[param('ns3::CtrlBAckResponseHeader *', 'blockAckHeader')])
## block-ack-cache.h (module 'wifi'): void ns3::BlockAckCache::Init(uint16_t winStart, uint16_t winSize) [member function]
cls.add_method('Init',
'void',
[param('uint16_t', 'winStart'), param('uint16_t', 'winSize')])
## block-ack-cache.h (module 'wifi'): void ns3::BlockAckCache::UpdateWithBlockAckReq(uint16_t startingSeq) [member function]
cls.add_method('UpdateWithBlockAckReq',
'void',
[param('uint16_t', 'startingSeq')])
## block-ack-cache.h (module 'wifi'): void ns3::BlockAckCache::UpdateWithMpdu(ns3::WifiMacHeader const * hdr) [member function]
cls.add_method('UpdateWithMpdu',
'void',
[param('ns3::WifiMacHeader const *', 'hdr')])
return
def register_Ns3BlockAckManager_methods(root_module, cls):
## block-ack-manager.h (module 'wifi'): ns3::BlockAckManager::BlockAckManager() [constructor]
cls.add_constructor([])
## block-ack-manager.h (module 'wifi'): void ns3::BlockAckManager::CreateAgreement(ns3::MgtAddBaRequestHeader const * reqHdr, ns3::Mac48Address recipient) [member function]
cls.add_method('CreateAgreement',
'void',
[param('ns3::MgtAddBaRequestHeader const *', 'reqHdr'), param('ns3::Mac48Address', 'recipient')])
## block-ack-manager.h (module 'wifi'): void ns3::BlockAckManager::DestroyAgreement(ns3::Mac48Address recipient, uint8_t tid) [member function]
cls.add_method('DestroyAgreement',
'void',
[param('ns3::Mac48Address', 'recipient'), param('uint8_t', 'tid')])
## block-ack-manager.h (module 'wifi'): bool ns3::BlockAckManager::ExistsAgreement(ns3::Mac48Address recipient, uint8_t tid) const [member function]
cls.add_method('ExistsAgreement',
'bool',
[param('ns3::Mac48Address', 'recipient'), param('uint8_t', 'tid')],
is_const=True)
## block-ack-manager.h (module 'wifi'): bool ns3::BlockAckManager::ExistsAgreementInState(ns3::Mac48Address recipient, uint8_t tid, ns3::OriginatorBlockAckAgreement::State state) const [member function]
cls.add_method('ExistsAgreementInState',
'bool',
[param('ns3::Mac48Address', 'recipient'), param('uint8_t', 'tid'), param('ns3::OriginatorBlockAckAgreement::State', 'state')],
is_const=True)
## block-ack-manager.h (module 'wifi'): uint32_t ns3::BlockAckManager::GetNBufferedPackets(ns3::Mac48Address recipient, uint8_t tid) const [member function]
cls.add_method('GetNBufferedPackets',
'uint32_t',
[param('ns3::Mac48Address', 'recipient'), param('uint8_t', 'tid')],
is_const=True)
## block-ack-manager.h (module 'wifi'): uint32_t ns3::BlockAckManager::GetNRetryNeededPackets(ns3::Mac48Address recipient, uint8_t tid) const [member function]
cls.add_method('GetNRetryNeededPackets',
'uint32_t',
[param('ns3::Mac48Address', 'recipient'), param('uint8_t', 'tid')],
is_const=True)
## block-ack-manager.h (module 'wifi'): ns3::Ptr<ns3::Packet const> ns3::BlockAckManager::GetNextPacket(ns3::WifiMacHeader & hdr) [member function]
cls.add_method('GetNextPacket',
'ns3::Ptr< ns3::Packet const >',
[param('ns3::WifiMacHeader &', 'hdr')])
## block-ack-manager.h (module 'wifi'): uint32_t ns3::BlockAckManager::GetNextPacketSize() const [member function]
cls.add_method('GetNextPacketSize',
'uint32_t',
[],
is_const=True)
## block-ack-manager.h (module 'wifi'): uint16_t ns3::BlockAckManager::GetSeqNumOfNextRetryPacket(ns3::Mac48Address recipient, uint8_t tid) const [member function]
cls.add_method('GetSeqNumOfNextRetryPacket',
'uint16_t',
[param('ns3::Mac48Address', 'recipient'), param('uint8_t', 'tid')],
is_const=True)
## block-ack-manager.h (module 'wifi'): bool ns3::BlockAckManager::HasBar(ns3::Bar & bar) [member function]
cls.add_method('HasBar',
'bool',
[param('ns3::Bar &', 'bar')])
## block-ack-manager.h (module 'wifi'): bool ns3::BlockAckManager::HasOtherFragments(uint16_t sequenceNumber) const [member function]
cls.add_method('HasOtherFragments',
'bool',
[param('uint16_t', 'sequenceNumber')],
is_const=True)
## block-ack-manager.h (module 'wifi'): bool ns3::BlockAckManager::HasPackets() const [member function]
cls.add_method('HasPackets',
'bool',
[],
is_const=True)
## block-ack-manager.h (module 'wifi'): void ns3::BlockAckManager::NotifyAgreementEstablished(ns3::Mac48Address recipient, uint8_t tid, uint16_t startingSeq) [member function]
cls.add_method('NotifyAgreementEstablished',
'void',
[param('ns3::Mac48Address', 'recipient'), param('uint8_t', 'tid'), param('uint16_t', 'startingSeq')])
## block-ack-manager.h (module 'wifi'): void ns3::BlockAckManager::NotifyAgreementUnsuccessful(ns3::Mac48Address recipient, uint8_t tid) [member function]
cls.add_method('NotifyAgreementUnsuccessful',
'void',
[param('ns3::Mac48Address', 'recipient'), param('uint8_t', 'tid')])
## block-ack-manager.h (module 'wifi'): void ns3::BlockAckManager::NotifyGotBlockAck(ns3::CtrlBAckResponseHeader const * blockAck, ns3::Mac48Address recipient) [member function]
cls.add_method('NotifyGotBlockAck',
'void',
[param('ns3::CtrlBAckResponseHeader const *', 'blockAck'), param('ns3::Mac48Address', 'recipient')])
## block-ack-manager.h (module 'wifi'): void ns3::BlockAckManager::NotifyMpduTransmission(ns3::Mac48Address recipient, uint8_t tid, uint16_t nextSeqNumber) [member function]
cls.add_method('NotifyMpduTransmission',
'void',
[param('ns3::Mac48Address', 'recipient'), param('uint8_t', 'tid'), param('uint16_t', 'nextSeqNumber')])
## block-ack-manager.h (module 'wifi'): void ns3::BlockAckManager::SetBlockAckInactivityCallback(ns3::Callback<void, ns3::Mac48Address, unsigned char, bool, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> callback) [member function]
cls.add_method('SetBlockAckInactivityCallback',
'void',
[param('ns3::Callback< void, ns3::Mac48Address, unsigned char, bool, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')])
## block-ack-manager.h (module 'wifi'): void ns3::BlockAckManager::SetBlockAckThreshold(uint8_t nPackets) [member function]
cls.add_method('SetBlockAckThreshold',
'void',
[param('uint8_t', 'nPackets')])
## block-ack-manager.h (module 'wifi'): void ns3::BlockAckManager::SetBlockAckType(ns3::BlockAckType bAckType) [member function]
cls.add_method('SetBlockAckType',
'void',
[param('ns3::BlockAckType', 'bAckType')])
## block-ack-manager.h (module 'wifi'): void ns3::BlockAckManager::SetBlockDestinationCallback(ns3::Callback<void, ns3::Mac48Address, unsigned char, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> callback) [member function]
cls.add_method('SetBlockDestinationCallback',
'void',
[param('ns3::Callback< void, ns3::Mac48Address, unsigned char, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')])
## block-ack-manager.h (module 'wifi'): void ns3::BlockAckManager::SetMaxPacketDelay(ns3::Time maxDelay) [member function]
cls.add_method('SetMaxPacketDelay',
'void',
[param('ns3::Time', 'maxDelay')])
## block-ack-manager.h (module 'wifi'): void ns3::BlockAckManager::SetQueue(ns3::Ptr<ns3::WifiMacQueue> queue) [member function]
cls.add_method('SetQueue',
'void',
[param('ns3::Ptr< ns3::WifiMacQueue >', 'queue')])
## block-ack-manager.h (module 'wifi'): void ns3::BlockAckManager::SetTxMiddle(ns3::MacTxMiddle * txMiddle) [member function]
cls.add_method('SetTxMiddle',
'void',
[param('ns3::MacTxMiddle *', 'txMiddle')])
## block-ack-manager.h (module 'wifi'): void ns3::BlockAckManager::SetUnblockDestinationCallback(ns3::Callback<void, ns3::Mac48Address, unsigned char, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> callback) [member function]
cls.add_method('SetUnblockDestinationCallback',
'void',
[param('ns3::Callback< void, ns3::Mac48Address, unsigned char, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')])
## block-ack-manager.h (module 'wifi'): void ns3::BlockAckManager::StorePacket(ns3::Ptr<ns3::Packet const> packet, ns3::WifiMacHeader const & hdr, ns3::Time tStamp) [member function]
cls.add_method('StorePacket',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet'), param('ns3::WifiMacHeader const &', 'hdr'), param('ns3::Time', 'tStamp')])
## block-ack-manager.h (module 'wifi'): bool ns3::BlockAckManager::SwitchToBlockAckIfNeeded(ns3::Mac48Address recipient, uint8_t tid, uint16_t startingSeq) [member function]
cls.add_method('SwitchToBlockAckIfNeeded',
'bool',
[param('ns3::Mac48Address', 'recipient'), param('uint8_t', 'tid'), param('uint16_t', 'startingSeq')])
## block-ack-manager.h (module 'wifi'): void ns3::BlockAckManager::TearDownBlockAck(ns3::Mac48Address recipient, uint8_t tid) [member function]
cls.add_method('TearDownBlockAck',
'void',
[param('ns3::Mac48Address', 'recipient'), param('uint8_t', 'tid')])
## block-ack-manager.h (module 'wifi'): void ns3::BlockAckManager::UpdateAgreement(ns3::MgtAddBaResponseHeader const * respHdr, ns3::Mac48Address recipient) [member function]
cls.add_method('UpdateAgreement',
'void',
[param('ns3::MgtAddBaResponseHeader const *', 'respHdr'), param('ns3::Mac48Address', 'recipient')])
return
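# The ns3::Callback<...> parameter types above are spelled out as the full
# nine-argument template instantiation, with ns3::empty filling the unused
# slots; only the leading non-empty arguments (Mac48Address, unsigned char,
# bool) are meaningful to the callback's callers.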
def register_Ns3Buffer_methods(root_module, cls):
## buffer.h (module 'network'): ns3::Buffer::Buffer() [constructor]
cls.add_constructor([])
## buffer.h (module 'network'): ns3::Buffer::Buffer(uint32_t dataSize) [constructor]
cls.add_constructor([param('uint32_t', 'dataSize')])
## buffer.h (module 'network'): ns3::Buffer::Buffer(uint32_t dataSize, bool initialize) [constructor]
cls.add_constructor([param('uint32_t', 'dataSize'), param('bool', 'initialize')])
## buffer.h (module 'network'): ns3::Buffer::Buffer(ns3::Buffer const & o) [copy constructor]
cls.add_constructor([param('ns3::Buffer const &', 'o')])
## buffer.h (module 'network'): bool ns3::Buffer::AddAtEnd(uint32_t end) [member function]
cls.add_method('AddAtEnd',
'bool',
[param('uint32_t', 'end')])
## buffer.h (module 'network'): void ns3::Buffer::AddAtEnd(ns3::Buffer const & o) [member function]
cls.add_method('AddAtEnd',
'void',
[param('ns3::Buffer const &', 'o')])
## buffer.h (module 'network'): bool ns3::Buffer::AddAtStart(uint32_t start) [member function]
cls.add_method('AddAtStart',
'bool',
[param('uint32_t', 'start')])
## buffer.h (module 'network'): ns3::Buffer::Iterator ns3::Buffer::Begin() const [member function]
cls.add_method('Begin',
'ns3::Buffer::Iterator',
[],
is_const=True)
## buffer.h (module 'network'): void ns3::Buffer::CopyData(std::ostream * os, uint32_t size) const [member function]
cls.add_method('CopyData',
'void',
[param('std::ostream *', 'os'), param('uint32_t', 'size')],
is_const=True)
## buffer.h (module 'network'): uint32_t ns3::Buffer::CopyData(uint8_t * buffer, uint32_t size) const [member function]
cls.add_method('CopyData',
'uint32_t',
[param('uint8_t *', 'buffer'), param('uint32_t', 'size')],
is_const=True)
## buffer.h (module 'network'): ns3::Buffer ns3::Buffer::CreateFragment(uint32_t start, uint32_t length) const [member function]
cls.add_method('CreateFragment',
'ns3::Buffer',
[param('uint32_t', 'start'), param('uint32_t', 'length')],
is_const=True)
## buffer.h (module 'network'): ns3::Buffer ns3::Buffer::CreateFullCopy() const [member function]
cls.add_method('CreateFullCopy',
'ns3::Buffer',
[],
is_const=True)
## buffer.h (module 'network'): uint32_t ns3::Buffer::Deserialize(uint8_t const * buffer, uint32_t size) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
## buffer.h (module 'network'): ns3::Buffer::Iterator ns3::Buffer::End() const [member function]
cls.add_method('End',
'ns3::Buffer::Iterator',
[],
is_const=True)
## buffer.h (module 'network'): int32_t ns3::Buffer::GetCurrentEndOffset() const [member function]
cls.add_method('GetCurrentEndOffset',
'int32_t',
[],
is_const=True)
## buffer.h (module 'network'): int32_t ns3::Buffer::GetCurrentStartOffset() const [member function]
cls.add_method('GetCurrentStartOffset',
'int32_t',
[],
is_const=True)
## buffer.h (module 'network'): uint32_t ns3::Buffer::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## buffer.h (module 'network'): uint32_t ns3::Buffer::GetSize() const [member function]
cls.add_method('GetSize',
'uint32_t',
[],
is_const=True)
## buffer.h (module 'network'): uint8_t const * ns3::Buffer::PeekData() const [member function]
cls.add_method('PeekData',
'uint8_t const *',
[],
is_const=True)
## buffer.h (module 'network'): void ns3::Buffer::RemoveAtEnd(uint32_t end) [member function]
cls.add_method('RemoveAtEnd',
'void',
[param('uint32_t', 'end')])
## buffer.h (module 'network'): void ns3::Buffer::RemoveAtStart(uint32_t start) [member function]
cls.add_method('RemoveAtStart',
'void',
[param('uint32_t', 'start')])
## buffer.h (module 'network'): uint32_t ns3::Buffer::Serialize(uint8_t * buffer, uint32_t maxSize) const [member function]
cls.add_method('Serialize',
'uint32_t',
[param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')],
is_const=True)
return
def register_Ns3BufferIterator_methods(root_module, cls):
## buffer.h (module 'network'): ns3::Buffer::Iterator::Iterator(ns3::Buffer::Iterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Buffer::Iterator const &', 'arg0')])
## buffer.h (module 'network'): ns3::Buffer::Iterator::Iterator() [constructor]
cls.add_constructor([])
## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::CalculateIpChecksum(uint16_t size) [member function]
cls.add_method('CalculateIpChecksum',
'uint16_t',
[param('uint16_t', 'size')])
## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::CalculateIpChecksum(uint16_t size, uint32_t initialChecksum) [member function]
cls.add_method('CalculateIpChecksum',
'uint16_t',
[param('uint16_t', 'size'), param('uint32_t', 'initialChecksum')])
## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::GetDistanceFrom(ns3::Buffer::Iterator const & o) const [member function]
cls.add_method('GetDistanceFrom',
'uint32_t',
[param('ns3::Buffer::Iterator const &', 'o')],
is_const=True)
## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::GetSize() const [member function]
cls.add_method('GetSize',
'uint32_t',
[],
is_const=True)
## buffer.h (module 'network'): bool ns3::Buffer::Iterator::IsEnd() const [member function]
cls.add_method('IsEnd',
'bool',
[],
is_const=True)
## buffer.h (module 'network'): bool ns3::Buffer::Iterator::IsStart() const [member function]
cls.add_method('IsStart',
'bool',
[],
is_const=True)
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Next() [member function]
cls.add_method('Next',
'void',
[])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Next(uint32_t delta) [member function]
cls.add_method('Next',
'void',
[param('uint32_t', 'delta')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Prev() [member function]
cls.add_method('Prev',
'void',
[])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Prev(uint32_t delta) [member function]
cls.add_method('Prev',
'void',
[param('uint32_t', 'delta')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Read(uint8_t * buffer, uint32_t size) [member function]
cls.add_method('Read',
'void',
[param('uint8_t *', 'buffer'), param('uint32_t', 'size')])
## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::ReadLsbtohU16() [member function]
cls.add_method('ReadLsbtohU16',
'uint16_t',
[])
## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::ReadLsbtohU32() [member function]
cls.add_method('ReadLsbtohU32',
'uint32_t',
[])
## buffer.h (module 'network'): uint64_t ns3::Buffer::Iterator::ReadLsbtohU64() [member function]
cls.add_method('ReadLsbtohU64',
'uint64_t',
[])
## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::ReadNtohU16() [member function]
cls.add_method('ReadNtohU16',
'uint16_t',
[])
## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::ReadNtohU32() [member function]
cls.add_method('ReadNtohU32',
'uint32_t',
[])
## buffer.h (module 'network'): uint64_t ns3::Buffer::Iterator::ReadNtohU64() [member function]
cls.add_method('ReadNtohU64',
'uint64_t',
[])
## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::ReadU16() [member function]
cls.add_method('ReadU16',
'uint16_t',
[])
## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::ReadU32() [member function]
cls.add_method('ReadU32',
'uint32_t',
[])
## buffer.h (module 'network'): uint64_t ns3::Buffer::Iterator::ReadU64() [member function]
cls.add_method('ReadU64',
'uint64_t',
[])
## buffer.h (module 'network'): uint8_t ns3::Buffer::Iterator::ReadU8() [member function]
cls.add_method('ReadU8',
'uint8_t',
[])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Write(uint8_t const * buffer, uint32_t size) [member function]
cls.add_method('Write',
'void',
[param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Write(ns3::Buffer::Iterator start, ns3::Buffer::Iterator end) [member function]
cls.add_method('Write',
'void',
[param('ns3::Buffer::Iterator', 'start'), param('ns3::Buffer::Iterator', 'end')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtolsbU16(uint16_t data) [member function]
cls.add_method('WriteHtolsbU16',
'void',
[param('uint16_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtolsbU32(uint32_t data) [member function]
cls.add_method('WriteHtolsbU32',
'void',
[param('uint32_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtolsbU64(uint64_t data) [member function]
cls.add_method('WriteHtolsbU64',
'void',
[param('uint64_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtonU16(uint16_t data) [member function]
cls.add_method('WriteHtonU16',
'void',
[param('uint16_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtonU32(uint32_t data) [member function]
cls.add_method('WriteHtonU32',
'void',
[param('uint32_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtonU64(uint64_t data) [member function]
cls.add_method('WriteHtonU64',
'void',
[param('uint64_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU16(uint16_t data) [member function]
cls.add_method('WriteU16',
'void',
[param('uint16_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU32(uint32_t data) [member function]
cls.add_method('WriteU32',
'void',
[param('uint32_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU64(uint64_t data) [member function]
cls.add_method('WriteU64',
'void',
[param('uint64_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU8(uint8_t data) [member function]
cls.add_method('WriteU8',
'void',
[param('uint8_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU8(uint8_t data, uint32_t len) [member function]
cls.add_method('WriteU8',
'void',
[param('uint8_t', 'data'), param('uint32_t', 'len')])
return
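# Naming convention for the iterator accessors above: ReadNtohU*/WriteHtonU*
# operate on network (big-endian) byte order, ReadLsbtohU*/WriteHtolsbU* on
# little-endian data, while the unqualified ReadU*/WriteU* variants do not name
# a byte order.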
def register_Ns3ByteTagIterator_methods(root_module, cls):
## packet.h (module 'network'): ns3::ByteTagIterator::ByteTagIterator(ns3::ByteTagIterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ByteTagIterator const &', 'arg0')])
## packet.h (module 'network'): bool ns3::ByteTagIterator::HasNext() const [member function]
cls.add_method('HasNext',
'bool',
[],
is_const=True)
## packet.h (module 'network'): ns3::ByteTagIterator::Item ns3::ByteTagIterator::Next() [member function]
cls.add_method('Next',
'ns3::ByteTagIterator::Item',
[])
return
def register_Ns3ByteTagIteratorItem_methods(root_module, cls):
## packet.h (module 'network'): ns3::ByteTagIterator::Item::Item(ns3::ByteTagIterator::Item const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ByteTagIterator::Item const &', 'arg0')])
## packet.h (module 'network'): uint32_t ns3::ByteTagIterator::Item::GetEnd() const [member function]
cls.add_method('GetEnd',
'uint32_t',
[],
is_const=True)
## packet.h (module 'network'): uint32_t ns3::ByteTagIterator::Item::GetStart() const [member function]
cls.add_method('GetStart',
'uint32_t',
[],
is_const=True)
## packet.h (module 'network'): void ns3::ByteTagIterator::Item::GetTag(ns3::Tag & tag) const [member function]
cls.add_method('GetTag',
'void',
[param('ns3::Tag &', 'tag')],
is_const=True)
## packet.h (module 'network'): ns3::TypeId ns3::ByteTagIterator::Item::GetTypeId() const [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_const=True)
return
def register_Ns3ByteTagList_methods(root_module, cls):
## byte-tag-list.h (module 'network'): ns3::ByteTagList::ByteTagList() [constructor]
cls.add_constructor([])
## byte-tag-list.h (module 'network'): ns3::ByteTagList::ByteTagList(ns3::ByteTagList const & o) [copy constructor]
cls.add_constructor([param('ns3::ByteTagList const &', 'o')])
## byte-tag-list.h (module 'network'): ns3::TagBuffer ns3::ByteTagList::Add(ns3::TypeId tid, uint32_t bufferSize, int32_t start, int32_t end) [member function]
cls.add_method('Add',
'ns3::TagBuffer',
[param('ns3::TypeId', 'tid'), param('uint32_t', 'bufferSize'), param('int32_t', 'start'), param('int32_t', 'end')])
## byte-tag-list.h (module 'network'): void ns3::ByteTagList::Add(ns3::ByteTagList const & o) [member function]
cls.add_method('Add',
'void',
[param('ns3::ByteTagList const &', 'o')])
## byte-tag-list.h (module 'network'): void ns3::ByteTagList::AddAtEnd(int32_t adjustment, int32_t appendOffset) [member function]
cls.add_method('AddAtEnd',
'void',
[param('int32_t', 'adjustment'), param('int32_t', 'appendOffset')])
## byte-tag-list.h (module 'network'): void ns3::ByteTagList::AddAtStart(int32_t adjustment, int32_t prependOffset) [member function]
cls.add_method('AddAtStart',
'void',
[param('int32_t', 'adjustment'), param('int32_t', 'prependOffset')])
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator ns3::ByteTagList::Begin(int32_t offsetStart, int32_t offsetEnd) const [member function]
cls.add_method('Begin',
'ns3::ByteTagList::Iterator',
[param('int32_t', 'offsetStart'), param('int32_t', 'offsetEnd')],
is_const=True)
## byte-tag-list.h (module 'network'): void ns3::ByteTagList::RemoveAll() [member function]
cls.add_method('RemoveAll',
'void',
[])
return
def register_Ns3ByteTagListIterator_methods(root_module, cls):
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Iterator(ns3::ByteTagList::Iterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ByteTagList::Iterator const &', 'arg0')])
## byte-tag-list.h (module 'network'): uint32_t ns3::ByteTagList::Iterator::GetOffsetStart() const [member function]
cls.add_method('GetOffsetStart',
'uint32_t',
[],
is_const=True)
## byte-tag-list.h (module 'network'): bool ns3::ByteTagList::Iterator::HasNext() const [member function]
cls.add_method('HasNext',
'bool',
[],
is_const=True)
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item ns3::ByteTagList::Iterator::Next() [member function]
cls.add_method('Next',
'ns3::ByteTagList::Iterator::Item',
[])
return
def register_Ns3ByteTagListIteratorItem_methods(root_module, cls):
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::Item(ns3::ByteTagList::Iterator::Item const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ByteTagList::Iterator::Item const &', 'arg0')])
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::Item(ns3::TagBuffer buf) [constructor]
cls.add_constructor([param('ns3::TagBuffer', 'buf')])
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::buf [variable]
cls.add_instance_attribute('buf', 'ns3::TagBuffer', is_const=False)
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::end [variable]
cls.add_instance_attribute('end', 'int32_t', is_const=False)
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::size [variable]
cls.add_instance_attribute('size', 'uint32_t', is_const=False)
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::start [variable]
cls.add_instance_attribute('start', 'int32_t', is_const=False)
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::tid [variable]
cls.add_instance_attribute('tid', 'ns3::TypeId', is_const=False)
return
def register_Ns3CallbackBase_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::CallbackBase const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackBase const &', 'arg0')])
## callback.h (module 'core'): ns3::CallbackBase::CallbackBase() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::Ptr<ns3::CallbackImplBase> ns3::CallbackBase::GetImpl() const [member function]
cls.add_method('GetImpl',
'ns3::Ptr< ns3::CallbackImplBase >',
[],
is_const=True)
## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::Ptr<ns3::CallbackImplBase> impl) [constructor]
cls.add_constructor([param('ns3::Ptr< ns3::CallbackImplBase >', 'impl')],
visibility='protected')
## callback.h (module 'core'): static std::string ns3::CallbackBase::Demangle(std::string const & mangled) [member function]
cls.add_method('Demangle',
'std::string',
[param('std::string const &', 'mangled')],
is_static=True, visibility='protected')
return
def register_Ns3CapabilityInformation_methods(root_module, cls):
## capability-information.h (module 'wifi'): ns3::CapabilityInformation::CapabilityInformation(ns3::CapabilityInformation const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CapabilityInformation const &', 'arg0')])
## capability-information.h (module 'wifi'): ns3::CapabilityInformation::CapabilityInformation() [constructor]
cls.add_constructor([])
## capability-information.h (module 'wifi'): ns3::Buffer::Iterator ns3::CapabilityInformation::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'ns3::Buffer::Iterator',
[param('ns3::Buffer::Iterator', 'start')])
## capability-information.h (module 'wifi'): uint32_t ns3::CapabilityInformation::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## capability-information.h (module 'wifi'): bool ns3::CapabilityInformation::IsEss() const [member function]
cls.add_method('IsEss',
'bool',
[],
is_const=True)
## capability-information.h (module 'wifi'): bool ns3::CapabilityInformation::IsIbss() const [member function]
cls.add_method('IsIbss',
'bool',
[],
is_const=True)
## capability-information.h (module 'wifi'): ns3::Buffer::Iterator ns3::CapabilityInformation::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'ns3::Buffer::Iterator',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True)
## capability-information.h (module 'wifi'): void ns3::CapabilityInformation::SetEss() [member function]
cls.add_method('SetEss',
'void',
[])
## capability-information.h (module 'wifi'): void ns3::CapabilityInformation::SetIbss() [member function]
cls.add_method('SetIbss',
'void',
[])
return
def register_Ns3DcfManager_methods(root_module, cls):
## dcf-manager.h (module 'wifi'): ns3::DcfManager::DcfManager(ns3::DcfManager const & arg0) [copy constructor]
cls.add_constructor([param('ns3::DcfManager const &', 'arg0')])
## dcf-manager.h (module 'wifi'): ns3::DcfManager::DcfManager() [constructor]
cls.add_constructor([])
## dcf-manager.h (module 'wifi'): void ns3::DcfManager::Add(ns3::DcfState * dcf) [member function]
cls.add_method('Add',
'void',
[param('ns3::DcfState *', 'dcf')])
## dcf-manager.h (module 'wifi'): ns3::Time ns3::DcfManager::GetEifsNoDifs() const [member function]
cls.add_method('GetEifsNoDifs',
'ns3::Time',
[],
is_const=True)
## dcf-manager.h (module 'wifi'): void ns3::DcfManager::NotifyAckTimeoutResetNow() [member function]
cls.add_method('NotifyAckTimeoutResetNow',
'void',
[])
## dcf-manager.h (module 'wifi'): void ns3::DcfManager::NotifyAckTimeoutStartNow(ns3::Time duration) [member function]
cls.add_method('NotifyAckTimeoutStartNow',
'void',
[param('ns3::Time', 'duration')])
## dcf-manager.h (module 'wifi'): void ns3::DcfManager::NotifyCtsTimeoutResetNow() [member function]
cls.add_method('NotifyCtsTimeoutResetNow',
'void',
[])
## dcf-manager.h (module 'wifi'): void ns3::DcfManager::NotifyCtsTimeoutStartNow(ns3::Time duration) [member function]
cls.add_method('NotifyCtsTimeoutStartNow',
'void',
[param('ns3::Time', 'duration')])
## dcf-manager.h (module 'wifi'): void ns3::DcfManager::NotifyMaybeCcaBusyStartNow(ns3::Time duration) [member function]
cls.add_method('NotifyMaybeCcaBusyStartNow',
'void',
[param('ns3::Time', 'duration')])
## dcf-manager.h (module 'wifi'): void ns3::DcfManager::NotifyNavResetNow(ns3::Time duration) [member function]
cls.add_method('NotifyNavResetNow',
'void',
[param('ns3::Time', 'duration')])
## dcf-manager.h (module 'wifi'): void ns3::DcfManager::NotifyNavStartNow(ns3::Time duration) [member function]
cls.add_method('NotifyNavStartNow',
'void',
[param('ns3::Time', 'duration')])
## dcf-manager.h (module 'wifi'): void ns3::DcfManager::NotifyRxEndErrorNow() [member function]
cls.add_method('NotifyRxEndErrorNow',
'void',
[])
## dcf-manager.h (module 'wifi'): void ns3::DcfManager::NotifyRxEndOkNow() [member function]
cls.add_method('NotifyRxEndOkNow',
'void',
[])
## dcf-manager.h (module 'wifi'): void ns3::DcfManager::NotifyRxStartNow(ns3::Time duration) [member function]
cls.add_method('NotifyRxStartNow',
'void',
[param('ns3::Time', 'duration')])
## dcf-manager.h (module 'wifi'): void ns3::DcfManager::NotifySwitchingStartNow(ns3::Time duration) [member function]
cls.add_method('NotifySwitchingStartNow',
'void',
[param('ns3::Time', 'duration')])
## dcf-manager.h (module 'wifi'): void ns3::DcfManager::NotifyTxStartNow(ns3::Time duration) [member function]
cls.add_method('NotifyTxStartNow',
'void',
[param('ns3::Time', 'duration')])
## dcf-manager.h (module 'wifi'): void ns3::DcfManager::RequestAccess(ns3::DcfState * state) [member function]
cls.add_method('RequestAccess',
'void',
[param('ns3::DcfState *', 'state')])
## dcf-manager.h (module 'wifi'): void ns3::DcfManager::SetEifsNoDifs(ns3::Time eifsNoDifs) [member function]
cls.add_method('SetEifsNoDifs',
'void',
[param('ns3::Time', 'eifsNoDifs')])
## dcf-manager.h (module 'wifi'): void ns3::DcfManager::SetSifs(ns3::Time sifs) [member function]
cls.add_method('SetSifs',
'void',
[param('ns3::Time', 'sifs')])
## dcf-manager.h (module 'wifi'): void ns3::DcfManager::SetSlot(ns3::Time slotTime) [member function]
cls.add_method('SetSlot',
'void',
[param('ns3::Time', 'slotTime')])
## dcf-manager.h (module 'wifi'): void ns3::DcfManager::SetupLowListener(ns3::Ptr<ns3::MacLow> low) [member function]
cls.add_method('SetupLowListener',
'void',
[param('ns3::Ptr< ns3::MacLow >', 'low')])
## dcf-manager.h (module 'wifi'): void ns3::DcfManager::SetupPhyListener(ns3::Ptr<ns3::WifiPhy> phy) [member function]
cls.add_method('SetupPhyListener',
'void',
[param('ns3::Ptr< ns3::WifiPhy >', 'phy')])
return
def register_Ns3DcfState_methods(root_module, cls):
## dcf-manager.h (module 'wifi'): ns3::DcfState::DcfState(ns3::DcfState const & arg0) [copy constructor]
cls.add_constructor([param('ns3::DcfState const &', 'arg0')])
## dcf-manager.h (module 'wifi'): ns3::DcfState::DcfState() [constructor]
cls.add_constructor([])
## dcf-manager.h (module 'wifi'): uint32_t ns3::DcfState::GetAifsn() const [member function]
cls.add_method('GetAifsn',
'uint32_t',
[],
is_const=True)
## dcf-manager.h (module 'wifi'): uint32_t ns3::DcfState::GetCw() const [member function]
cls.add_method('GetCw',
'uint32_t',
[],
is_const=True)
## dcf-manager.h (module 'wifi'): uint32_t ns3::DcfState::GetCwMax() const [member function]
cls.add_method('GetCwMax',
'uint32_t',
[],
is_const=True)
## dcf-manager.h (module 'wifi'): uint32_t ns3::DcfState::GetCwMin() const [member function]
cls.add_method('GetCwMin',
'uint32_t',
[],
is_const=True)
## dcf-manager.h (module 'wifi'): bool ns3::DcfState::IsAccessRequested() const [member function]
cls.add_method('IsAccessRequested',
'bool',
[],
is_const=True)
## dcf-manager.h (module 'wifi'): void ns3::DcfState::ResetCw() [member function]
cls.add_method('ResetCw',
'void',
[])
## dcf-manager.h (module 'wifi'): void ns3::DcfState::SetAifsn(uint32_t aifsn) [member function]
cls.add_method('SetAifsn',
'void',
[param('uint32_t', 'aifsn')])
## dcf-manager.h (module 'wifi'): void ns3::DcfState::SetCwMax(uint32_t maxCw) [member function]
cls.add_method('SetCwMax',
'void',
[param('uint32_t', 'maxCw')])
## dcf-manager.h (module 'wifi'): void ns3::DcfState::SetCwMin(uint32_t minCw) [member function]
cls.add_method('SetCwMin',
'void',
[param('uint32_t', 'minCw')])
## dcf-manager.h (module 'wifi'): void ns3::DcfState::StartBackoffNow(uint32_t nSlots) [member function]
cls.add_method('StartBackoffNow',
'void',
[param('uint32_t', 'nSlots')])
## dcf-manager.h (module 'wifi'): void ns3::DcfState::UpdateFailedCw() [member function]
cls.add_method('UpdateFailedCw',
'void',
[])
## dcf-manager.h (module 'wifi'): void ns3::DcfState::DoNotifyAccessGranted() [member function]
cls.add_method('DoNotifyAccessGranted',
'void',
[],
is_pure_virtual=True, visibility='private', is_virtual=True)
## dcf-manager.h (module 'wifi'): void ns3::DcfState::DoNotifyChannelSwitching() [member function]
cls.add_method('DoNotifyChannelSwitching',
'void',
[],
is_pure_virtual=True, visibility='private', is_virtual=True)
## dcf-manager.h (module 'wifi'): void ns3::DcfState::DoNotifyCollision() [member function]
cls.add_method('DoNotifyCollision',
'void',
[],
is_pure_virtual=True, visibility='private', is_virtual=True)
## dcf-manager.h (module 'wifi'): void ns3::DcfState::DoNotifyInternalCollision() [member function]
cls.add_method('DoNotifyInternalCollision',
'void',
[],
is_pure_virtual=True, visibility='private', is_virtual=True)
return
def register_Ns3DsssErrorRateModel_methods(root_module, cls):
## dsss-error-rate-model.h (module 'wifi'): ns3::DsssErrorRateModel::DsssErrorRateModel() [constructor]
cls.add_constructor([])
## dsss-error-rate-model.h (module 'wifi'): ns3::DsssErrorRateModel::DsssErrorRateModel(ns3::DsssErrorRateModel const & arg0) [copy constructor]
cls.add_constructor([param('ns3::DsssErrorRateModel const &', 'arg0')])
## dsss-error-rate-model.h (module 'wifi'): static double ns3::DsssErrorRateModel::DqpskFunction(double x) [member function]
cls.add_method('DqpskFunction',
'double',
[param('double', 'x')],
is_static=True)
## dsss-error-rate-model.h (module 'wifi'): static double ns3::DsssErrorRateModel::GetDsssDbpskSuccessRate(double sinr, uint32_t nbits) [member function]
cls.add_method('GetDsssDbpskSuccessRate',
'double',
[param('double', 'sinr'), param('uint32_t', 'nbits')],
is_static=True)
## dsss-error-rate-model.h (module 'wifi'): static double ns3::DsssErrorRateModel::GetDsssDqpskCck11SuccessRate(double sinr, uint32_t nbits) [member function]
cls.add_method('GetDsssDqpskCck11SuccessRate',
'double',
[param('double', 'sinr'), param('uint32_t', 'nbits')],
is_static=True)
## dsss-error-rate-model.h (module 'wifi'): static double ns3::DsssErrorRateModel::GetDsssDqpskCck5_5SuccessRate(double sinr, uint32_t nbits) [member function]
cls.add_method('GetDsssDqpskCck5_5SuccessRate',
'double',
[param('double', 'sinr'), param('uint32_t', 'nbits')],
is_static=True)
## dsss-error-rate-model.h (module 'wifi'): static double ns3::DsssErrorRateModel::GetDsssDqpskSuccessRate(double sinr, uint32_t nbits) [member function]
cls.add_method('GetDsssDqpskSuccessRate',
'double',
[param('double', 'sinr'), param('uint32_t', 'nbits')],
is_static=True)
return
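# Illustrative note (not part of the generated bindings): the static DSSS error
# rate helpers registered above can be called directly from Python. A hedged
# sketch, assuming the wifi module is importable as `ns.wifi` (the exact import
# path depends on the ns-3 build):
#
#   import ns.wifi
#
#   # success probability of a 1000-bit DBPSK (1 Mb/s) frame at ~5 dB SINR
#   sinr = 10 ** (5.0 / 10.0)
#   p = ns.wifi.DsssErrorRateModel.GetDsssDbpskSuccessRate(sinr, 1000)
#   print(p)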
def register_Ns3EventId_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_binary_comparison_operator('==')
## event-id.h (module 'core'): ns3::EventId::EventId(ns3::EventId const & arg0) [copy constructor]
cls.add_constructor([param('ns3::EventId const &', 'arg0')])
## event-id.h (module 'core'): ns3::EventId::EventId() [constructor]
cls.add_constructor([])
## event-id.h (module 'core'): ns3::EventId::EventId(ns3::Ptr<ns3::EventImpl> const & impl, uint64_t ts, uint32_t context, uint32_t uid) [constructor]
cls.add_constructor([param('ns3::Ptr< ns3::EventImpl > const &', 'impl'), param('uint64_t', 'ts'), param('uint32_t', 'context'), param('uint32_t', 'uid')])
## event-id.h (module 'core'): void ns3::EventId::Cancel() [member function]
cls.add_method('Cancel',
'void',
[])
## event-id.h (module 'core'): uint32_t ns3::EventId::GetContext() const [member function]
cls.add_method('GetContext',
'uint32_t',
[],
is_const=True)
## event-id.h (module 'core'): uint64_t ns3::EventId::GetTs() const [member function]
cls.add_method('GetTs',
'uint64_t',
[],
is_const=True)
## event-id.h (module 'core'): uint32_t ns3::EventId::GetUid() const [member function]
cls.add_method('GetUid',
'uint32_t',
[],
is_const=True)
## event-id.h (module 'core'): bool ns3::EventId::IsExpired() const [member function]
cls.add_method('IsExpired',
'bool',
[],
is_const=True)
## event-id.h (module 'core'): bool ns3::EventId::IsRunning() const [member function]
cls.add_method('IsRunning',
'bool',
[],
is_const=True)
## event-id.h (module 'core'): ns3::EventImpl * ns3::EventId::PeekEventImpl() const [member function]
cls.add_method('PeekEventImpl',
'ns3::EventImpl *',
[],
is_const=True)
return
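# Illustrative note (not part of the generated bindings): the EventId methods
# bound above are normally obtained by scheduling events on the simulator. A
# minimal sketch, assuming the core bindings import as `ns.core` and that the
# Python Simulator.Schedule wrapper accepts a plain callable (both are
# assumptions about the local build):
#
#   import ns.core
#
#   def _fired():
#       print("event fired")
#
#   event = ns.core.Simulator.Schedule(ns.core.Seconds(1.0), _fired)
#   if not event.IsExpired():
#       event.Cancel()              # Cancel() / IsExpired() as bound above
#   ns.core.Simulator.Run()
#   ns.core.Simulator.Destroy()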
def register_Ns3InterferenceHelper_methods(root_module, cls):
## interference-helper.h (module 'wifi'): ns3::InterferenceHelper::InterferenceHelper() [constructor]
cls.add_constructor([])
## interference-helper.h (module 'wifi'): ns3::Ptr<ns3::InterferenceHelper::Event> ns3::InterferenceHelper::Add(uint32_t size, ns3::WifiMode payloadMode, ns3::WifiPreamble preamble, ns3::Time duration, double rxPower) [member function]
cls.add_method('Add',
'ns3::Ptr< ns3::InterferenceHelper::Event >',
[param('uint32_t', 'size'), param('ns3::WifiMode', 'payloadMode'), param('ns3::WifiPreamble', 'preamble'), param('ns3::Time', 'duration'), param('double', 'rxPower')])
## interference-helper.h (module 'wifi'): ns3::InterferenceHelper::SnrPer ns3::InterferenceHelper::CalculateSnrPer(ns3::Ptr<ns3::InterferenceHelper::Event> event) [member function]
cls.add_method('CalculateSnrPer',
'ns3::InterferenceHelper::SnrPer',
[param('ns3::Ptr< ns3::InterferenceHelper::Event >', 'event')])
## interference-helper.h (module 'wifi'): void ns3::InterferenceHelper::EraseEvents() [member function]
cls.add_method('EraseEvents',
'void',
[])
## interference-helper.h (module 'wifi'): ns3::Time ns3::InterferenceHelper::GetEnergyDuration(double energyW) [member function]
cls.add_method('GetEnergyDuration',
'ns3::Time',
[param('double', 'energyW')])
## interference-helper.h (module 'wifi'): ns3::Ptr<ns3::ErrorRateModel> ns3::InterferenceHelper::GetErrorRateModel() const [member function]
cls.add_method('GetErrorRateModel',
'ns3::Ptr< ns3::ErrorRateModel >',
[],
is_const=True)
## interference-helper.h (module 'wifi'): double ns3::InterferenceHelper::GetNoiseFigure() const [member function]
cls.add_method('GetNoiseFigure',
'double',
[],
is_const=True)
## interference-helper.h (module 'wifi'): void ns3::InterferenceHelper::NotifyRxEnd() [member function]
cls.add_method('NotifyRxEnd',
'void',
[])
## interference-helper.h (module 'wifi'): void ns3::InterferenceHelper::NotifyRxStart() [member function]
cls.add_method('NotifyRxStart',
'void',
[])
## interference-helper.h (module 'wifi'): void ns3::InterferenceHelper::SetErrorRateModel(ns3::Ptr<ns3::ErrorRateModel> rate) [member function]
cls.add_method('SetErrorRateModel',
'void',
[param('ns3::Ptr< ns3::ErrorRateModel >', 'rate')])
## interference-helper.h (module 'wifi'): void ns3::InterferenceHelper::SetNoiseFigure(double value) [member function]
cls.add_method('SetNoiseFigure',
'void',
[param('double', 'value')])
return
def register_Ns3InterferenceHelperSnrPer_methods(root_module, cls):
## interference-helper.h (module 'wifi'): ns3::InterferenceHelper::SnrPer::SnrPer() [constructor]
cls.add_constructor([])
## interference-helper.h (module 'wifi'): ns3::InterferenceHelper::SnrPer::SnrPer(ns3::InterferenceHelper::SnrPer const & arg0) [copy constructor]
cls.add_constructor([param('ns3::InterferenceHelper::SnrPer const &', 'arg0')])
## interference-helper.h (module 'wifi'): ns3::InterferenceHelper::SnrPer::per [variable]
cls.add_instance_attribute('per', 'double', is_const=False)
## interference-helper.h (module 'wifi'): ns3::InterferenceHelper::SnrPer::snr [variable]
cls.add_instance_attribute('snr', 'double', is_const=False)
return
def register_Ns3Ipv4Address_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_binary_comparison_operator('<')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(ns3::Ipv4Address const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4Address const &', 'arg0')])
## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(uint32_t address) [constructor]
cls.add_constructor([param('uint32_t', 'address')])
## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(char const * address) [constructor]
cls.add_constructor([param('char const *', 'address')])
## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4Address::CombineMask(ns3::Ipv4Mask const & mask) const [member function]
cls.add_method('CombineMask',
'ns3::Ipv4Address',
[param('ns3::Ipv4Mask const &', 'mask')],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::ConvertFrom(ns3::Address const & address) [member function]
cls.add_method('ConvertFrom',
'ns3::Ipv4Address',
[param('ns3::Address const &', 'address')],
is_static=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::Deserialize(uint8_t const * buf) [member function]
cls.add_method('Deserialize',
'ns3::Ipv4Address',
[param('uint8_t const *', 'buf')],
is_static=True)
## ipv4-address.h (module 'network'): uint32_t ns3::Ipv4Address::Get() const [member function]
cls.add_method('Get',
'uint32_t',
[],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetAny() [member function]
cls.add_method('GetAny',
'ns3::Ipv4Address',
[],
is_static=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetBroadcast() [member function]
cls.add_method('GetBroadcast',
'ns3::Ipv4Address',
[],
is_static=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetLoopback() [member function]
cls.add_method('GetLoopback',
'ns3::Ipv4Address',
[],
is_static=True)
## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4Address::GetSubnetDirectedBroadcast(ns3::Ipv4Mask const & mask) const [member function]
cls.add_method('GetSubnetDirectedBroadcast',
'ns3::Ipv4Address',
[param('ns3::Ipv4Mask const &', 'mask')],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetZero() [member function]
cls.add_method('GetZero',
'ns3::Ipv4Address',
[],
is_static=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsBroadcast() const [member function]
cls.add_method('IsBroadcast',
'bool',
[],
is_const=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsEqual(ns3::Ipv4Address const & other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ipv4Address const &', 'other')],
is_const=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsLocalMulticast() const [member function]
cls.add_method('IsLocalMulticast',
'bool',
[],
is_const=True)
## ipv4-address.h (module 'network'): static bool ns3::Ipv4Address::IsMatchingType(ns3::Address const & address) [member function]
cls.add_method('IsMatchingType',
'bool',
[param('ns3::Address const &', 'address')],
is_static=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsMulticast() const [member function]
cls.add_method('IsMulticast',
'bool',
[],
is_const=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsSubnetDirectedBroadcast(ns3::Ipv4Mask const & mask) const [member function]
cls.add_method('IsSubnetDirectedBroadcast',
'bool',
[param('ns3::Ipv4Mask const &', 'mask')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Serialize(uint8_t * buf) const [member function]
cls.add_method('Serialize',
'void',
[param('uint8_t *', 'buf')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Set(uint32_t address) [member function]
cls.add_method('Set',
'void',
[param('uint32_t', 'address')])
## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Set(char const * address) [member function]
cls.add_method('Set',
'void',
[param('char const *', 'address')])
return
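# Illustrative note (not part of the generated bindings): the Ipv4Address
# registrations above map one-to-one onto the C++ API, so typical Python usage
# looks like the following hedged sketch (assuming the network module imports
# as `ns.network`):
#
#   import ns.network
#
#   addr  = ns.network.Ipv4Address("10.1.1.1")        # char const * constructor
#   mask  = ns.network.Ipv4Mask("255.255.255.0")
#   net   = addr.CombineMask(mask)                     # 10.1.1.0
#   bcast = addr.GetSubnetDirectedBroadcast(mask)      # 10.1.1.255
#   print(net, bcast, addr.IsMulticast())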
def register_Ns3Ipv4Mask_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask(ns3::Ipv4Mask const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4Mask const &', 'arg0')])
## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask(uint32_t mask) [constructor]
cls.add_constructor([param('uint32_t', 'mask')])
## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask(char const * mask) [constructor]
cls.add_constructor([param('char const *', 'mask')])
## ipv4-address.h (module 'network'): uint32_t ns3::Ipv4Mask::Get() const [member function]
cls.add_method('Get',
'uint32_t',
[],
is_const=True)
## ipv4-address.h (module 'network'): uint32_t ns3::Ipv4Mask::GetInverse() const [member function]
cls.add_method('GetInverse',
'uint32_t',
[],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Mask ns3::Ipv4Mask::GetLoopback() [member function]
cls.add_method('GetLoopback',
'ns3::Ipv4Mask',
[],
is_static=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Mask ns3::Ipv4Mask::GetOnes() [member function]
cls.add_method('GetOnes',
'ns3::Ipv4Mask',
[],
is_static=True)
## ipv4-address.h (module 'network'): uint16_t ns3::Ipv4Mask::GetPrefixLength() const [member function]
cls.add_method('GetPrefixLength',
'uint16_t',
[],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Mask ns3::Ipv4Mask::GetZero() [member function]
cls.add_method('GetZero',
'ns3::Ipv4Mask',
[],
is_static=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Mask::IsEqual(ns3::Ipv4Mask other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ipv4Mask', 'other')],
is_const=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Mask::IsMatch(ns3::Ipv4Address a, ns3::Ipv4Address b) const [member function]
cls.add_method('IsMatch',
'bool',
[param('ns3::Ipv4Address', 'a'), param('ns3::Ipv4Address', 'b')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Mask::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Mask::Set(uint32_t mask) [member function]
cls.add_method('Set',
'void',
[param('uint32_t', 'mask')])
return
def register_Ns3Ipv6Address_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_binary_comparison_operator('<')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(char const * address) [constructor]
cls.add_constructor([param('char const *', 'address')])
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(uint8_t * address) [constructor]
cls.add_constructor([param('uint8_t *', 'address')])
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(ns3::Ipv6Address const & addr) [copy constructor]
cls.add_constructor([param('ns3::Ipv6Address const &', 'addr')])
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(ns3::Ipv6Address const * addr) [constructor]
cls.add_constructor([param('ns3::Ipv6Address const *', 'addr')])
## ipv6-address.h (module 'network'): ns3::Ipv6Address ns3::Ipv6Address::CombinePrefix(ns3::Ipv6Prefix const & prefix) [member function]
cls.add_method('CombinePrefix',
'ns3::Ipv6Address',
[param('ns3::Ipv6Prefix const &', 'prefix')])
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::ConvertFrom(ns3::Address const & address) [member function]
cls.add_method('ConvertFrom',
'ns3::Ipv6Address',
[param('ns3::Address const &', 'address')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::Deserialize(uint8_t const * buf) [member function]
cls.add_method('Deserialize',
'ns3::Ipv6Address',
[param('uint8_t const *', 'buf')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAllHostsMulticast() [member function]
cls.add_method('GetAllHostsMulticast',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAllNodesMulticast() [member function]
cls.add_method('GetAllNodesMulticast',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAllRoutersMulticast() [member function]
cls.add_method('GetAllRoutersMulticast',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAny() [member function]
cls.add_method('GetAny',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::GetBytes(uint8_t * buf) const [member function]
cls.add_method('GetBytes',
'void',
[param('uint8_t *', 'buf')],
is_const=True)
## ipv6-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv6Address::GetIpv4MappedAddress() const [member function]
cls.add_method('GetIpv4MappedAddress',
'ns3::Ipv4Address',
[],
is_const=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetLoopback() [member function]
cls.add_method('GetLoopback',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetOnes() [member function]
cls.add_method('GetOnes',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetZero() [member function]
cls.add_method('GetZero',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAllHostsMulticast() const [member function]
cls.add_method('IsAllHostsMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAllNodesMulticast() const [member function]
cls.add_method('IsAllNodesMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAllRoutersMulticast() const [member function]
cls.add_method('IsAllRoutersMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAny() const [member function]
cls.add_method('IsAny',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsEqual(ns3::Ipv6Address const & other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ipv6Address const &', 'other')],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsIpv4MappedAddress() [member function]
cls.add_method('IsIpv4MappedAddress',
'bool',
[])
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsLinkLocal() const [member function]
cls.add_method('IsLinkLocal',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsLinkLocalMulticast() const [member function]
cls.add_method('IsLinkLocalMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsLocalhost() const [member function]
cls.add_method('IsLocalhost',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): static bool ns3::Ipv6Address::IsMatchingType(ns3::Address const & address) [member function]
cls.add_method('IsMatchingType',
'bool',
[param('ns3::Address const &', 'address')],
is_static=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsMulticast() const [member function]
cls.add_method('IsMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsSolicitedMulticast() const [member function]
cls.add_method('IsSolicitedMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredAddress(ns3::Mac48Address addr, ns3::Ipv6Address prefix) [member function]
cls.add_method('MakeAutoconfiguredAddress',
'ns3::Ipv6Address',
[param('ns3::Mac48Address', 'addr'), param('ns3::Ipv6Address', 'prefix')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredLinkLocalAddress(ns3::Mac48Address mac) [member function]
cls.add_method('MakeAutoconfiguredLinkLocalAddress',
'ns3::Ipv6Address',
[param('ns3::Mac48Address', 'mac')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeIpv4MappedAddress(ns3::Ipv4Address addr) [member function]
cls.add_method('MakeIpv4MappedAddress',
'ns3::Ipv6Address',
[param('ns3::Ipv4Address', 'addr')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeSolicitedAddress(ns3::Ipv6Address addr) [member function]
cls.add_method('MakeSolicitedAddress',
'ns3::Ipv6Address',
[param('ns3::Ipv6Address', 'addr')],
is_static=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Serialize(uint8_t * buf) const [member function]
cls.add_method('Serialize',
'void',
[param('uint8_t *', 'buf')],
is_const=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Set(char const * address) [member function]
cls.add_method('Set',
'void',
[param('char const *', 'address')])
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Set(uint8_t * address) [member function]
cls.add_method('Set',
'void',
[param('uint8_t *', 'address')])
return
def register_Ns3Ipv6Prefix_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(uint8_t * prefix) [constructor]
cls.add_constructor([param('uint8_t *', 'prefix')])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(char const * prefix) [constructor]
cls.add_constructor([param('char const *', 'prefix')])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(uint8_t prefix) [constructor]
cls.add_constructor([param('uint8_t', 'prefix')])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(ns3::Ipv6Prefix const & prefix) [copy constructor]
cls.add_constructor([param('ns3::Ipv6Prefix const &', 'prefix')])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(ns3::Ipv6Prefix const * prefix) [constructor]
cls.add_constructor([param('ns3::Ipv6Prefix const *', 'prefix')])
## ipv6-address.h (module 'network'): void ns3::Ipv6Prefix::GetBytes(uint8_t * buf) const [member function]
cls.add_method('GetBytes',
'void',
[param('uint8_t *', 'buf')],
is_const=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Prefix ns3::Ipv6Prefix::GetLoopback() [member function]
cls.add_method('GetLoopback',
'ns3::Ipv6Prefix',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Prefix ns3::Ipv6Prefix::GetOnes() [member function]
cls.add_method('GetOnes',
'ns3::Ipv6Prefix',
[],
is_static=True)
## ipv6-address.h (module 'network'): uint8_t ns3::Ipv6Prefix::GetPrefixLength() const [member function]
cls.add_method('GetPrefixLength',
'uint8_t',
[],
is_const=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Prefix ns3::Ipv6Prefix::GetZero() [member function]
cls.add_method('GetZero',
'ns3::Ipv6Prefix',
[],
is_static=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Prefix::IsEqual(ns3::Ipv6Prefix const & other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ipv6Prefix const &', 'other')],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Prefix::IsMatch(ns3::Ipv6Address a, ns3::Ipv6Address b) const [member function]
cls.add_method('IsMatch',
'bool',
[param('ns3::Ipv6Address', 'a'), param('ns3::Ipv6Address', 'b')],
is_const=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Prefix::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
return
def register_Ns3Mac48Address_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_binary_comparison_operator('<')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## mac48-address.h (module 'network'): ns3::Mac48Address::Mac48Address(ns3::Mac48Address const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Mac48Address const &', 'arg0')])
## mac48-address.h (module 'network'): ns3::Mac48Address::Mac48Address() [constructor]
cls.add_constructor([])
## mac48-address.h (module 'network'): ns3::Mac48Address::Mac48Address(char const * str) [constructor]
cls.add_constructor([param('char const *', 'str')])
## mac48-address.h (module 'network'): static ns3::Mac48Address ns3::Mac48Address::Allocate() [member function]
cls.add_method('Allocate',
'ns3::Mac48Address',
[],
is_static=True)
## mac48-address.h (module 'network'): static ns3::Mac48Address ns3::Mac48Address::ConvertFrom(ns3::Address const & address) [member function]
cls.add_method('ConvertFrom',
'ns3::Mac48Address',
[param('ns3::Address const &', 'address')],
is_static=True)
## mac48-address.h (module 'network'): void ns3::Mac48Address::CopyFrom(uint8_t const * buffer) [member function]
cls.add_method('CopyFrom',
'void',
[param('uint8_t const *', 'buffer')])
## mac48-address.h (module 'network'): void ns3::Mac48Address::CopyTo(uint8_t * buffer) const [member function]
cls.add_method('CopyTo',
'void',
[param('uint8_t *', 'buffer')],
is_const=True)
## mac48-address.h (module 'network'): static ns3::Mac48Address ns3::Mac48Address::GetBroadcast() [member function]
cls.add_method('GetBroadcast',
'ns3::Mac48Address',
[],
is_static=True)
## mac48-address.h (module 'network'): static ns3::Mac48Address ns3::Mac48Address::GetMulticast(ns3::Ipv4Address address) [member function]
cls.add_method('GetMulticast',
'ns3::Mac48Address',
[param('ns3::Ipv4Address', 'address')],
is_static=True)
## mac48-address.h (module 'network'): static ns3::Mac48Address ns3::Mac48Address::GetMulticast(ns3::Ipv6Address address) [member function]
cls.add_method('GetMulticast',
'ns3::Mac48Address',
[param('ns3::Ipv6Address', 'address')],
is_static=True)
## mac48-address.h (module 'network'): static ns3::Mac48Address ns3::Mac48Address::GetMulticast6Prefix() [member function]
cls.add_method('GetMulticast6Prefix',
'ns3::Mac48Address',
[],
is_static=True)
## mac48-address.h (module 'network'): static ns3::Mac48Address ns3::Mac48Address::GetMulticastPrefix() [member function]
cls.add_method('GetMulticastPrefix',
'ns3::Mac48Address',
[],
is_static=True)
## mac48-address.h (module 'network'): bool ns3::Mac48Address::IsBroadcast() const [member function]
cls.add_method('IsBroadcast',
'bool',
[],
is_const=True)
## mac48-address.h (module 'network'): bool ns3::Mac48Address::IsGroup() const [member function]
cls.add_method('IsGroup',
'bool',
[],
is_const=True)
## mac48-address.h (module 'network'): static bool ns3::Mac48Address::IsMatchingType(ns3::Address const & address) [member function]
cls.add_method('IsMatchingType',
'bool',
[param('ns3::Address const &', 'address')],
is_static=True)
return
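# Illustrative note (not part of the generated bindings): Mac48Address is most
# often built from a string or allocated sequentially via the static Allocate()
# bound above. A hedged sketch, assuming `ns.network` is the import path:
#
#   import ns.network
#
#   a = ns.network.Mac48Address("00:00:00:00:00:01")
#   b = ns.network.Mac48Address.Allocate()             # next unused address
#   print(a == b, a.IsBroadcast(), ns.network.Mac48Address.GetBroadcast())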
def register_Ns3MacLowBlockAckEventListener_methods(root_module, cls):
## mac-low.h (module 'wifi'): ns3::MacLowBlockAckEventListener::MacLowBlockAckEventListener(ns3::MacLowBlockAckEventListener const & arg0) [copy constructor]
cls.add_constructor([param('ns3::MacLowBlockAckEventListener const &', 'arg0')])
## mac-low.h (module 'wifi'): ns3::MacLowBlockAckEventListener::MacLowBlockAckEventListener() [constructor]
cls.add_constructor([])
## mac-low.h (module 'wifi'): void ns3::MacLowBlockAckEventListener::BlockAckInactivityTimeout(ns3::Mac48Address originator, uint8_t tid) [member function]
cls.add_method('BlockAckInactivityTimeout',
'void',
[param('ns3::Mac48Address', 'originator'), param('uint8_t', 'tid')],
is_pure_virtual=True, is_virtual=True)
return
def register_Ns3MacLowDcfListener_methods(root_module, cls):
## mac-low.h (module 'wifi'): ns3::MacLowDcfListener::MacLowDcfListener(ns3::MacLowDcfListener const & arg0) [copy constructor]
cls.add_constructor([param('ns3::MacLowDcfListener const &', 'arg0')])
## mac-low.h (module 'wifi'): ns3::MacLowDcfListener::MacLowDcfListener() [constructor]
cls.add_constructor([])
## mac-low.h (module 'wifi'): void ns3::MacLowDcfListener::AckTimeoutReset() [member function]
cls.add_method('AckTimeoutReset',
'void',
[],
is_pure_virtual=True, is_virtual=True)
## mac-low.h (module 'wifi'): void ns3::MacLowDcfListener::AckTimeoutStart(ns3::Time duration) [member function]
cls.add_method('AckTimeoutStart',
'void',
[param('ns3::Time', 'duration')],
is_pure_virtual=True, is_virtual=True)
## mac-low.h (module 'wifi'): void ns3::MacLowDcfListener::CtsTimeoutReset() [member function]
cls.add_method('CtsTimeoutReset',
'void',
[],
is_pure_virtual=True, is_virtual=True)
## mac-low.h (module 'wifi'): void ns3::MacLowDcfListener::CtsTimeoutStart(ns3::Time duration) [member function]
cls.add_method('CtsTimeoutStart',
'void',
[param('ns3::Time', 'duration')],
is_pure_virtual=True, is_virtual=True)
## mac-low.h (module 'wifi'): void ns3::MacLowDcfListener::NavReset(ns3::Time duration) [member function]
cls.add_method('NavReset',
'void',
[param('ns3::Time', 'duration')],
is_pure_virtual=True, is_virtual=True)
## mac-low.h (module 'wifi'): void ns3::MacLowDcfListener::NavStart(ns3::Time duration) [member function]
cls.add_method('NavStart',
'void',
[param('ns3::Time', 'duration')],
is_pure_virtual=True, is_virtual=True)
return
def register_Ns3MacLowTransmissionListener_methods(root_module, cls):
## mac-low.h (module 'wifi'): ns3::MacLowTransmissionListener::MacLowTransmissionListener(ns3::MacLowTransmissionListener const & arg0) [copy constructor]
cls.add_constructor([param('ns3::MacLowTransmissionListener const &', 'arg0')])
## mac-low.h (module 'wifi'): ns3::MacLowTransmissionListener::MacLowTransmissionListener() [constructor]
cls.add_constructor([])
## mac-low.h (module 'wifi'): void ns3::MacLowTransmissionListener::Cancel() [member function]
cls.add_method('Cancel',
'void',
[],
is_pure_virtual=True, is_virtual=True)
## mac-low.h (module 'wifi'): void ns3::MacLowTransmissionListener::GotAck(double snr, ns3::WifiMode txMode) [member function]
cls.add_method('GotAck',
'void',
[param('double', 'snr'), param('ns3::WifiMode', 'txMode')],
is_pure_virtual=True, is_virtual=True)
## mac-low.h (module 'wifi'): void ns3::MacLowTransmissionListener::GotBlockAck(ns3::CtrlBAckResponseHeader const * blockAck, ns3::Mac48Address source) [member function]
cls.add_method('GotBlockAck',
'void',
[param('ns3::CtrlBAckResponseHeader const *', 'blockAck'), param('ns3::Mac48Address', 'source')],
is_virtual=True)
## mac-low.h (module 'wifi'): void ns3::MacLowTransmissionListener::GotCts(double snr, ns3::WifiMode txMode) [member function]
cls.add_method('GotCts',
'void',
[param('double', 'snr'), param('ns3::WifiMode', 'txMode')],
is_pure_virtual=True, is_virtual=True)
## mac-low.h (module 'wifi'): void ns3::MacLowTransmissionListener::MissedAck() [member function]
cls.add_method('MissedAck',
'void',
[],
is_pure_virtual=True, is_virtual=True)
## mac-low.h (module 'wifi'): void ns3::MacLowTransmissionListener::MissedBlockAck() [member function]
cls.add_method('MissedBlockAck',
'void',
[],
is_virtual=True)
## mac-low.h (module 'wifi'): void ns3::MacLowTransmissionListener::MissedCts() [member function]
cls.add_method('MissedCts',
'void',
[],
is_pure_virtual=True, is_virtual=True)
## mac-low.h (module 'wifi'): void ns3::MacLowTransmissionListener::StartNext() [member function]
cls.add_method('StartNext',
'void',
[],
is_pure_virtual=True, is_virtual=True)
return
def register_Ns3MacLowTransmissionParameters_methods(root_module, cls):
cls.add_output_stream_operator()
## mac-low.h (module 'wifi'): ns3::MacLowTransmissionParameters::MacLowTransmissionParameters(ns3::MacLowTransmissionParameters const & arg0) [copy constructor]
cls.add_constructor([param('ns3::MacLowTransmissionParameters const &', 'arg0')])
## mac-low.h (module 'wifi'): ns3::MacLowTransmissionParameters::MacLowTransmissionParameters() [constructor]
cls.add_constructor([])
## mac-low.h (module 'wifi'): void ns3::MacLowTransmissionParameters::DisableAck() [member function]
cls.add_method('DisableAck',
'void',
[])
## mac-low.h (module 'wifi'): void ns3::MacLowTransmissionParameters::DisableNextData() [member function]
cls.add_method('DisableNextData',
'void',
[])
## mac-low.h (module 'wifi'): void ns3::MacLowTransmissionParameters::DisableOverrideDurationId() [member function]
cls.add_method('DisableOverrideDurationId',
'void',
[])
## mac-low.h (module 'wifi'): void ns3::MacLowTransmissionParameters::DisableRts() [member function]
cls.add_method('DisableRts',
'void',
[])
## mac-low.h (module 'wifi'): void ns3::MacLowTransmissionParameters::EnableAck() [member function]
cls.add_method('EnableAck',
'void',
[])
## mac-low.h (module 'wifi'): void ns3::MacLowTransmissionParameters::EnableBasicBlockAck() [member function]
cls.add_method('EnableBasicBlockAck',
'void',
[])
## mac-low.h (module 'wifi'): void ns3::MacLowTransmissionParameters::EnableCompressedBlockAck() [member function]
cls.add_method('EnableCompressedBlockAck',
'void',
[])
## mac-low.h (module 'wifi'): void ns3::MacLowTransmissionParameters::EnableFastAck() [member function]
cls.add_method('EnableFastAck',
'void',
[])
## mac-low.h (module 'wifi'): void ns3::MacLowTransmissionParameters::EnableMultiTidBlockAck() [member function]
cls.add_method('EnableMultiTidBlockAck',
'void',
[])
## mac-low.h (module 'wifi'): void ns3::MacLowTransmissionParameters::EnableNextData(uint32_t size) [member function]
cls.add_method('EnableNextData',
'void',
[param('uint32_t', 'size')])
## mac-low.h (module 'wifi'): void ns3::MacLowTransmissionParameters::EnableOverrideDurationId(ns3::Time durationId) [member function]
cls.add_method('EnableOverrideDurationId',
'void',
[param('ns3::Time', 'durationId')])
## mac-low.h (module 'wifi'): void ns3::MacLowTransmissionParameters::EnableRts() [member function]
cls.add_method('EnableRts',
'void',
[])
## mac-low.h (module 'wifi'): void ns3::MacLowTransmissionParameters::EnableSuperFastAck() [member function]
cls.add_method('EnableSuperFastAck',
'void',
[])
## mac-low.h (module 'wifi'): ns3::Time ns3::MacLowTransmissionParameters::GetDurationId() const [member function]
cls.add_method('GetDurationId',
'ns3::Time',
[],
is_const=True)
## mac-low.h (module 'wifi'): uint32_t ns3::MacLowTransmissionParameters::GetNextPacketSize() const [member function]
cls.add_method('GetNextPacketSize',
'uint32_t',
[],
is_const=True)
## mac-low.h (module 'wifi'): bool ns3::MacLowTransmissionParameters::HasDurationId() const [member function]
cls.add_method('HasDurationId',
'bool',
[],
is_const=True)
## mac-low.h (module 'wifi'): bool ns3::MacLowTransmissionParameters::HasNextPacket() const [member function]
cls.add_method('HasNextPacket',
'bool',
[],
is_const=True)
## mac-low.h (module 'wifi'): bool ns3::MacLowTransmissionParameters::MustSendRts() const [member function]
cls.add_method('MustSendRts',
'bool',
[],
is_const=True)
## mac-low.h (module 'wifi'): bool ns3::MacLowTransmissionParameters::MustWaitAck() const [member function]
cls.add_method('MustWaitAck',
'bool',
[],
is_const=True)
## mac-low.h (module 'wifi'): bool ns3::MacLowTransmissionParameters::MustWaitBasicBlockAck() const [member function]
cls.add_method('MustWaitBasicBlockAck',
'bool',
[],
is_const=True)
## mac-low.h (module 'wifi'): bool ns3::MacLowTransmissionParameters::MustWaitCompressedBlockAck() const [member function]
cls.add_method('MustWaitCompressedBlockAck',
'bool',
[],
is_const=True)
## mac-low.h (module 'wifi'): bool ns3::MacLowTransmissionParameters::MustWaitFastAck() const [member function]
cls.add_method('MustWaitFastAck',
'bool',
[],
is_const=True)
## mac-low.h (module 'wifi'): bool ns3::MacLowTransmissionParameters::MustWaitMultiTidBlockAck() const [member function]
cls.add_method('MustWaitMultiTidBlockAck',
'bool',
[],
is_const=True)
## mac-low.h (module 'wifi'): bool ns3::MacLowTransmissionParameters::MustWaitNormalAck() const [member function]
cls.add_method('MustWaitNormalAck',
'bool',
[],
is_const=True)
## mac-low.h (module 'wifi'): bool ns3::MacLowTransmissionParameters::MustWaitSuperFastAck() const [member function]
cls.add_method('MustWaitSuperFastAck',
'bool',
[],
is_const=True)
return
def register_Ns3MacRxMiddle_methods(root_module, cls):
## mac-rx-middle.h (module 'wifi'): ns3::MacRxMiddle::MacRxMiddle(ns3::MacRxMiddle const & arg0) [copy constructor]
cls.add_constructor([param('ns3::MacRxMiddle const &', 'arg0')])
## mac-rx-middle.h (module 'wifi'): ns3::MacRxMiddle::MacRxMiddle() [constructor]
cls.add_constructor([])
## mac-rx-middle.h (module 'wifi'): void ns3::MacRxMiddle::Receive(ns3::Ptr<ns3::Packet> packet, ns3::WifiMacHeader const * hdr) [member function]
cls.add_method('Receive',
'void',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::WifiMacHeader const *', 'hdr')])
## mac-rx-middle.h (module 'wifi'): void ns3::MacRxMiddle::SetForwardCallback(ns3::Callback<void, ns3::Ptr<ns3::Packet>, ns3::WifiMacHeader const*, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> callback) [member function]
cls.add_method('SetForwardCallback',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::WifiMacHeader const *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')])
return
def register_Ns3NetDeviceContainer_methods(root_module, cls):
## net-device-container.h (module 'network'): ns3::NetDeviceContainer::NetDeviceContainer(ns3::NetDeviceContainer const & arg0) [copy constructor]
cls.add_constructor([param('ns3::NetDeviceContainer const &', 'arg0')])
## net-device-container.h (module 'network'): ns3::NetDeviceContainer::NetDeviceContainer() [constructor]
cls.add_constructor([])
## net-device-container.h (module 'network'): ns3::NetDeviceContainer::NetDeviceContainer(ns3::Ptr<ns3::NetDevice> dev) [constructor]
cls.add_constructor([param('ns3::Ptr< ns3::NetDevice >', 'dev')])
## net-device-container.h (module 'network'): ns3::NetDeviceContainer::NetDeviceContainer(std::string devName) [constructor]
cls.add_constructor([param('std::string', 'devName')])
## net-device-container.h (module 'network'): ns3::NetDeviceContainer::NetDeviceContainer(ns3::NetDeviceContainer const & a, ns3::NetDeviceContainer const & b) [constructor]
cls.add_constructor([param('ns3::NetDeviceContainer const &', 'a'), param('ns3::NetDeviceContainer const &', 'b')])
## net-device-container.h (module 'network'): void ns3::NetDeviceContainer::Add(ns3::NetDeviceContainer other) [member function]
cls.add_method('Add',
'void',
[param('ns3::NetDeviceContainer', 'other')])
## net-device-container.h (module 'network'): void ns3::NetDeviceContainer::Add(ns3::Ptr<ns3::NetDevice> device) [member function]
cls.add_method('Add',
'void',
[param('ns3::Ptr< ns3::NetDevice >', 'device')])
## net-device-container.h (module 'network'): void ns3::NetDeviceContainer::Add(std::string deviceName) [member function]
cls.add_method('Add',
'void',
[param('std::string', 'deviceName')])
## net-device-container.h (module 'network'): __gnu_cxx::__normal_iterator<const ns3::Ptr<ns3::NetDevice>*,std::vector<ns3::Ptr<ns3::NetDevice>, std::allocator<ns3::Ptr<ns3::NetDevice> > > > ns3::NetDeviceContainer::Begin() const [member function]
cls.add_method('Begin',
'__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::NetDevice > const, std::vector< ns3::Ptr< ns3::NetDevice > > >',
[],
is_const=True)
## net-device-container.h (module 'network'): __gnu_cxx::__normal_iterator<const ns3::Ptr<ns3::NetDevice>*,std::vector<ns3::Ptr<ns3::NetDevice>, std::allocator<ns3::Ptr<ns3::NetDevice> > > > ns3::NetDeviceContainer::End() const [member function]
cls.add_method('End',
'__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::NetDevice > const, std::vector< ns3::Ptr< ns3::NetDevice > > >',
[],
is_const=True)
## net-device-container.h (module 'network'): ns3::Ptr<ns3::NetDevice> ns3::NetDeviceContainer::Get(uint32_t i) const [member function]
cls.add_method('Get',
'ns3::Ptr< ns3::NetDevice >',
[param('uint32_t', 'i')],
is_const=True)
## net-device-container.h (module 'network'): uint32_t ns3::NetDeviceContainer::GetN() const [member function]
cls.add_method('GetN',
'uint32_t',
[],
is_const=True)
return
def register_Ns3NodeContainer_methods(root_module, cls):
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::NodeContainer const & arg0) [copy constructor]
cls.add_constructor([param('ns3::NodeContainer const &', 'arg0')])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer() [constructor]
cls.add_constructor([])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::Ptr<ns3::Node> node) [constructor]
cls.add_constructor([param('ns3::Ptr< ns3::Node >', 'node')])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(std::string nodeName) [constructor]
cls.add_constructor([param('std::string', 'nodeName')])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::NodeContainer const & a, ns3::NodeContainer const & b) [constructor]
cls.add_constructor([param('ns3::NodeContainer const &', 'a'), param('ns3::NodeContainer const &', 'b')])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::NodeContainer const & a, ns3::NodeContainer const & b, ns3::NodeContainer const & c) [constructor]
cls.add_constructor([param('ns3::NodeContainer const &', 'a'), param('ns3::NodeContainer const &', 'b'), param('ns3::NodeContainer const &', 'c')])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::NodeContainer const & a, ns3::NodeContainer const & b, ns3::NodeContainer const & c, ns3::NodeContainer const & d) [constructor]
cls.add_constructor([param('ns3::NodeContainer const &', 'a'), param('ns3::NodeContainer const &', 'b'), param('ns3::NodeContainer const &', 'c'), param('ns3::NodeContainer const &', 'd')])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::NodeContainer const & a, ns3::NodeContainer const & b, ns3::NodeContainer const & c, ns3::NodeContainer const & d, ns3::NodeContainer const & e) [constructor]
cls.add_constructor([param('ns3::NodeContainer const &', 'a'), param('ns3::NodeContainer const &', 'b'), param('ns3::NodeContainer const &', 'c'), param('ns3::NodeContainer const &', 'd'), param('ns3::NodeContainer const &', 'e')])
## node-container.h (module 'network'): void ns3::NodeContainer::Add(ns3::NodeContainer other) [member function]
cls.add_method('Add',
'void',
[param('ns3::NodeContainer', 'other')])
## node-container.h (module 'network'): void ns3::NodeContainer::Add(ns3::Ptr<ns3::Node> node) [member function]
cls.add_method('Add',
'void',
[param('ns3::Ptr< ns3::Node >', 'node')])
## node-container.h (module 'network'): void ns3::NodeContainer::Add(std::string nodeName) [member function]
cls.add_method('Add',
'void',
[param('std::string', 'nodeName')])
## node-container.h (module 'network'): __gnu_cxx::__normal_iterator<const ns3::Ptr<ns3::Node>*,std::vector<ns3::Ptr<ns3::Node>, std::allocator<ns3::Ptr<ns3::Node> > > > ns3::NodeContainer::Begin() const [member function]
cls.add_method('Begin',
'__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::Node > const, std::vector< ns3::Ptr< ns3::Node > > >',
[],
is_const=True)
## node-container.h (module 'network'): void ns3::NodeContainer::Create(uint32_t n) [member function]
cls.add_method('Create',
'void',
[param('uint32_t', 'n')])
## node-container.h (module 'network'): void ns3::NodeContainer::Create(uint32_t n, uint32_t systemId) [member function]
cls.add_method('Create',
'void',
[param('uint32_t', 'n'), param('uint32_t', 'systemId')])
## node-container.h (module 'network'): __gnu_cxx::__normal_iterator<const ns3::Ptr<ns3::Node>*,std::vector<ns3::Ptr<ns3::Node>, std::allocator<ns3::Ptr<ns3::Node> > > > ns3::NodeContainer::End() const [member function]
cls.add_method('End',
'__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::Node > const, std::vector< ns3::Ptr< ns3::Node > > >',
[],
is_const=True)
## node-container.h (module 'network'): ns3::Ptr<ns3::Node> ns3::NodeContainer::Get(uint32_t i) const [member function]
cls.add_method('Get',
'ns3::Ptr< ns3::Node >',
[param('uint32_t', 'i')],
is_const=True)
## node-container.h (module 'network'): static ns3::NodeContainer ns3::NodeContainer::GetGlobal() [member function]
cls.add_method('GetGlobal',
'ns3::NodeContainer',
[],
is_static=True)
## node-container.h (module 'network'): uint32_t ns3::NodeContainer::GetN() const [member function]
cls.add_method('GetN',
'uint32_t',
[],
is_const=True)
return
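# Illustrative note (not part of the generated bindings): NodeContainer is the
# usual entry point of an ns-3 Python script; the Create/Get/GetN methods bound
# above are used as in this minimal sketch (assuming the network module imports
# as `ns.network`):
#
#   import ns.network
#
#   nodes = ns.network.NodeContainer()
#   nodes.Create(3)                     # Create(uint32_t n)
#   for i in range(nodes.GetN()):
#       node = nodes.Get(i)             # ns3::Ptr<ns3::Node>
#       print(node.GetId())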
def register_Ns3ObjectBase_methods(root_module, cls):
## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase() [constructor]
cls.add_constructor([])
## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase(ns3::ObjectBase const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectBase const &', 'arg0')])
## object-base.h (module 'core'): void ns3::ObjectBase::GetAttribute(std::string name, ns3::AttributeValue & value) const [member function]
cls.add_method('GetAttribute',
'void',
[param('std::string', 'name'), param('ns3::AttributeValue &', 'value')],
is_const=True)
## object-base.h (module 'core'): bool ns3::ObjectBase::GetAttributeFailSafe(std::string name, ns3::AttributeValue & attribute) const [member function]
cls.add_method('GetAttributeFailSafe',
'bool',
[param('std::string', 'name'), param('ns3::AttributeValue &', 'attribute')],
is_const=True)
## object-base.h (module 'core'): ns3::TypeId ns3::ObjectBase::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## object-base.h (module 'core'): static ns3::TypeId ns3::ObjectBase::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## object-base.h (module 'core'): void ns3::ObjectBase::SetAttribute(std::string name, ns3::AttributeValue const & value) [member function]
cls.add_method('SetAttribute',
'void',
[param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
## object-base.h (module 'core'): bool ns3::ObjectBase::SetAttributeFailSafe(std::string name, ns3::AttributeValue const & value) [member function]
cls.add_method('SetAttributeFailSafe',
'bool',
[param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceConnect',
'bool',
[param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceConnectWithoutContext',
'bool',
[param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceDisconnect',
'bool',
[param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceDisconnectWithoutContext',
'bool',
[param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): void ns3::ObjectBase::ConstructSelf(ns3::AttributeConstructionList const & attributes) [member function]
cls.add_method('ConstructSelf',
'void',
[param('ns3::AttributeConstructionList const &', 'attributes')],
visibility='protected')
## object-base.h (module 'core'): void ns3::ObjectBase::NotifyConstructionCompleted() [member function]
cls.add_method('NotifyConstructionCompleted',
'void',
[],
visibility='protected', is_virtual=True)
return
def register_Ns3ObjectDeleter_methods(root_module, cls):
## object.h (module 'core'): ns3::ObjectDeleter::ObjectDeleter() [constructor]
cls.add_constructor([])
## object.h (module 'core'): ns3::ObjectDeleter::ObjectDeleter(ns3::ObjectDeleter const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectDeleter const &', 'arg0')])
## object.h (module 'core'): static void ns3::ObjectDeleter::Delete(ns3::Object * object) [member function]
cls.add_method('Delete',
'void',
[param('ns3::Object *', 'object')],
is_static=True)
return
def register_Ns3ObjectFactory_methods(root_module, cls):
cls.add_output_stream_operator()
## object-factory.h (module 'core'): ns3::ObjectFactory::ObjectFactory(ns3::ObjectFactory const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectFactory const &', 'arg0')])
## object-factory.h (module 'core'): ns3::ObjectFactory::ObjectFactory() [constructor]
cls.add_constructor([])
## object-factory.h (module 'core'): ns3::ObjectFactory::ObjectFactory(std::string typeId) [constructor]
cls.add_constructor([param('std::string', 'typeId')])
## object-factory.h (module 'core'): ns3::Ptr<ns3::Object> ns3::ObjectFactory::Create() const [member function]
cls.add_method('Create',
'ns3::Ptr< ns3::Object >',
[],
is_const=True)
## object-factory.h (module 'core'): ns3::TypeId ns3::ObjectFactory::GetTypeId() const [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_const=True)
## object-factory.h (module 'core'): void ns3::ObjectFactory::Set(std::string name, ns3::AttributeValue const & value) [member function]
cls.add_method('Set',
'void',
[param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
## object-factory.h (module 'core'): void ns3::ObjectFactory::SetTypeId(ns3::TypeId tid) [member function]
cls.add_method('SetTypeId',
'void',
[param('ns3::TypeId', 'tid')])
## object-factory.h (module 'core'): void ns3::ObjectFactory::SetTypeId(char const * tid) [member function]
cls.add_method('SetTypeId',
'void',
[param('char const *', 'tid')])
## object-factory.h (module 'core'): void ns3::ObjectFactory::SetTypeId(std::string tid) [member function]
cls.add_method('SetTypeId',
'void',
[param('std::string', 'tid')])
return
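# Illustrative usage sketch: the registrations above expose ns3::ObjectFactory to
# Python roughly as below. The 'ns.core' import path and the example TypeId string
# are assumptions about how the built bindings are packaged, not part of this file.
def _example_object_factory_usage():
    import ns.core  # assumes the modular ns-3 python bindings are built and importable
    factory = ns.core.ObjectFactory()
    factory.SetTypeId("ns3::MinstrelWifiManager")  # any registered TypeId name works here (name assumed)
    obj = factory.Create()  # wraps the returned ns3::Ptr<ns3::Object>
    return obj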
def register_Ns3OriginatorBlockAckAgreement_methods(root_module, cls):
## originator-block-ack-agreement.h (module 'wifi'): ns3::OriginatorBlockAckAgreement::OriginatorBlockAckAgreement(ns3::OriginatorBlockAckAgreement const & arg0) [copy constructor]
cls.add_constructor([param('ns3::OriginatorBlockAckAgreement const &', 'arg0')])
## originator-block-ack-agreement.h (module 'wifi'): ns3::OriginatorBlockAckAgreement::OriginatorBlockAckAgreement() [constructor]
cls.add_constructor([])
## originator-block-ack-agreement.h (module 'wifi'): ns3::OriginatorBlockAckAgreement::OriginatorBlockAckAgreement(ns3::Mac48Address recipient, uint8_t tid) [constructor]
cls.add_constructor([param('ns3::Mac48Address', 'recipient'), param('uint8_t', 'tid')])
## originator-block-ack-agreement.h (module 'wifi'): void ns3::OriginatorBlockAckAgreement::CompleteExchange() [member function]
cls.add_method('CompleteExchange',
'void',
[])
## originator-block-ack-agreement.h (module 'wifi'): bool ns3::OriginatorBlockAckAgreement::IsBlockAckRequestNeeded() const [member function]
cls.add_method('IsBlockAckRequestNeeded',
'bool',
[],
is_const=True)
## originator-block-ack-agreement.h (module 'wifi'): bool ns3::OriginatorBlockAckAgreement::IsEstablished() const [member function]
cls.add_method('IsEstablished',
'bool',
[],
is_const=True)
## originator-block-ack-agreement.h (module 'wifi'): bool ns3::OriginatorBlockAckAgreement::IsInactive() const [member function]
cls.add_method('IsInactive',
'bool',
[],
is_const=True)
## originator-block-ack-agreement.h (module 'wifi'): bool ns3::OriginatorBlockAckAgreement::IsPending() const [member function]
cls.add_method('IsPending',
'bool',
[],
is_const=True)
## originator-block-ack-agreement.h (module 'wifi'): bool ns3::OriginatorBlockAckAgreement::IsUnsuccessful() const [member function]
cls.add_method('IsUnsuccessful',
'bool',
[],
is_const=True)
## originator-block-ack-agreement.h (module 'wifi'): void ns3::OriginatorBlockAckAgreement::NotifyMpduTransmission(uint16_t nextSeqNumber) [member function]
cls.add_method('NotifyMpduTransmission',
'void',
[param('uint16_t', 'nextSeqNumber')])
## originator-block-ack-agreement.h (module 'wifi'): void ns3::OriginatorBlockAckAgreement::SetState(ns3::OriginatorBlockAckAgreement::State state) [member function]
cls.add_method('SetState',
'void',
[param('ns3::OriginatorBlockAckAgreement::State', 'state')])
return
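# Note: the methods registered above model the originator-side record of an 802.11
# Block Ack agreement, keyed by the recipient address and TID passed to the
# constructor; its lifecycle is driven through SetState() and observed via the
# Is*() predicates and NotifyMpduTransmission().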
def register_Ns3PacketMetadata_methods(root_module, cls):
## packet-metadata.h (module 'network'): ns3::PacketMetadata::PacketMetadata(uint64_t uid, uint32_t size) [constructor]
cls.add_constructor([param('uint64_t', 'uid'), param('uint32_t', 'size')])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::PacketMetadata(ns3::PacketMetadata const & o) [copy constructor]
cls.add_constructor([param('ns3::PacketMetadata const &', 'o')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::AddAtEnd(ns3::PacketMetadata const & o) [member function]
cls.add_method('AddAtEnd',
'void',
[param('ns3::PacketMetadata const &', 'o')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::AddHeader(ns3::Header const & header, uint32_t size) [member function]
cls.add_method('AddHeader',
'void',
[param('ns3::Header const &', 'header'), param('uint32_t', 'size')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::AddPaddingAtEnd(uint32_t end) [member function]
cls.add_method('AddPaddingAtEnd',
'void',
[param('uint32_t', 'end')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::AddTrailer(ns3::Trailer const & trailer, uint32_t size) [member function]
cls.add_method('AddTrailer',
'void',
[param('ns3::Trailer const &', 'trailer'), param('uint32_t', 'size')])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator ns3::PacketMetadata::BeginItem(ns3::Buffer buffer) const [member function]
cls.add_method('BeginItem',
'ns3::PacketMetadata::ItemIterator',
[param('ns3::Buffer', 'buffer')],
is_const=True)
## packet-metadata.h (module 'network'): ns3::PacketMetadata ns3::PacketMetadata::CreateFragment(uint32_t start, uint32_t end) const [member function]
cls.add_method('CreateFragment',
'ns3::PacketMetadata',
[param('uint32_t', 'start'), param('uint32_t', 'end')],
is_const=True)
## packet-metadata.h (module 'network'): uint32_t ns3::PacketMetadata::Deserialize(uint8_t const * buffer, uint32_t size) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
## packet-metadata.h (module 'network'): static void ns3::PacketMetadata::Enable() [member function]
cls.add_method('Enable',
'void',
[],
is_static=True)
## packet-metadata.h (module 'network'): static void ns3::PacketMetadata::EnableChecking() [member function]
cls.add_method('EnableChecking',
'void',
[],
is_static=True)
## packet-metadata.h (module 'network'): uint32_t ns3::PacketMetadata::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## packet-metadata.h (module 'network'): uint64_t ns3::PacketMetadata::GetUid() const [member function]
cls.add_method('GetUid',
'uint64_t',
[],
is_const=True)
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::RemoveAtEnd(uint32_t end) [member function]
cls.add_method('RemoveAtEnd',
'void',
[param('uint32_t', 'end')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::RemoveAtStart(uint32_t start) [member function]
cls.add_method('RemoveAtStart',
'void',
[param('uint32_t', 'start')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::RemoveHeader(ns3::Header const & header, uint32_t size) [member function]
cls.add_method('RemoveHeader',
'void',
[param('ns3::Header const &', 'header'), param('uint32_t', 'size')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::RemoveTrailer(ns3::Trailer const & trailer, uint32_t size) [member function]
cls.add_method('RemoveTrailer',
'void',
[param('ns3::Trailer const &', 'trailer'), param('uint32_t', 'size')])
## packet-metadata.h (module 'network'): uint32_t ns3::PacketMetadata::Serialize(uint8_t * buffer, uint32_t maxSize) const [member function]
cls.add_method('Serialize',
'uint32_t',
[param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')],
is_const=True)
return
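# Illustrative sketch: packet metadata recording is disabled by default in ns-3 and
# is switched on globally through the static methods registered above. The
# 'ns.network' import path is an assumption about how the bindings are packaged.
def _example_enable_packet_metadata():
    import ns.network  # assumes the modular ns-3 python bindings are built and importable
    ns.network.PacketMetadata.Enable()          # record per-packet header/trailer metadata
    ns.network.PacketMetadata.EnableChecking()  # additionally verify add/remove consistency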
def register_Ns3PacketMetadataItem_methods(root_module, cls):
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::Item() [constructor]
cls.add_constructor([])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::Item(ns3::PacketMetadata::Item const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PacketMetadata::Item const &', 'arg0')])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::current [variable]
cls.add_instance_attribute('current', 'ns3::Buffer::Iterator', is_const=False)
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::currentSize [variable]
cls.add_instance_attribute('currentSize', 'uint32_t', is_const=False)
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::currentTrimedFromEnd [variable]
cls.add_instance_attribute('currentTrimedFromEnd', 'uint32_t', is_const=False)
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::currentTrimedFromStart [variable]
cls.add_instance_attribute('currentTrimedFromStart', 'uint32_t', is_const=False)
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::isFragment [variable]
cls.add_instance_attribute('isFragment', 'bool', is_const=False)
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::tid [variable]
cls.add_instance_attribute('tid', 'ns3::TypeId', is_const=False)
return
def register_Ns3PacketMetadataItemIterator_methods(root_module, cls):
## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator::ItemIterator(ns3::PacketMetadata::ItemIterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PacketMetadata::ItemIterator const &', 'arg0')])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator::ItemIterator(ns3::PacketMetadata const * metadata, ns3::Buffer buffer) [constructor]
cls.add_constructor([param('ns3::PacketMetadata const *', 'metadata'), param('ns3::Buffer', 'buffer')])
## packet-metadata.h (module 'network'): bool ns3::PacketMetadata::ItemIterator::HasNext() const [member function]
cls.add_method('HasNext',
'bool',
[],
is_const=True)
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item ns3::PacketMetadata::ItemIterator::Next() [member function]
cls.add_method('Next',
'ns3::PacketMetadata::Item',
[])
return
def register_Ns3PacketTagIterator_methods(root_module, cls):
## packet.h (module 'network'): ns3::PacketTagIterator::PacketTagIterator(ns3::PacketTagIterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PacketTagIterator const &', 'arg0')])
## packet.h (module 'network'): bool ns3::PacketTagIterator::HasNext() const [member function]
cls.add_method('HasNext',
'bool',
[],
is_const=True)
## packet.h (module 'network'): ns3::PacketTagIterator::Item ns3::PacketTagIterator::Next() [member function]
cls.add_method('Next',
'ns3::PacketTagIterator::Item',
[])
return
def register_Ns3PacketTagIteratorItem_methods(root_module, cls):
## packet.h (module 'network'): ns3::PacketTagIterator::Item::Item(ns3::PacketTagIterator::Item const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PacketTagIterator::Item const &', 'arg0')])
## packet.h (module 'network'): void ns3::PacketTagIterator::Item::GetTag(ns3::Tag & tag) const [member function]
cls.add_method('GetTag',
'void',
[param('ns3::Tag &', 'tag')],
is_const=True)
## packet.h (module 'network'): ns3::TypeId ns3::PacketTagIterator::Item::GetTypeId() const [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_const=True)
return
def register_Ns3PacketTagList_methods(root_module, cls):
## packet-tag-list.h (module 'network'): ns3::PacketTagList::PacketTagList() [constructor]
cls.add_constructor([])
## packet-tag-list.h (module 'network'): ns3::PacketTagList::PacketTagList(ns3::PacketTagList const & o) [copy constructor]
cls.add_constructor([param('ns3::PacketTagList const &', 'o')])
## packet-tag-list.h (module 'network'): void ns3::PacketTagList::Add(ns3::Tag const & tag) const [member function]
cls.add_method('Add',
'void',
[param('ns3::Tag const &', 'tag')],
is_const=True)
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData const * ns3::PacketTagList::Head() const [member function]
cls.add_method('Head',
'ns3::PacketTagList::TagData const *',
[],
is_const=True)
## packet-tag-list.h (module 'network'): bool ns3::PacketTagList::Peek(ns3::Tag & tag) const [member function]
cls.add_method('Peek',
'bool',
[param('ns3::Tag &', 'tag')],
is_const=True)
## packet-tag-list.h (module 'network'): bool ns3::PacketTagList::Remove(ns3::Tag & tag) [member function]
cls.add_method('Remove',
'bool',
[param('ns3::Tag &', 'tag')])
## packet-tag-list.h (module 'network'): void ns3::PacketTagList::RemoveAll() [member function]
cls.add_method('RemoveAll',
'void',
[])
return
def register_Ns3PacketTagListTagData_methods(root_module, cls):
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::TagData() [constructor]
cls.add_constructor([])
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::TagData(ns3::PacketTagList::TagData const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PacketTagList::TagData const &', 'arg0')])
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::count [variable]
cls.add_instance_attribute('count', 'uint32_t', is_const=False)
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::data [variable]
cls.add_instance_attribute('data', 'uint8_t [ 20 ]', is_const=False)
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::next [variable]
cls.add_instance_attribute('next', 'ns3::PacketTagList::TagData *', is_const=False)
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::tid [variable]
cls.add_instance_attribute('tid', 'ns3::TypeId', is_const=False)
return
def register_Ns3PcapFile_methods(root_module, cls):
## pcap-file.h (module 'network'): ns3::PcapFile::PcapFile() [constructor]
cls.add_constructor([])
## pcap-file.h (module 'network'): void ns3::PcapFile::Clear() [member function]
cls.add_method('Clear',
'void',
[])
## pcap-file.h (module 'network'): void ns3::PcapFile::Close() [member function]
cls.add_method('Close',
'void',
[])
## pcap-file.h (module 'network'): static bool ns3::PcapFile::Diff(std::string const & f1, std::string const & f2, uint32_t & sec, uint32_t & usec, uint32_t snapLen=ns3::PcapFile::SNAPLEN_DEFAULT) [member function]
cls.add_method('Diff',
'bool',
[param('std::string const &', 'f1'), param('std::string const &', 'f2'), param('uint32_t &', 'sec'), param('uint32_t &', 'usec'), param('uint32_t', 'snapLen', default_value='ns3::PcapFile::SNAPLEN_DEFAULT')],
is_static=True)
## pcap-file.h (module 'network'): bool ns3::PcapFile::Eof() const [member function]
cls.add_method('Eof',
'bool',
[],
is_const=True)
## pcap-file.h (module 'network'): bool ns3::PcapFile::Fail() const [member function]
cls.add_method('Fail',
'bool',
[],
is_const=True)
## pcap-file.h (module 'network'): uint32_t ns3::PcapFile::GetDataLinkType() [member function]
cls.add_method('GetDataLinkType',
'uint32_t',
[])
## pcap-file.h (module 'network'): uint32_t ns3::PcapFile::GetMagic() [member function]
cls.add_method('GetMagic',
'uint32_t',
[])
## pcap-file.h (module 'network'): uint32_t ns3::PcapFile::GetSigFigs() [member function]
cls.add_method('GetSigFigs',
'uint32_t',
[])
## pcap-file.h (module 'network'): uint32_t ns3::PcapFile::GetSnapLen() [member function]
cls.add_method('GetSnapLen',
'uint32_t',
[])
## pcap-file.h (module 'network'): bool ns3::PcapFile::GetSwapMode() [member function]
cls.add_method('GetSwapMode',
'bool',
[])
## pcap-file.h (module 'network'): int32_t ns3::PcapFile::GetTimeZoneOffset() [member function]
cls.add_method('GetTimeZoneOffset',
'int32_t',
[])
## pcap-file.h (module 'network'): uint16_t ns3::PcapFile::GetVersionMajor() [member function]
cls.add_method('GetVersionMajor',
'uint16_t',
[])
## pcap-file.h (module 'network'): uint16_t ns3::PcapFile::GetVersionMinor() [member function]
cls.add_method('GetVersionMinor',
'uint16_t',
[])
## pcap-file.h (module 'network'): void ns3::PcapFile::Init(uint32_t dataLinkType, uint32_t snapLen=ns3::PcapFile::SNAPLEN_DEFAULT, int32_t timeZoneCorrection=ns3::PcapFile::ZONE_DEFAULT, bool swapMode=false) [member function]
cls.add_method('Init',
'void',
[param('uint32_t', 'dataLinkType'), param('uint32_t', 'snapLen', default_value='ns3::PcapFile::SNAPLEN_DEFAULT'), param('int32_t', 'timeZoneCorrection', default_value='ns3::PcapFile::ZONE_DEFAULT'), param('bool', 'swapMode', default_value='false')])
## pcap-file.h (module 'network'): void ns3::PcapFile::Open(std::string const & filename, std::_Ios_Openmode mode) [member function]
cls.add_method('Open',
'void',
[param('std::string const &', 'filename'), param('std::_Ios_Openmode', 'mode')])
## pcap-file.h (module 'network'): void ns3::PcapFile::Read(uint8_t * const data, uint32_t maxBytes, uint32_t & tsSec, uint32_t & tsUsec, uint32_t & inclLen, uint32_t & origLen, uint32_t & readLen) [member function]
cls.add_method('Read',
'void',
[param('uint8_t * const', 'data'), param('uint32_t', 'maxBytes'), param('uint32_t &', 'tsSec'), param('uint32_t &', 'tsUsec'), param('uint32_t &', 'inclLen'), param('uint32_t &', 'origLen'), param('uint32_t &', 'readLen')])
## pcap-file.h (module 'network'): void ns3::PcapFile::Write(uint32_t tsSec, uint32_t tsUsec, uint8_t const * const data, uint32_t totalLen) [member function]
cls.add_method('Write',
'void',
[param('uint32_t', 'tsSec'), param('uint32_t', 'tsUsec'), param('uint8_t const * const', 'data'), param('uint32_t', 'totalLen')])
## pcap-file.h (module 'network'): void ns3::PcapFile::Write(uint32_t tsSec, uint32_t tsUsec, ns3::Ptr<ns3::Packet const> p) [member function]
cls.add_method('Write',
'void',
[param('uint32_t', 'tsSec'), param('uint32_t', 'tsUsec'), param('ns3::Ptr< ns3::Packet const >', 'p')])
## pcap-file.h (module 'network'): void ns3::PcapFile::Write(uint32_t tsSec, uint32_t tsUsec, ns3::Header & header, ns3::Ptr<ns3::Packet const> p) [member function]
cls.add_method('Write',
'void',
[param('uint32_t', 'tsSec'), param('uint32_t', 'tsUsec'), param('ns3::Header &', 'header'), param('ns3::Ptr< ns3::Packet const >', 'p')])
## pcap-file.h (module 'network'): ns3::PcapFile::SNAPLEN_DEFAULT [variable]
cls.add_static_attribute('SNAPLEN_DEFAULT', 'uint32_t const', is_const=True)
## pcap-file.h (module 'network'): ns3::PcapFile::ZONE_DEFAULT [variable]
cls.add_static_attribute('ZONE_DEFAULT', 'int32_t const', is_const=True)
return
def register_Ns3PcapHelper_methods(root_module, cls):
## trace-helper.h (module 'network'): ns3::PcapHelper::PcapHelper(ns3::PcapHelper const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PcapHelper const &', 'arg0')])
## trace-helper.h (module 'network'): ns3::PcapHelper::PcapHelper() [constructor]
cls.add_constructor([])
## trace-helper.h (module 'network'): ns3::Ptr<ns3::PcapFileWrapper> ns3::PcapHelper::CreateFile(std::string filename, std::_Ios_Openmode filemode, uint32_t dataLinkType, uint32_t snapLen=65535, int32_t tzCorrection=0) [member function]
cls.add_method('CreateFile',
'ns3::Ptr< ns3::PcapFileWrapper >',
[param('std::string', 'filename'), param('std::_Ios_Openmode', 'filemode'), param('uint32_t', 'dataLinkType'), param('uint32_t', 'snapLen', default_value='65535'), param('int32_t', 'tzCorrection', default_value='0')])
## trace-helper.h (module 'network'): std::string ns3::PcapHelper::GetFilenameFromDevice(std::string prefix, ns3::Ptr<ns3::NetDevice> device, bool useObjectNames=true) [member function]
cls.add_method('GetFilenameFromDevice',
'std::string',
[param('std::string', 'prefix'), param('ns3::Ptr< ns3::NetDevice >', 'device'), param('bool', 'useObjectNames', default_value='true')])
## trace-helper.h (module 'network'): std::string ns3::PcapHelper::GetFilenameFromInterfacePair(std::string prefix, ns3::Ptr<ns3::Object> object, uint32_t interface, bool useObjectNames=true) [member function]
cls.add_method('GetFilenameFromInterfacePair',
'std::string',
[param('std::string', 'prefix'), param('ns3::Ptr< ns3::Object >', 'object'), param('uint32_t', 'interface'), param('bool', 'useObjectNames', default_value='true')])
return
def register_Ns3PcapHelperForDevice_methods(root_module, cls):
## trace-helper.h (module 'network'): ns3::PcapHelperForDevice::PcapHelperForDevice(ns3::PcapHelperForDevice const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PcapHelperForDevice const &', 'arg0')])
## trace-helper.h (module 'network'): ns3::PcapHelperForDevice::PcapHelperForDevice() [constructor]
cls.add_constructor([])
## trace-helper.h (module 'network'): void ns3::PcapHelperForDevice::EnablePcap(std::string prefix, ns3::Ptr<ns3::NetDevice> nd, bool promiscuous=false, bool explicitFilename=false) [member function]
cls.add_method('EnablePcap',
'void',
[param('std::string', 'prefix'), param('ns3::Ptr< ns3::NetDevice >', 'nd'), param('bool', 'promiscuous', default_value='false'), param('bool', 'explicitFilename', default_value='false')])
## trace-helper.h (module 'network'): void ns3::PcapHelperForDevice::EnablePcap(std::string prefix, std::string ndName, bool promiscuous=false, bool explicitFilename=false) [member function]
cls.add_method('EnablePcap',
'void',
[param('std::string', 'prefix'), param('std::string', 'ndName'), param('bool', 'promiscuous', default_value='false'), param('bool', 'explicitFilename', default_value='false')])
## trace-helper.h (module 'network'): void ns3::PcapHelperForDevice::EnablePcap(std::string prefix, ns3::NetDeviceContainer d, bool promiscuous=false) [member function]
cls.add_method('EnablePcap',
'void',
[param('std::string', 'prefix'), param('ns3::NetDeviceContainer', 'd'), param('bool', 'promiscuous', default_value='false')])
## trace-helper.h (module 'network'): void ns3::PcapHelperForDevice::EnablePcap(std::string prefix, ns3::NodeContainer n, bool promiscuous=false) [member function]
cls.add_method('EnablePcap',
'void',
[param('std::string', 'prefix'), param('ns3::NodeContainer', 'n'), param('bool', 'promiscuous', default_value='false')])
## trace-helper.h (module 'network'): void ns3::PcapHelperForDevice::EnablePcap(std::string prefix, uint32_t nodeid, uint32_t deviceid, bool promiscuous=false) [member function]
cls.add_method('EnablePcap',
'void',
[param('std::string', 'prefix'), param('uint32_t', 'nodeid'), param('uint32_t', 'deviceid'), param('bool', 'promiscuous', default_value='false')])
## trace-helper.h (module 'network'): void ns3::PcapHelperForDevice::EnablePcapAll(std::string prefix, bool promiscuous=false) [member function]
cls.add_method('EnablePcapAll',
'void',
[param('std::string', 'prefix'), param('bool', 'promiscuous', default_value='false')])
## trace-helper.h (module 'network'): void ns3::PcapHelperForDevice::EnablePcapInternal(std::string prefix, ns3::Ptr<ns3::NetDevice> nd, bool promiscuous, bool explicitFilename) [member function]
cls.add_method('EnablePcapInternal',
'void',
[param('std::string', 'prefix'), param('ns3::Ptr< ns3::NetDevice >', 'nd'), param('bool', 'promiscuous'), param('bool', 'explicitFilename')],
is_pure_virtual=True, is_virtual=True)
return
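# Note: EnablePcapInternal() is registered as pure virtual above, so
# PcapHelperForDevice is only used through concrete device helpers that implement
# it; the EnablePcap() overloads merely fan out over a device, a named device, a
# NetDeviceContainer, a NodeContainer, or a (nodeid, deviceid) pair before
# delegating to EnablePcapInternal().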
def register_Ns3RandomVariable_methods(root_module, cls):
cls.add_output_stream_operator()
## random-variable.h (module 'core'): ns3::RandomVariable::RandomVariable() [constructor]
cls.add_constructor([])
## random-variable.h (module 'core'): ns3::RandomVariable::RandomVariable(ns3::RandomVariable const & o) [copy constructor]
cls.add_constructor([param('ns3::RandomVariable const &', 'o')])
## random-variable.h (module 'core'): uint32_t ns3::RandomVariable::GetInteger() const [member function]
cls.add_method('GetInteger',
'uint32_t',
[],
is_const=True)
## random-variable.h (module 'core'): double ns3::RandomVariable::GetValue() const [member function]
cls.add_method('GetValue',
'double',
[],
is_const=True)
return
def register_Ns3RateInfo_methods(root_module, cls):
## minstrel-wifi-manager.h (module 'wifi'): ns3::RateInfo::RateInfo() [constructor]
cls.add_constructor([])
## minstrel-wifi-manager.h (module 'wifi'): ns3::RateInfo::RateInfo(ns3::RateInfo const & arg0) [copy constructor]
cls.add_constructor([param('ns3::RateInfo const &', 'arg0')])
## minstrel-wifi-manager.h (module 'wifi'): ns3::RateInfo::adjustedRetryCount [variable]
cls.add_instance_attribute('adjustedRetryCount', 'uint32_t', is_const=False)
## minstrel-wifi-manager.h (module 'wifi'): ns3::RateInfo::attemptHist [variable]
cls.add_instance_attribute('attemptHist', 'uint64_t', is_const=False)
## minstrel-wifi-manager.h (module 'wifi'): ns3::RateInfo::ewmaProb [variable]
cls.add_instance_attribute('ewmaProb', 'uint32_t', is_const=False)
## minstrel-wifi-manager.h (module 'wifi'): ns3::RateInfo::numRateAttempt [variable]
cls.add_instance_attribute('numRateAttempt', 'uint32_t', is_const=False)
## minstrel-wifi-manager.h (module 'wifi'): ns3::RateInfo::numRateSuccess [variable]
cls.add_instance_attribute('numRateSuccess', 'uint32_t', is_const=False)
## minstrel-wifi-manager.h (module 'wifi'): ns3::RateInfo::perfectTxTime [variable]
cls.add_instance_attribute('perfectTxTime', 'ns3::Time', is_const=False)
## minstrel-wifi-manager.h (module 'wifi'): ns3::RateInfo::prevNumRateAttempt [variable]
cls.add_instance_attribute('prevNumRateAttempt', 'uint32_t', is_const=False)
## minstrel-wifi-manager.h (module 'wifi'): ns3::RateInfo::prevNumRateSuccess [variable]
cls.add_instance_attribute('prevNumRateSuccess', 'uint32_t', is_const=False)
## minstrel-wifi-manager.h (module 'wifi'): ns3::RateInfo::prob [variable]
cls.add_instance_attribute('prob', 'uint32_t', is_const=False)
## minstrel-wifi-manager.h (module 'wifi'): ns3::RateInfo::retryCount [variable]
cls.add_instance_attribute('retryCount', 'uint32_t', is_const=False)
## minstrel-wifi-manager.h (module 'wifi'): ns3::RateInfo::successHist [variable]
cls.add_instance_attribute('successHist', 'uint64_t', is_const=False)
## minstrel-wifi-manager.h (module 'wifi'): ns3::RateInfo::throughput [variable]
cls.add_instance_attribute('throughput', 'uint32_t', is_const=False)
return
def register_Ns3SeedManager_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::SeedManager::SeedManager() [constructor]
cls.add_constructor([])
## random-variable.h (module 'core'): ns3::SeedManager::SeedManager(ns3::SeedManager const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SeedManager const &', 'arg0')])
## random-variable.h (module 'core'): static bool ns3::SeedManager::CheckSeed(uint32_t seed) [member function]
cls.add_method('CheckSeed',
'bool',
[param('uint32_t', 'seed')],
is_static=True)
## random-variable.h (module 'core'): static uint32_t ns3::SeedManager::GetRun() [member function]
cls.add_method('GetRun',
'uint32_t',
[],
is_static=True)
## random-variable.h (module 'core'): static uint32_t ns3::SeedManager::GetSeed() [member function]
cls.add_method('GetSeed',
'uint32_t',
[],
is_static=True)
## random-variable.h (module 'core'): static void ns3::SeedManager::SetRun(uint32_t run) [member function]
cls.add_method('SetRun',
'void',
[param('uint32_t', 'run')],
is_static=True)
## random-variable.h (module 'core'): static void ns3::SeedManager::SetSeed(uint32_t seed) [member function]
cls.add_method('SetSeed',
'void',
[param('uint32_t', 'seed')],
is_static=True)
return
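# Illustrative usage sketch built only from the static methods registered above;
# the 'ns.core' import path is an assumption about how the bindings are packaged.
def _example_seed_manager_usage():
    import ns.core  # assumes the modular ns-3 python bindings are built and importable
    ns.core.SeedManager.SetSeed(12345)  # global RNG seed (CheckSeed() can validate a candidate)
    ns.core.SeedManager.SetRun(3)       # select an independent run number / substream
    return ns.core.SeedManager.GetSeed(), ns.core.SeedManager.GetRun()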
def register_Ns3SequentialVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::SequentialVariable::SequentialVariable(ns3::SequentialVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SequentialVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::SequentialVariable::SequentialVariable(double f, double l, double i=1, uint32_t c=1) [constructor]
cls.add_constructor([param('double', 'f'), param('double', 'l'), param('double', 'i', default_value='1'), param('uint32_t', 'c', default_value='1')])
## random-variable.h (module 'core'): ns3::SequentialVariable::SequentialVariable(double f, double l, ns3::RandomVariable const & i, uint32_t c=1) [constructor]
cls.add_constructor([param('double', 'f'), param('double', 'l'), param('ns3::RandomVariable const &', 'i'), param('uint32_t', 'c', default_value='1')])
return
def register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount(ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3Simulator_methods(root_module, cls):
## simulator.h (module 'core'): ns3::Simulator::Simulator(ns3::Simulator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Simulator const &', 'arg0')])
## simulator.h (module 'core'): static void ns3::Simulator::Cancel(ns3::EventId const & id) [member function]
cls.add_method('Cancel',
'void',
[param('ns3::EventId const &', 'id')],
is_static=True)
## simulator.h (module 'core'): static void ns3::Simulator::Destroy() [member function]
cls.add_method('Destroy',
'void',
[],
is_static=True)
## simulator.h (module 'core'): static uint32_t ns3::Simulator::GetContext() [member function]
cls.add_method('GetContext',
'uint32_t',
[],
is_static=True)
## simulator.h (module 'core'): static ns3::Time ns3::Simulator::GetDelayLeft(ns3::EventId const & id) [member function]
cls.add_method('GetDelayLeft',
'ns3::Time',
[param('ns3::EventId const &', 'id')],
is_static=True)
## simulator.h (module 'core'): static ns3::Ptr<ns3::SimulatorImpl> ns3::Simulator::GetImplementation() [member function]
cls.add_method('GetImplementation',
'ns3::Ptr< ns3::SimulatorImpl >',
[],
is_static=True)
## simulator.h (module 'core'): static ns3::Time ns3::Simulator::GetMaximumSimulationTime() [member function]
cls.add_method('GetMaximumSimulationTime',
'ns3::Time',
[],
is_static=True)
## simulator.h (module 'core'): static uint32_t ns3::Simulator::GetSystemId() [member function]
cls.add_method('GetSystemId',
'uint32_t',
[],
is_static=True)
## simulator.h (module 'core'): static bool ns3::Simulator::IsExpired(ns3::EventId const & id) [member function]
cls.add_method('IsExpired',
'bool',
[param('ns3::EventId const &', 'id')],
is_static=True)
## simulator.h (module 'core'): static bool ns3::Simulator::IsFinished() [member function]
cls.add_method('IsFinished',
'bool',
[],
is_static=True)
## simulator.h (module 'core'): static ns3::Time ns3::Simulator::Now() [member function]
cls.add_method('Now',
'ns3::Time',
[],
is_static=True)
## simulator.h (module 'core'): static void ns3::Simulator::Remove(ns3::EventId const & id) [member function]
cls.add_method('Remove',
'void',
[param('ns3::EventId const &', 'id')],
is_static=True)
## simulator.h (module 'core'): static void ns3::Simulator::SetImplementation(ns3::Ptr<ns3::SimulatorImpl> impl) [member function]
cls.add_method('SetImplementation',
'void',
[param('ns3::Ptr< ns3::SimulatorImpl >', 'impl')],
is_static=True)
## simulator.h (module 'core'): static void ns3::Simulator::SetScheduler(ns3::ObjectFactory schedulerFactory) [member function]
cls.add_method('SetScheduler',
'void',
[param('ns3::ObjectFactory', 'schedulerFactory')],
is_static=True)
## simulator.h (module 'core'): static void ns3::Simulator::Stop() [member function]
cls.add_method('Stop',
'void',
[],
is_static=True)
## simulator.h (module 'core'): static void ns3::Simulator::Stop(ns3::Time const & time) [member function]
cls.add_method('Stop',
'void',
[param('ns3::Time const &', 'time')],
is_static=True)
return
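# Illustrative teardown sketch using only static methods registered above; Run()
# and the Schedule() family are not part of these registrations, so they are
# deliberately not shown. 'ns.core' is an assumed import path.
def _example_simulator_teardown():
    import ns.core  # assumes the modular ns-3 python bindings are built and importable
    ns.core.Simulator.Stop()       # request that the simulator stop processing events
    now = ns.core.Simulator.Now()  # current simulation time as an ns3::Time
    ns.core.Simulator.Destroy()    # release simulator resources at the end of the run
    return now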
def register_Ns3StatusCode_methods(root_module, cls):
cls.add_output_stream_operator()
## status-code.h (module 'wifi'): ns3::StatusCode::StatusCode(ns3::StatusCode const & arg0) [copy constructor]
cls.add_constructor([param('ns3::StatusCode const &', 'arg0')])
## status-code.h (module 'wifi'): ns3::StatusCode::StatusCode() [constructor]
cls.add_constructor([])
## status-code.h (module 'wifi'): ns3::Buffer::Iterator ns3::StatusCode::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'ns3::Buffer::Iterator',
[param('ns3::Buffer::Iterator', 'start')])
## status-code.h (module 'wifi'): uint32_t ns3::StatusCode::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## status-code.h (module 'wifi'): bool ns3::StatusCode::IsSuccess() const [member function]
cls.add_method('IsSuccess',
'bool',
[],
is_const=True)
## status-code.h (module 'wifi'): ns3::Buffer::Iterator ns3::StatusCode::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'ns3::Buffer::Iterator',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True)
## status-code.h (module 'wifi'): void ns3::StatusCode::SetFailure() [member function]
cls.add_method('SetFailure',
'void',
[])
## status-code.h (module 'wifi'): void ns3::StatusCode::SetSuccess() [member function]
cls.add_method('SetSuccess',
'void',
[])
return
def register_Ns3Tag_methods(root_module, cls):
## tag.h (module 'network'): ns3::Tag::Tag() [constructor]
cls.add_constructor([])
## tag.h (module 'network'): ns3::Tag::Tag(ns3::Tag const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Tag const &', 'arg0')])
## tag.h (module 'network'): void ns3::Tag::Deserialize(ns3::TagBuffer i) [member function]
cls.add_method('Deserialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_pure_virtual=True, is_virtual=True)
## tag.h (module 'network'): uint32_t ns3::Tag::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## tag.h (module 'network'): static ns3::TypeId ns3::Tag::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## tag.h (module 'network'): void ns3::Tag::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## tag.h (module 'network'): void ns3::Tag::Serialize(ns3::TagBuffer i) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3TagBuffer_methods(root_module, cls):
## tag-buffer.h (module 'network'): ns3::TagBuffer::TagBuffer(ns3::TagBuffer const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TagBuffer const &', 'arg0')])
## tag-buffer.h (module 'network'): ns3::TagBuffer::TagBuffer(uint8_t * start, uint8_t * end) [constructor]
cls.add_constructor([param('uint8_t *', 'start'), param('uint8_t *', 'end')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::CopyFrom(ns3::TagBuffer o) [member function]
cls.add_method('CopyFrom',
'void',
[param('ns3::TagBuffer', 'o')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::Read(uint8_t * buffer, uint32_t size) [member function]
cls.add_method('Read',
'void',
[param('uint8_t *', 'buffer'), param('uint32_t', 'size')])
## tag-buffer.h (module 'network'): double ns3::TagBuffer::ReadDouble() [member function]
cls.add_method('ReadDouble',
'double',
[])
## tag-buffer.h (module 'network'): uint16_t ns3::TagBuffer::ReadU16() [member function]
cls.add_method('ReadU16',
'uint16_t',
[])
## tag-buffer.h (module 'network'): uint32_t ns3::TagBuffer::ReadU32() [member function]
cls.add_method('ReadU32',
'uint32_t',
[])
## tag-buffer.h (module 'network'): uint64_t ns3::TagBuffer::ReadU64() [member function]
cls.add_method('ReadU64',
'uint64_t',
[])
## tag-buffer.h (module 'network'): uint8_t ns3::TagBuffer::ReadU8() [member function]
cls.add_method('ReadU8',
'uint8_t',
[])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::TrimAtEnd(uint32_t trim) [member function]
cls.add_method('TrimAtEnd',
'void',
[param('uint32_t', 'trim')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::Write(uint8_t const * buffer, uint32_t size) [member function]
cls.add_method('Write',
'void',
[param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteDouble(double v) [member function]
cls.add_method('WriteDouble',
'void',
[param('double', 'v')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU16(uint16_t data) [member function]
cls.add_method('WriteU16',
'void',
[param('uint16_t', 'data')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU32(uint32_t data) [member function]
cls.add_method('WriteU32',
'void',
[param('uint32_t', 'data')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU64(uint64_t v) [member function]
cls.add_method('WriteU64',
'void',
[param('uint64_t', 'v')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU8(uint8_t v) [member function]
cls.add_method('WriteU8',
'void',
[param('uint8_t', 'v')])
return
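# Note: TagBuffer is a thin cursor over a raw [start, end) byte range, so it is
# rarely constructed directly from Python; the Write*/Read* methods registered
# above are meant to be used symmetrically by a Tag subclass, pairing each
# WriteU32()/WriteDouble()/... in Serialize() with the matching Read call in
# Deserialize().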
def register_Ns3TriangularVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::TriangularVariable::TriangularVariable(ns3::TriangularVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TriangularVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::TriangularVariable::TriangularVariable() [constructor]
cls.add_constructor([])
## random-variable.h (module 'core'): ns3::TriangularVariable::TriangularVariable(double s, double l, double mean) [constructor]
cls.add_constructor([param('double', 's'), param('double', 'l'), param('double', 'mean')])
return
def register_Ns3TypeId_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_binary_comparison_operator('<')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## type-id.h (module 'core'): ns3::TypeId::TypeId(char const * name) [constructor]
cls.add_constructor([param('char const *', 'name')])
## type-id.h (module 'core'): ns3::TypeId::TypeId() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeId::TypeId(ns3::TypeId const & o) [copy constructor]
cls.add_constructor([param('ns3::TypeId const &', 'o')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('AddAttribute',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, uint32_t flags, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('AddAttribute',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('uint32_t', 'flags'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddTraceSource(std::string name, std::string help, ns3::Ptr<ns3::TraceSourceAccessor const> accessor) [member function]
cls.add_method('AddTraceSource',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor')])
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation ns3::TypeId::GetAttribute(uint32_t i) const [member function]
cls.add_method('GetAttribute',
'ns3::TypeId::AttributeInformation',
[param('uint32_t', 'i')],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeId::GetAttributeFullName(uint32_t i) const [member function]
cls.add_method('GetAttributeFullName',
'std::string',
[param('uint32_t', 'i')],
is_const=True)
## type-id.h (module 'core'): uint32_t ns3::TypeId::GetAttributeN() const [member function]
cls.add_method('GetAttributeN',
'uint32_t',
[],
is_const=True)
## type-id.h (module 'core'): ns3::Callback<ns3::ObjectBase*,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> ns3::TypeId::GetConstructor() const [member function]
cls.add_method('GetConstructor',
'ns3::Callback< ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >',
[],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeId::GetGroupName() const [member function]
cls.add_method('GetGroupName',
'std::string',
[],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeId::GetName() const [member function]
cls.add_method('GetName',
'std::string',
[],
is_const=True)
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::GetParent() const [member function]
cls.add_method('GetParent',
'ns3::TypeId',
[],
is_const=True)
## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::GetRegistered(uint32_t i) [member function]
cls.add_method('GetRegistered',
'ns3::TypeId',
[param('uint32_t', 'i')],
is_static=True)
## type-id.h (module 'core'): static uint32_t ns3::TypeId::GetRegisteredN() [member function]
cls.add_method('GetRegisteredN',
'uint32_t',
[],
is_static=True)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation ns3::TypeId::GetTraceSource(uint32_t i) const [member function]
cls.add_method('GetTraceSource',
'ns3::TypeId::TraceSourceInformation',
[param('uint32_t', 'i')],
is_const=True)
## type-id.h (module 'core'): uint32_t ns3::TypeId::GetTraceSourceN() const [member function]
cls.add_method('GetTraceSourceN',
'uint32_t',
[],
is_const=True)
## type-id.h (module 'core'): uint16_t ns3::TypeId::GetUid() const [member function]
cls.add_method('GetUid',
'uint16_t',
[],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::HasConstructor() const [member function]
cls.add_method('HasConstructor',
'bool',
[],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::HasParent() const [member function]
cls.add_method('HasParent',
'bool',
[],
is_const=True)
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::HideFromDocumentation() [member function]
cls.add_method('HideFromDocumentation',
'ns3::TypeId',
[])
## type-id.h (module 'core'): bool ns3::TypeId::IsChildOf(ns3::TypeId other) const [member function]
cls.add_method('IsChildOf',
'bool',
[param('ns3::TypeId', 'other')],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::LookupAttributeByName(std::string name, ns3::TypeId::AttributeInformation * info) const [member function]
cls.add_method('LookupAttributeByName',
'bool',
[param('std::string', 'name'), param('ns3::TypeId::AttributeInformation *', 'info', transfer_ownership=False)],
is_const=True)
## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::LookupByName(std::string name) [member function]
cls.add_method('LookupByName',
'ns3::TypeId',
[param('std::string', 'name')],
is_static=True)
## type-id.h (module 'core'): ns3::Ptr<ns3::TraceSourceAccessor const> ns3::TypeId::LookupTraceSourceByName(std::string name) const [member function]
cls.add_method('LookupTraceSourceByName',
'ns3::Ptr< ns3::TraceSourceAccessor const >',
[param('std::string', 'name')],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::MustHideFromDocumentation() const [member function]
cls.add_method('MustHideFromDocumentation',
'bool',
[],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::SetAttributeInitialValue(uint32_t i, ns3::Ptr<ns3::AttributeValue const> initialValue) [member function]
cls.add_method('SetAttributeInitialValue',
'bool',
[param('uint32_t', 'i'), param('ns3::Ptr< ns3::AttributeValue const >', 'initialValue')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetGroupName(std::string groupName) [member function]
cls.add_method('SetGroupName',
'ns3::TypeId',
[param('std::string', 'groupName')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetParent(ns3::TypeId tid) [member function]
cls.add_method('SetParent',
'ns3::TypeId',
[param('ns3::TypeId', 'tid')])
## type-id.h (module 'core'): void ns3::TypeId::SetUid(uint16_t tid) [member function]
cls.add_method('SetUid',
'void',
[param('uint16_t', 'tid')])
return
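# Illustrative introspection sketch using only TypeId methods registered above;
# the 'ns.core' import path is an assumption, and the caller supplies a TypeId
# name that is actually registered (for example "ns3::Object").
def _example_type_id_introspection(type_name):
    import ns.core  # assumes the modular ns-3 python bindings are built and importable
    tid = ns.core.TypeId.LookupByName(type_name)
    # Enumerate the attributes registered on this TypeId as full "ns3::Class::Attr" names.
    return [tid.GetAttributeFullName(i) for i in range(tid.GetAttributeN())]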
def register_Ns3TypeIdAttributeInformation_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation(ns3::TypeId::AttributeInformation const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeId::AttributeInformation const &', 'arg0')])
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::accessor [variable]
cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::AttributeAccessor const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::checker [variable]
cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::flags [variable]
cls.add_instance_attribute('flags', 'uint32_t', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::help [variable]
cls.add_instance_attribute('help', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::initialValue [variable]
cls.add_instance_attribute('initialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::name [variable]
cls.add_instance_attribute('name', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::originalInitialValue [variable]
cls.add_instance_attribute('originalInitialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
return
def register_Ns3TypeIdTraceSourceInformation_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation(ns3::TypeId::TraceSourceInformation const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeId::TraceSourceInformation const &', 'arg0')])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::accessor [variable]
cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::TraceSourceAccessor const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::help [variable]
cls.add_instance_attribute('help', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::name [variable]
cls.add_instance_attribute('name', 'std::string', is_const=False)
return
def register_Ns3UniformVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::UniformVariable::UniformVariable(ns3::UniformVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::UniformVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::UniformVariable::UniformVariable() [constructor]
cls.add_constructor([])
## random-variable.h (module 'core'): ns3::UniformVariable::UniformVariable(double s, double l) [constructor]
cls.add_constructor([param('double', 's'), param('double', 'l')])
## random-variable.h (module 'core'): uint32_t ns3::UniformVariable::GetInteger(uint32_t s, uint32_t l) [member function]
cls.add_method('GetInteger',
'uint32_t',
[param('uint32_t', 's'), param('uint32_t', 'l')])
## random-variable.h (module 'core'): double ns3::UniformVariable::GetValue() const [member function]
cls.add_method('GetValue',
'double',
[],
is_const=True)
## random-variable.h (module 'core'): double ns3::UniformVariable::GetValue(double s, double l) [member function]
cls.add_method('GetValue',
'double',
[param('double', 's'), param('double', 'l')])
return
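# Illustrative sketch of the RandomVariable-based API bound above; 'ns.core' is an
# assumed import path for the built bindings.
def _example_uniform_variable():
    import ns.core  # assumes the modular ns-3 python bindings are built and importable
    uv = ns.core.UniformVariable(0.0, 10.0)   # uniform draw over the configured range
    return uv.GetValue(), uv.GetInteger(0, 10)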
def register_Ns3WeibullVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::WeibullVariable::WeibullVariable(ns3::WeibullVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WeibullVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::WeibullVariable::WeibullVariable() [constructor]
cls.add_constructor([])
## random-variable.h (module 'core'): ns3::WeibullVariable::WeibullVariable(double m) [constructor]
cls.add_constructor([param('double', 'm')])
## random-variable.h (module 'core'): ns3::WeibullVariable::WeibullVariable(double m, double s) [constructor]
cls.add_constructor([param('double', 'm'), param('double', 's')])
## random-variable.h (module 'core'): ns3::WeibullVariable::WeibullVariable(double m, double s, double b) [constructor]
cls.add_constructor([param('double', 'm'), param('double', 's'), param('double', 'b')])
return
def register_Ns3WifiHelper_methods(root_module, cls):
## wifi-helper.h (module 'wifi'): ns3::WifiHelper::WifiHelper(ns3::WifiHelper const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WifiHelper const &', 'arg0')])
## wifi-helper.h (module 'wifi'): ns3::WifiHelper::WifiHelper() [constructor]
cls.add_constructor([])
## wifi-helper.h (module 'wifi'): static ns3::WifiHelper ns3::WifiHelper::Default() [member function]
cls.add_method('Default',
'ns3::WifiHelper',
[],
is_static=True)
## wifi-helper.h (module 'wifi'): static void ns3::WifiHelper::EnableLogComponents() [member function]
cls.add_method('EnableLogComponents',
'void',
[],
is_static=True)
## wifi-helper.h (module 'wifi'): ns3::NetDeviceContainer ns3::WifiHelper::Install(ns3::WifiPhyHelper const & phy, ns3::WifiMacHelper const & mac, ns3::NodeContainer c) const [member function]
cls.add_method('Install',
'ns3::NetDeviceContainer',
[param('ns3::WifiPhyHelper const &', 'phy'), param('ns3::WifiMacHelper const &', 'mac'), param('ns3::NodeContainer', 'c')],
is_const=True)
## wifi-helper.h (module 'wifi'): ns3::NetDeviceContainer ns3::WifiHelper::Install(ns3::WifiPhyHelper const & phy, ns3::WifiMacHelper const & mac, ns3::Ptr<ns3::Node> node) const [member function]
cls.add_method('Install',
'ns3::NetDeviceContainer',
[param('ns3::WifiPhyHelper const &', 'phy'), param('ns3::WifiMacHelper const &', 'mac'), param('ns3::Ptr< ns3::Node >', 'node')],
is_const=True)
## wifi-helper.h (module 'wifi'): ns3::NetDeviceContainer ns3::WifiHelper::Install(ns3::WifiPhyHelper const & phy, ns3::WifiMacHelper const & mac, std::string nodeName) const [member function]
cls.add_method('Install',
'ns3::NetDeviceContainer',
[param('ns3::WifiPhyHelper const &', 'phy'), param('ns3::WifiMacHelper const &', 'mac'), param('std::string', 'nodeName')],
is_const=True)
## wifi-helper.h (module 'wifi'): void ns3::WifiHelper::SetRemoteStationManager(std::string type, std::string n0="", ns3::AttributeValue const & v0=ns3::EmptyAttributeValue(), std::string n1="", ns3::AttributeValue const & v1=ns3::EmptyAttributeValue(), std::string n2="", ns3::AttributeValue const & v2=ns3::EmptyAttributeValue(), std::string n3="", ns3::AttributeValue const & v3=ns3::EmptyAttributeValue(), std::string n4="", ns3::AttributeValue const & v4=ns3::EmptyAttributeValue(), std::string n5="", ns3::AttributeValue const & v5=ns3::EmptyAttributeValue(), std::string n6="", ns3::AttributeValue const & v6=ns3::EmptyAttributeValue(), std::string n7="", ns3::AttributeValue const & v7=ns3::EmptyAttributeValue()) [member function]
cls.add_method('SetRemoteStationManager',
'void',
[param('std::string', 'type'), param('std::string', 'n0', default_value='""'), param('ns3::AttributeValue const &', 'v0', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n1', default_value='""'), param('ns3::AttributeValue const &', 'v1', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n2', default_value='""'), param('ns3::AttributeValue const &', 'v2', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n3', default_value='""'), param('ns3::AttributeValue const &', 'v3', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n4', default_value='""'), param('ns3::AttributeValue const &', 'v4', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n5', default_value='""'), param('ns3::AttributeValue const &', 'v5', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n6', default_value='""'), param('ns3::AttributeValue const &', 'v6', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n7', default_value='""'), param('ns3::AttributeValue const &', 'v7', default_value='ns3::EmptyAttributeValue()')])
## wifi-helper.h (module 'wifi'): void ns3::WifiHelper::SetStandard(ns3::WifiPhyStandard standard) [member function]
cls.add_method('SetStandard',
'void',
[param('ns3::WifiPhyStandard', 'standard')])
return
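# Illustrative configuration sketch using the methods registered above; the
# 'ns.wifi' import path and the remote-station-manager TypeId string are
# assumptions, and Install() additionally needs phy/mac helpers bound elsewhere.
def _example_wifi_helper_setup():
    import ns.wifi  # assumes the modular ns-3 python bindings are built and importable
    wifi = ns.wifi.WifiHelper.Default()
    wifi.SetRemoteStationManager("ns3::MinstrelWifiManager")  # TypeId name assumed
    return wifi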
def register_Ns3WifiMacHelper_methods(root_module, cls):
## wifi-helper.h (module 'wifi'): ns3::WifiMacHelper::WifiMacHelper() [constructor]
cls.add_constructor([])
## wifi-helper.h (module 'wifi'): ns3::WifiMacHelper::WifiMacHelper(ns3::WifiMacHelper const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WifiMacHelper const &', 'arg0')])
## wifi-helper.h (module 'wifi'): ns3::Ptr<ns3::WifiMac> ns3::WifiMacHelper::Create() const [member function]
cls.add_method('Create',
'ns3::Ptr< ns3::WifiMac >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3WifiMode_methods(root_module, cls):
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## wifi-mode.h (module 'wifi'): ns3::WifiMode::WifiMode(ns3::WifiMode const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WifiMode const &', 'arg0')])
## wifi-mode.h (module 'wifi'): ns3::WifiMode::WifiMode() [constructor]
cls.add_constructor([])
## wifi-mode.h (module 'wifi'): ns3::WifiMode::WifiMode(std::string name) [constructor]
cls.add_constructor([param('std::string', 'name')])
## wifi-mode.h (module 'wifi'): uint32_t ns3::WifiMode::GetBandwidth() const [member function]
cls.add_method('GetBandwidth',
'uint32_t',
[],
is_const=True)
## wifi-mode.h (module 'wifi'): ns3::WifiCodeRate ns3::WifiMode::GetCodeRate() const [member function]
cls.add_method('GetCodeRate',
'ns3::WifiCodeRate',
[],
is_const=True)
## wifi-mode.h (module 'wifi'): uint8_t ns3::WifiMode::GetConstellationSize() const [member function]
cls.add_method('GetConstellationSize',
'uint8_t',
[],
is_const=True)
## wifi-mode.h (module 'wifi'): uint64_t ns3::WifiMode::GetDataRate() const [member function]
cls.add_method('GetDataRate',
'uint64_t',
[],
is_const=True)
## wifi-mode.h (module 'wifi'): ns3::WifiModulationClass ns3::WifiMode::GetModulationClass() const [member function]
cls.add_method('GetModulationClass',
'ns3::WifiModulationClass',
[],
is_const=True)
## wifi-mode.h (module 'wifi'): uint64_t ns3::WifiMode::GetPhyRate() const [member function]
cls.add_method('GetPhyRate',
'uint64_t',
[],
is_const=True)
## wifi-mode.h (module 'wifi'): uint32_t ns3::WifiMode::GetUid() const [member function]
cls.add_method('GetUid',
'uint32_t',
[],
is_const=True)
## wifi-mode.h (module 'wifi'): std::string ns3::WifiMode::GetUniqueName() const [member function]
cls.add_method('GetUniqueName',
'std::string',
[],
is_const=True)
## wifi-mode.h (module 'wifi'): bool ns3::WifiMode::IsMandatory() const [member function]
cls.add_method('IsMandatory',
'bool',
[],
is_const=True)
return
def register_Ns3WifiModeFactory_methods(root_module, cls):
## wifi-mode.h (module 'wifi'): ns3::WifiModeFactory::WifiModeFactory(ns3::WifiModeFactory const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WifiModeFactory const &', 'arg0')])
## wifi-mode.h (module 'wifi'): static ns3::WifiMode ns3::WifiModeFactory::CreateWifiMode(std::string uniqueName, ns3::WifiModulationClass modClass, bool isMandatory, uint32_t bandwidth, uint32_t dataRate, ns3::WifiCodeRate codingRate, uint8_t constellationSize) [member function]
cls.add_method('CreateWifiMode',
'ns3::WifiMode',
[param('std::string', 'uniqueName'), param('ns3::WifiModulationClass', 'modClass'), param('bool', 'isMandatory'), param('uint32_t', 'bandwidth'), param('uint32_t', 'dataRate'), param('ns3::WifiCodeRate', 'codingRate'), param('uint8_t', 'constellationSize')],
is_static=True)
return
def register_Ns3WifiPhyHelper_methods(root_module, cls):
## wifi-helper.h (module 'wifi'): ns3::WifiPhyHelper::WifiPhyHelper() [constructor]
cls.add_constructor([])
## wifi-helper.h (module 'wifi'): ns3::WifiPhyHelper::WifiPhyHelper(ns3::WifiPhyHelper const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WifiPhyHelper const &', 'arg0')])
## wifi-helper.h (module 'wifi'): ns3::Ptr<ns3::WifiPhy> ns3::WifiPhyHelper::Create(ns3::Ptr<ns3::Node> node, ns3::Ptr<ns3::WifiNetDevice> device) const [member function]
cls.add_method('Create',
'ns3::Ptr< ns3::WifiPhy >',
[param('ns3::Ptr< ns3::Node >', 'node'), param('ns3::Ptr< ns3::WifiNetDevice >', 'device')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3WifiPhyListener_methods(root_module, cls):
## wifi-phy.h (module 'wifi'): ns3::WifiPhyListener::WifiPhyListener() [constructor]
cls.add_constructor([])
## wifi-phy.h (module 'wifi'): ns3::WifiPhyListener::WifiPhyListener(ns3::WifiPhyListener const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WifiPhyListener const &', 'arg0')])
## wifi-phy.h (module 'wifi'): void ns3::WifiPhyListener::NotifyMaybeCcaBusyStart(ns3::Time duration) [member function]
cls.add_method('NotifyMaybeCcaBusyStart',
'void',
[param('ns3::Time', 'duration')],
is_pure_virtual=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): void ns3::WifiPhyListener::NotifyRxEndError() [member function]
cls.add_method('NotifyRxEndError',
'void',
[],
is_pure_virtual=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): void ns3::WifiPhyListener::NotifyRxEndOk() [member function]
cls.add_method('NotifyRxEndOk',
'void',
[],
is_pure_virtual=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): void ns3::WifiPhyListener::NotifyRxStart(ns3::Time duration) [member function]
cls.add_method('NotifyRxStart',
'void',
[param('ns3::Time', 'duration')],
is_pure_virtual=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): void ns3::WifiPhyListener::NotifySwitchingStart(ns3::Time duration) [member function]
cls.add_method('NotifySwitchingStart',
'void',
[param('ns3::Time', 'duration')],
is_pure_virtual=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): void ns3::WifiPhyListener::NotifyTxStart(ns3::Time duration) [member function]
cls.add_method('NotifyTxStart',
'void',
[param('ns3::Time', 'duration')],
is_pure_virtual=True, is_virtual=True)
return
def register_Ns3WifiRemoteStation_methods(root_module, cls):
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStation::WifiRemoteStation() [constructor]
cls.add_constructor([])
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStation::WifiRemoteStation(ns3::WifiRemoteStation const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WifiRemoteStation const &', 'arg0')])
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStation::m_slrc [variable]
cls.add_instance_attribute('m_slrc', 'uint32_t', is_const=False)
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStation::m_ssrc [variable]
cls.add_instance_attribute('m_ssrc', 'uint32_t', is_const=False)
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStation::m_state [variable]
cls.add_instance_attribute('m_state', 'ns3::WifiRemoteStationState *', is_const=False)
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStation::m_tid [variable]
cls.add_instance_attribute('m_tid', 'uint8_t', is_const=False)
return
def register_Ns3WifiRemoteStationInfo_methods(root_module, cls):
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStationInfo::WifiRemoteStationInfo(ns3::WifiRemoteStationInfo const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WifiRemoteStationInfo const &', 'arg0')])
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStationInfo::WifiRemoteStationInfo() [constructor]
cls.add_constructor([])
## wifi-remote-station-manager.h (module 'wifi'): double ns3::WifiRemoteStationInfo::GetFrameErrorRate() const [member function]
cls.add_method('GetFrameErrorRate',
'double',
[],
is_const=True)
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationInfo::NotifyTxFailed() [member function]
cls.add_method('NotifyTxFailed',
'void',
[])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationInfo::NotifyTxSuccess(uint32_t retryCounter) [member function]
cls.add_method('NotifyTxSuccess',
'void',
[param('uint32_t', 'retryCounter')])
return
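# Hedged usage sketch: WifiRemoteStationInfo keeps a running frame error
# estimate fed by the notification calls registered above. A minimal sketch,
# assuming the usual 'ns.wifi' import path:
#
#   import ns.wifi
#   info = ns.wifi.WifiRemoteStationInfo()
#   info.NotifyTxSuccess(0)        # a frame succeeded with zero retries
#   info.NotifyTxFailed()          # a frame was dropped
#   print(info.GetFrameErrorRate())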
def register_Ns3WifiRemoteStationState_methods(root_module, cls):
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStationState::WifiRemoteStationState() [constructor]
cls.add_constructor([])
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStationState::WifiRemoteStationState(ns3::WifiRemoteStationState const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WifiRemoteStationState const &', 'arg0')])
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStationState::m_address [variable]
cls.add_instance_attribute('m_address', 'ns3::Mac48Address', is_const=False)
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStationState::m_info [variable]
cls.add_instance_attribute('m_info', 'ns3::WifiRemoteStationInfo', is_const=False)
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStationState::m_operationalRateSet [variable]
cls.add_instance_attribute('m_operationalRateSet', 'ns3::WifiModeList', is_const=False)
return
def register_Ns3YansWifiChannelHelper_methods(root_module, cls):
## yans-wifi-helper.h (module 'wifi'): ns3::YansWifiChannelHelper::YansWifiChannelHelper(ns3::YansWifiChannelHelper const & arg0) [copy constructor]
cls.add_constructor([param('ns3::YansWifiChannelHelper const &', 'arg0')])
## yans-wifi-helper.h (module 'wifi'): ns3::YansWifiChannelHelper::YansWifiChannelHelper() [constructor]
cls.add_constructor([])
## yans-wifi-helper.h (module 'wifi'): void ns3::YansWifiChannelHelper::AddPropagationLoss(std::string name, std::string n0="", ns3::AttributeValue const & v0=ns3::EmptyAttributeValue(), std::string n1="", ns3::AttributeValue const & v1=ns3::EmptyAttributeValue(), std::string n2="", ns3::AttributeValue const & v2=ns3::EmptyAttributeValue(), std::string n3="", ns3::AttributeValue const & v3=ns3::EmptyAttributeValue(), std::string n4="", ns3::AttributeValue const & v4=ns3::EmptyAttributeValue(), std::string n5="", ns3::AttributeValue const & v5=ns3::EmptyAttributeValue(), std::string n6="", ns3::AttributeValue const & v6=ns3::EmptyAttributeValue(), std::string n7="", ns3::AttributeValue const & v7=ns3::EmptyAttributeValue()) [member function]
cls.add_method('AddPropagationLoss',
'void',
[param('std::string', 'name'), param('std::string', 'n0', default_value='""'), param('ns3::AttributeValue const &', 'v0', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n1', default_value='""'), param('ns3::AttributeValue const &', 'v1', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n2', default_value='""'), param('ns3::AttributeValue const &', 'v2', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n3', default_value='""'), param('ns3::AttributeValue const &', 'v3', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n4', default_value='""'), param('ns3::AttributeValue const &', 'v4', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n5', default_value='""'), param('ns3::AttributeValue const &', 'v5', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n6', default_value='""'), param('ns3::AttributeValue const &', 'v6', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n7', default_value='""'), param('ns3::AttributeValue const &', 'v7', default_value='ns3::EmptyAttributeValue()')])
## yans-wifi-helper.h (module 'wifi'): ns3::Ptr<ns3::YansWifiChannel> ns3::YansWifiChannelHelper::Create() const [member function]
cls.add_method('Create',
'ns3::Ptr< ns3::YansWifiChannel >',
[],
is_const=True)
## yans-wifi-helper.h (module 'wifi'): static ns3::YansWifiChannelHelper ns3::YansWifiChannelHelper::Default() [member function]
cls.add_method('Default',
'ns3::YansWifiChannelHelper',
[],
is_static=True)
## yans-wifi-helper.h (module 'wifi'): void ns3::YansWifiChannelHelper::SetPropagationDelay(std::string name, std::string n0="", ns3::AttributeValue const & v0=ns3::EmptyAttributeValue(), std::string n1="", ns3::AttributeValue const & v1=ns3::EmptyAttributeValue(), std::string n2="", ns3::AttributeValue const & v2=ns3::EmptyAttributeValue(), std::string n3="", ns3::AttributeValue const & v3=ns3::EmptyAttributeValue(), std::string n4="", ns3::AttributeValue const & v4=ns3::EmptyAttributeValue(), std::string n5="", ns3::AttributeValue const & v5=ns3::EmptyAttributeValue(), std::string n6="", ns3::AttributeValue const & v6=ns3::EmptyAttributeValue(), std::string n7="", ns3::AttributeValue const & v7=ns3::EmptyAttributeValue()) [member function]
cls.add_method('SetPropagationDelay',
'void',
[param('std::string', 'name'), param('std::string', 'n0', default_value='""'), param('ns3::AttributeValue const &', 'v0', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n1', default_value='""'), param('ns3::AttributeValue const &', 'v1', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n2', default_value='""'), param('ns3::AttributeValue const &', 'v2', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n3', default_value='""'), param('ns3::AttributeValue const &', 'v3', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n4', default_value='""'), param('ns3::AttributeValue const &', 'v4', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n5', default_value='""'), param('ns3::AttributeValue const &', 'v5', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n6', default_value='""'), param('ns3::AttributeValue const &', 'v6', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n7', default_value='""'), param('ns3::AttributeValue const &', 'v7', default_value='ns3::EmptyAttributeValue()')])
return
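# Hedged usage sketch: the channel helper registered above is typically driven
# as below; the propagation model TypeId strings and the 'ns.wifi' import path
# mirror standard ns-3 usage rather than anything stated in this file.
#
#   import ns.wifi
#   channel_helper = ns.wifi.YansWifiChannelHelper.Default()
#   channel_helper.AddPropagationLoss("ns3::LogDistancePropagationLossModel")
#   channel_helper.SetPropagationDelay("ns3::ConstantSpeedPropagationDelayModel")
#   channel = channel_helper.Create()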
def register_Ns3YansWifiPhyHelper_methods(root_module, cls):
## yans-wifi-helper.h (module 'wifi'): ns3::YansWifiPhyHelper::YansWifiPhyHelper(ns3::YansWifiPhyHelper const & arg0) [copy constructor]
cls.add_constructor([param('ns3::YansWifiPhyHelper const &', 'arg0')])
## yans-wifi-helper.h (module 'wifi'): ns3::YansWifiPhyHelper::YansWifiPhyHelper() [constructor]
cls.add_constructor([])
## yans-wifi-helper.h (module 'wifi'): static ns3::YansWifiPhyHelper ns3::YansWifiPhyHelper::Default() [member function]
cls.add_method('Default',
'ns3::YansWifiPhyHelper',
[],
is_static=True)
## yans-wifi-helper.h (module 'wifi'): void ns3::YansWifiPhyHelper::Set(std::string name, ns3::AttributeValue const & v) [member function]
cls.add_method('Set',
'void',
[param('std::string', 'name'), param('ns3::AttributeValue const &', 'v')])
## yans-wifi-helper.h (module 'wifi'): void ns3::YansWifiPhyHelper::SetChannel(ns3::Ptr<ns3::YansWifiChannel> channel) [member function]
cls.add_method('SetChannel',
'void',
[param('ns3::Ptr< ns3::YansWifiChannel >', 'channel')])
## yans-wifi-helper.h (module 'wifi'): void ns3::YansWifiPhyHelper::SetChannel(std::string channelName) [member function]
cls.add_method('SetChannel',
'void',
[param('std::string', 'channelName')])
## yans-wifi-helper.h (module 'wifi'): void ns3::YansWifiPhyHelper::SetErrorRateModel(std::string name, std::string n0="", ns3::AttributeValue const & v0=ns3::EmptyAttributeValue(), std::string n1="", ns3::AttributeValue const & v1=ns3::EmptyAttributeValue(), std::string n2="", ns3::AttributeValue const & v2=ns3::EmptyAttributeValue(), std::string n3="", ns3::AttributeValue const & v3=ns3::EmptyAttributeValue(), std::string n4="", ns3::AttributeValue const & v4=ns3::EmptyAttributeValue(), std::string n5="", ns3::AttributeValue const & v5=ns3::EmptyAttributeValue(), std::string n6="", ns3::AttributeValue const & v6=ns3::EmptyAttributeValue(), std::string n7="", ns3::AttributeValue const & v7=ns3::EmptyAttributeValue()) [member function]
cls.add_method('SetErrorRateModel',
'void',
[param('std::string', 'name'), param('std::string', 'n0', default_value='""'), param('ns3::AttributeValue const &', 'v0', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n1', default_value='""'), param('ns3::AttributeValue const &', 'v1', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n2', default_value='""'), param('ns3::AttributeValue const &', 'v2', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n3', default_value='""'), param('ns3::AttributeValue const &', 'v3', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n4', default_value='""'), param('ns3::AttributeValue const &', 'v4', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n5', default_value='""'), param('ns3::AttributeValue const &', 'v5', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n6', default_value='""'), param('ns3::AttributeValue const &', 'v6', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n7', default_value='""'), param('ns3::AttributeValue const &', 'v7', default_value='ns3::EmptyAttributeValue()')])
## yans-wifi-helper.h (module 'wifi'): void ns3::YansWifiPhyHelper::SetPcapDataLinkType(ns3::YansWifiPhyHelper::SupportedPcapDataLinkTypes dlt) [member function]
cls.add_method('SetPcapDataLinkType',
'void',
[param('ns3::YansWifiPhyHelper::SupportedPcapDataLinkTypes', 'dlt')])
## yans-wifi-helper.h (module 'wifi'): ns3::Ptr<ns3::WifiPhy> ns3::YansWifiPhyHelper::Create(ns3::Ptr<ns3::Node> node, ns3::Ptr<ns3::WifiNetDevice> device) const [member function]
cls.add_method('Create',
'ns3::Ptr< ns3::WifiPhy >',
[param('ns3::Ptr< ns3::Node >', 'node'), param('ns3::Ptr< ns3::WifiNetDevice >', 'device')],
is_const=True, visibility='private', is_virtual=True)
## yans-wifi-helper.h (module 'wifi'): void ns3::YansWifiPhyHelper::EnableAsciiInternal(ns3::Ptr<ns3::OutputStreamWrapper> stream, std::string prefix, ns3::Ptr<ns3::NetDevice> nd, bool explicitFilename) [member function]
cls.add_method('EnableAsciiInternal',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream'), param('std::string', 'prefix'), param('ns3::Ptr< ns3::NetDevice >', 'nd'), param('bool', 'explicitFilename')],
visibility='private', is_virtual=True)
## yans-wifi-helper.h (module 'wifi'): void ns3::YansWifiPhyHelper::EnablePcapInternal(std::string prefix, ns3::Ptr<ns3::NetDevice> nd, bool promiscuous, bool explicitFilename) [member function]
cls.add_method('EnablePcapInternal',
'void',
[param('std::string', 'prefix'), param('ns3::Ptr< ns3::NetDevice >', 'nd'), param('bool', 'promiscuous'), param('bool', 'explicitFilename')],
visibility='private', is_virtual=True)
return
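# Hedged usage sketch: the PHY helper registered above pairs with the channel
# helper sketch earlier. The "RxGain" attribute name, ns.core.DoubleValue and
# the "ns3::NistErrorRateModel" TypeId are assumptions taken from stock ns-3;
# 'channel' is the ns3::YansWifiChannel created in the previous sketch.
#
#   import ns.core, ns.wifi
#   phy_helper = ns.wifi.YansWifiPhyHelper.Default()
#   phy_helper.SetChannel(channel)
#   phy_helper.Set("RxGain", ns.core.DoubleValue(0.0))
#   phy_helper.SetErrorRateModel("ns3::NistErrorRateModel")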
def register_Ns3ZetaVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::ZetaVariable::ZetaVariable(ns3::ZetaVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ZetaVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::ZetaVariable::ZetaVariable(double alpha) [constructor]
cls.add_constructor([param('double', 'alpha')])
## random-variable.h (module 'core'): ns3::ZetaVariable::ZetaVariable() [constructor]
cls.add_constructor([])
return
def register_Ns3ZipfVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::ZipfVariable::ZipfVariable(ns3::ZipfVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ZipfVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::ZipfVariable::ZipfVariable(long int N, double alpha) [constructor]
cls.add_constructor([param('long int', 'N'), param('double', 'alpha')])
## random-variable.h (module 'core'): ns3::ZipfVariable::ZipfVariable() [constructor]
cls.add_constructor([])
return
def register_Ns3Empty_methods(root_module, cls):
## empty.h (module 'core'): ns3::empty::empty() [constructor]
cls.add_constructor([])
## empty.h (module 'core'): ns3::empty::empty(ns3::empty const & arg0) [copy constructor]
cls.add_constructor([param('ns3::empty const &', 'arg0')])
return
def register_Ns3Int64x64_t_methods(root_module, cls):
cls.add_inplace_numeric_operator('+=', param('ns3::int64x64_t const &', 'right'))
cls.add_binary_comparison_operator('!=')
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long unsigned int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long unsigned int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short unsigned int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned char const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('signed char const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('double const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long unsigned int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long unsigned int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short unsigned int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned char const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('signed char const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('double const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long unsigned int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long unsigned int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short unsigned int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned char const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('signed char const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('double const', 'right'))
cls.add_unary_numeric_operator('-')
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long unsigned int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long unsigned int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short unsigned int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned char const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('signed char const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('double const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', 'right'))
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('>')
cls.add_inplace_numeric_operator('*=', param('ns3::int64x64_t const &', 'right'))
cls.add_inplace_numeric_operator('-=', param('ns3::int64x64_t const &', 'right'))
cls.add_inplace_numeric_operator('/=', param('ns3::int64x64_t const &', 'right'))
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('<=')
cls.add_binary_comparison_operator('==')
cls.add_binary_comparison_operator('>=')
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t() [constructor]
cls.add_constructor([])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(double v) [constructor]
cls.add_constructor([param('double', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(int v) [constructor]
cls.add_constructor([param('int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long int v) [constructor]
cls.add_constructor([param('long int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long long int v) [constructor]
cls.add_constructor([param('long long int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(unsigned int v) [constructor]
cls.add_constructor([param('unsigned int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long unsigned int v) [constructor]
cls.add_constructor([param('long unsigned int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long long unsigned int v) [constructor]
cls.add_constructor([param('long long unsigned int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(int64_t hi, uint64_t lo) [constructor]
cls.add_constructor([param('int64_t', 'hi'), param('uint64_t', 'lo')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(ns3::int64x64_t const & o) [copy constructor]
cls.add_constructor([param('ns3::int64x64_t const &', 'o')])
## int64x64-double.h (module 'core'): double ns3::int64x64_t::GetDouble() const [member function]
cls.add_method('GetDouble',
'double',
[],
is_const=True)
## int64x64-double.h (module 'core'): int64_t ns3::int64x64_t::GetHigh() const [member function]
cls.add_method('GetHigh',
'int64_t',
[],
is_const=True)
## int64x64-double.h (module 'core'): uint64_t ns3::int64x64_t::GetLow() const [member function]
cls.add_method('GetLow',
'uint64_t',
[],
is_const=True)
## int64x64-double.h (module 'core'): static ns3::int64x64_t ns3::int64x64_t::Invert(uint64_t v) [member function]
cls.add_method('Invert',
'ns3::int64x64_t',
[param('uint64_t', 'v')],
is_static=True)
## int64x64-double.h (module 'core'): void ns3::int64x64_t::MulByInvert(ns3::int64x64_t const & o) [member function]
cls.add_method('MulByInvert',
'void',
[param('ns3::int64x64_t const &', 'o')])
return
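# Hedged usage sketch: int64x64_t is a fixed-point value type, and the operator
# registrations above make it behave like a number from Python. The 'ns.core'
# export path and the (hi, lo) constructor taking integer and fractional words
# are assumptions.
#
#   import ns.core
#   a = ns.core.int64x64_t(1.5)
#   b = ns.core.int64x64_t(2, 0)
#   print((a + b).GetDouble(), (a * b).GetDouble(), a < b)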
def register_Ns3Chunk_methods(root_module, cls):
## chunk.h (module 'network'): ns3::Chunk::Chunk() [constructor]
cls.add_constructor([])
## chunk.h (module 'network'): ns3::Chunk::Chunk(ns3::Chunk const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Chunk const &', 'arg0')])
## chunk.h (module 'network'): uint32_t ns3::Chunk::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_pure_virtual=True, is_virtual=True)
## chunk.h (module 'network'): static ns3::TypeId ns3::Chunk::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## chunk.h (module 'network'): void ns3::Chunk::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3ConstantVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::ConstantVariable::ConstantVariable(ns3::ConstantVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ConstantVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::ConstantVariable::ConstantVariable() [constructor]
cls.add_constructor([])
## random-variable.h (module 'core'): ns3::ConstantVariable::ConstantVariable(double c) [constructor]
cls.add_constructor([param('double', 'c')])
## random-variable.h (module 'core'): void ns3::ConstantVariable::SetConstant(double c) [member function]
cls.add_method('SetConstant',
'void',
[param('double', 'c')])
return
def register_Ns3DeterministicVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::DeterministicVariable::DeterministicVariable(ns3::DeterministicVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::DeterministicVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::DeterministicVariable::DeterministicVariable(double * d, uint32_t c) [constructor]
cls.add_constructor([param('double *', 'd'), param('uint32_t', 'c')])
return
def register_Ns3EmpiricalVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::EmpiricalVariable::EmpiricalVariable(ns3::EmpiricalVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::EmpiricalVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::EmpiricalVariable::EmpiricalVariable() [constructor]
cls.add_constructor([])
## random-variable.h (module 'core'): void ns3::EmpiricalVariable::CDF(double v, double c) [member function]
cls.add_method('CDF',
'void',
[param('double', 'v'), param('double', 'c')])
return
def register_Ns3ErlangVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::ErlangVariable::ErlangVariable(ns3::ErlangVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ErlangVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::ErlangVariable::ErlangVariable() [constructor]
cls.add_constructor([])
## random-variable.h (module 'core'): ns3::ErlangVariable::ErlangVariable(unsigned int k, double lambda) [constructor]
cls.add_constructor([param('unsigned int', 'k'), param('double', 'lambda')])
## random-variable.h (module 'core'): double ns3::ErlangVariable::GetValue() const [member function]
cls.add_method('GetValue',
'double',
[],
is_const=True)
## random-variable.h (module 'core'): double ns3::ErlangVariable::GetValue(unsigned int k, double lambda) const [member function]
cls.add_method('GetValue',
'double',
[param('unsigned int', 'k'), param('double', 'lambda')],
is_const=True)
return
def register_Ns3ExponentialVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::ExponentialVariable::ExponentialVariable(ns3::ExponentialVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ExponentialVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::ExponentialVariable::ExponentialVariable() [constructor]
cls.add_constructor([])
## random-variable.h (module 'core'): ns3::ExponentialVariable::ExponentialVariable(double m) [constructor]
cls.add_constructor([param('double', 'm')])
## random-variable.h (module 'core'): ns3::ExponentialVariable::ExponentialVariable(double m, double b) [constructor]
cls.add_constructor([param('double', 'm'), param('double', 'b')])
return
def register_Ns3GammaVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::GammaVariable::GammaVariable(ns3::GammaVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::GammaVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::GammaVariable::GammaVariable() [constructor]
cls.add_constructor([])
## random-variable.h (module 'core'): ns3::GammaVariable::GammaVariable(double alpha, double beta) [constructor]
cls.add_constructor([param('double', 'alpha'), param('double', 'beta')])
## random-variable.h (module 'core'): double ns3::GammaVariable::GetValue() const [member function]
cls.add_method('GetValue',
'double',
[],
is_const=True)
## random-variable.h (module 'core'): double ns3::GammaVariable::GetValue(double alpha, double beta) const [member function]
cls.add_method('GetValue',
'double',
[param('double', 'alpha'), param('double', 'beta')],
is_const=True)
return
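# Hedged usage sketch: the random variables registered in this region
# (ConstantVariable, ExponentialVariable, GammaVariable, ...) all come from
# random-variable.h and share the GetValue() pattern shown above. A minimal
# sketch, assuming the 'ns.core' import path:
#
#   import ns.core
#   g = ns.core.GammaVariable(2.0, 1.0)    # alpha, beta
#   sample = g.GetValue()                  # draw using the stored parameters
#   other = g.GetValue(3.0, 0.5)           # draw with explicit alpha, beta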
def register_Ns3Header_methods(root_module, cls):
cls.add_output_stream_operator()
## header.h (module 'network'): ns3::Header::Header() [constructor]
cls.add_constructor([])
## header.h (module 'network'): ns3::Header::Header(ns3::Header const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Header const &', 'arg0')])
## header.h (module 'network'): uint32_t ns3::Header::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_pure_virtual=True, is_virtual=True)
## header.h (module 'network'): uint32_t ns3::Header::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## header.h (module 'network'): static ns3::TypeId ns3::Header::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## header.h (module 'network'): void ns3::Header::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## header.h (module 'network'): void ns3::Header::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3IntEmpiricalVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::IntEmpiricalVariable::IntEmpiricalVariable(ns3::IntEmpiricalVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::IntEmpiricalVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::IntEmpiricalVariable::IntEmpiricalVariable() [constructor]
cls.add_constructor([])
return
def register_Ns3LogNormalVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::LogNormalVariable::LogNormalVariable(ns3::LogNormalVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::LogNormalVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::LogNormalVariable::LogNormalVariable(double mu, double sigma) [constructor]
cls.add_constructor([param('double', 'mu'), param('double', 'sigma')])
return
def register_Ns3MgtAddBaRequestHeader_methods(root_module, cls):
## mgt-headers.h (module 'wifi'): ns3::MgtAddBaRequestHeader::MgtAddBaRequestHeader(ns3::MgtAddBaRequestHeader const & arg0) [copy constructor]
cls.add_constructor([param('ns3::MgtAddBaRequestHeader const &', 'arg0')])
## mgt-headers.h (module 'wifi'): ns3::MgtAddBaRequestHeader::MgtAddBaRequestHeader() [constructor]
cls.add_constructor([])
## mgt-headers.h (module 'wifi'): uint32_t ns3::MgtAddBaRequestHeader::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## mgt-headers.h (module 'wifi'): uint16_t ns3::MgtAddBaRequestHeader::GetBufferSize() const [member function]
cls.add_method('GetBufferSize',
'uint16_t',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): ns3::TypeId ns3::MgtAddBaRequestHeader::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): uint32_t ns3::MgtAddBaRequestHeader::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): uint16_t ns3::MgtAddBaRequestHeader::GetStartingSequence() const [member function]
cls.add_method('GetStartingSequence',
'uint16_t',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): uint8_t ns3::MgtAddBaRequestHeader::GetTid() const [member function]
cls.add_method('GetTid',
'uint8_t',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): uint16_t ns3::MgtAddBaRequestHeader::GetTimeout() const [member function]
cls.add_method('GetTimeout',
'uint16_t',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): static ns3::TypeId ns3::MgtAddBaRequestHeader::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## mgt-headers.h (module 'wifi'): bool ns3::MgtAddBaRequestHeader::IsAmsduSupported() const [member function]
cls.add_method('IsAmsduSupported',
'bool',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): bool ns3::MgtAddBaRequestHeader::IsImmediateBlockAck() const [member function]
cls.add_method('IsImmediateBlockAck',
'bool',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtAddBaRequestHeader::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtAddBaRequestHeader::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtAddBaRequestHeader::SetAmsduSupport(bool supported) [member function]
cls.add_method('SetAmsduSupport',
'void',
[param('bool', 'supported')])
## mgt-headers.h (module 'wifi'): void ns3::MgtAddBaRequestHeader::SetBufferSize(uint16_t size) [member function]
cls.add_method('SetBufferSize',
'void',
[param('uint16_t', 'size')])
## mgt-headers.h (module 'wifi'): void ns3::MgtAddBaRequestHeader::SetDelayedBlockAck() [member function]
cls.add_method('SetDelayedBlockAck',
'void',
[])
## mgt-headers.h (module 'wifi'): void ns3::MgtAddBaRequestHeader::SetImmediateBlockAck() [member function]
cls.add_method('SetImmediateBlockAck',
'void',
[])
## mgt-headers.h (module 'wifi'): void ns3::MgtAddBaRequestHeader::SetStartingSequence(uint16_t seq) [member function]
cls.add_method('SetStartingSequence',
'void',
[param('uint16_t', 'seq')])
## mgt-headers.h (module 'wifi'): void ns3::MgtAddBaRequestHeader::SetTid(uint8_t tid) [member function]
cls.add_method('SetTid',
'void',
[param('uint8_t', 'tid')])
## mgt-headers.h (module 'wifi'): void ns3::MgtAddBaRequestHeader::SetTimeout(uint16_t timeout) [member function]
cls.add_method('SetTimeout',
'void',
[param('uint16_t', 'timeout')])
return
def register_Ns3MgtAddBaResponseHeader_methods(root_module, cls):
## mgt-headers.h (module 'wifi'): ns3::MgtAddBaResponseHeader::MgtAddBaResponseHeader(ns3::MgtAddBaResponseHeader const & arg0) [copy constructor]
cls.add_constructor([param('ns3::MgtAddBaResponseHeader const &', 'arg0')])
## mgt-headers.h (module 'wifi'): ns3::MgtAddBaResponseHeader::MgtAddBaResponseHeader() [constructor]
cls.add_constructor([])
## mgt-headers.h (module 'wifi'): uint32_t ns3::MgtAddBaResponseHeader::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## mgt-headers.h (module 'wifi'): uint16_t ns3::MgtAddBaResponseHeader::GetBufferSize() const [member function]
cls.add_method('GetBufferSize',
'uint16_t',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): ns3::TypeId ns3::MgtAddBaResponseHeader::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): uint32_t ns3::MgtAddBaResponseHeader::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): ns3::StatusCode ns3::MgtAddBaResponseHeader::GetStatusCode() const [member function]
cls.add_method('GetStatusCode',
'ns3::StatusCode',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): uint8_t ns3::MgtAddBaResponseHeader::GetTid() const [member function]
cls.add_method('GetTid',
'uint8_t',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): uint16_t ns3::MgtAddBaResponseHeader::GetTimeout() const [member function]
cls.add_method('GetTimeout',
'uint16_t',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): static ns3::TypeId ns3::MgtAddBaResponseHeader::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## mgt-headers.h (module 'wifi'): bool ns3::MgtAddBaResponseHeader::IsAmsduSupported() const [member function]
cls.add_method('IsAmsduSupported',
'bool',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): bool ns3::MgtAddBaResponseHeader::IsImmediateBlockAck() const [member function]
cls.add_method('IsImmediateBlockAck',
'bool',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtAddBaResponseHeader::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtAddBaResponseHeader::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtAddBaResponseHeader::SetAmsduSupport(bool supported) [member function]
cls.add_method('SetAmsduSupport',
'void',
[param('bool', 'supported')])
## mgt-headers.h (module 'wifi'): void ns3::MgtAddBaResponseHeader::SetBufferSize(uint16_t size) [member function]
cls.add_method('SetBufferSize',
'void',
[param('uint16_t', 'size')])
## mgt-headers.h (module 'wifi'): void ns3::MgtAddBaResponseHeader::SetDelayedBlockAck() [member function]
cls.add_method('SetDelayedBlockAck',
'void',
[])
## mgt-headers.h (module 'wifi'): void ns3::MgtAddBaResponseHeader::SetImmediateBlockAck() [member function]
cls.add_method('SetImmediateBlockAck',
'void',
[])
## mgt-headers.h (module 'wifi'): void ns3::MgtAddBaResponseHeader::SetStatusCode(ns3::StatusCode code) [member function]
cls.add_method('SetStatusCode',
'void',
[param('ns3::StatusCode', 'code')])
## mgt-headers.h (module 'wifi'): void ns3::MgtAddBaResponseHeader::SetTid(uint8_t tid) [member function]
cls.add_method('SetTid',
'void',
[param('uint8_t', 'tid')])
## mgt-headers.h (module 'wifi'): void ns3::MgtAddBaResponseHeader::SetTimeout(uint16_t timeout) [member function]
cls.add_method('SetTimeout',
'void',
[param('uint16_t', 'timeout')])
return
def register_Ns3MgtAssocRequestHeader_methods(root_module, cls):
## mgt-headers.h (module 'wifi'): ns3::MgtAssocRequestHeader::MgtAssocRequestHeader(ns3::MgtAssocRequestHeader const & arg0) [copy constructor]
cls.add_constructor([param('ns3::MgtAssocRequestHeader const &', 'arg0')])
## mgt-headers.h (module 'wifi'): ns3::MgtAssocRequestHeader::MgtAssocRequestHeader() [constructor]
cls.add_constructor([])
## mgt-headers.h (module 'wifi'): uint32_t ns3::MgtAssocRequestHeader::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## mgt-headers.h (module 'wifi'): ns3::TypeId ns3::MgtAssocRequestHeader::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): uint16_t ns3::MgtAssocRequestHeader::GetListenInterval() const [member function]
cls.add_method('GetListenInterval',
'uint16_t',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): uint32_t ns3::MgtAssocRequestHeader::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): ns3::Ssid ns3::MgtAssocRequestHeader::GetSsid() const [member function]
cls.add_method('GetSsid',
'ns3::Ssid',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): ns3::SupportedRates ns3::MgtAssocRequestHeader::GetSupportedRates() const [member function]
cls.add_method('GetSupportedRates',
'ns3::SupportedRates',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): static ns3::TypeId ns3::MgtAssocRequestHeader::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtAssocRequestHeader::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtAssocRequestHeader::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtAssocRequestHeader::SetListenInterval(uint16_t interval) [member function]
cls.add_method('SetListenInterval',
'void',
[param('uint16_t', 'interval')])
## mgt-headers.h (module 'wifi'): void ns3::MgtAssocRequestHeader::SetSsid(ns3::Ssid ssid) [member function]
cls.add_method('SetSsid',
'void',
[param('ns3::Ssid', 'ssid')])
## mgt-headers.h (module 'wifi'): void ns3::MgtAssocRequestHeader::SetSupportedRates(ns3::SupportedRates rates) [member function]
cls.add_method('SetSupportedRates',
'void',
[param('ns3::SupportedRates', 'rates')])
return
def register_Ns3MgtAssocResponseHeader_methods(root_module, cls):
## mgt-headers.h (module 'wifi'): ns3::MgtAssocResponseHeader::MgtAssocResponseHeader(ns3::MgtAssocResponseHeader const & arg0) [copy constructor]
cls.add_constructor([param('ns3::MgtAssocResponseHeader const &', 'arg0')])
## mgt-headers.h (module 'wifi'): ns3::MgtAssocResponseHeader::MgtAssocResponseHeader() [constructor]
cls.add_constructor([])
## mgt-headers.h (module 'wifi'): uint32_t ns3::MgtAssocResponseHeader::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## mgt-headers.h (module 'wifi'): ns3::TypeId ns3::MgtAssocResponseHeader::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): uint32_t ns3::MgtAssocResponseHeader::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): ns3::StatusCode ns3::MgtAssocResponseHeader::GetStatusCode() [member function]
cls.add_method('GetStatusCode',
'ns3::StatusCode',
[])
## mgt-headers.h (module 'wifi'): ns3::SupportedRates ns3::MgtAssocResponseHeader::GetSupportedRates() [member function]
cls.add_method('GetSupportedRates',
'ns3::SupportedRates',
[])
## mgt-headers.h (module 'wifi'): static ns3::TypeId ns3::MgtAssocResponseHeader::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtAssocResponseHeader::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtAssocResponseHeader::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtAssocResponseHeader::SetStatusCode(ns3::StatusCode code) [member function]
cls.add_method('SetStatusCode',
'void',
[param('ns3::StatusCode', 'code')])
## mgt-headers.h (module 'wifi'): void ns3::MgtAssocResponseHeader::SetSupportedRates(ns3::SupportedRates rates) [member function]
cls.add_method('SetSupportedRates',
'void',
[param('ns3::SupportedRates', 'rates')])
return
def register_Ns3MgtDelBaHeader_methods(root_module, cls):
## mgt-headers.h (module 'wifi'): ns3::MgtDelBaHeader::MgtDelBaHeader(ns3::MgtDelBaHeader const & arg0) [copy constructor]
cls.add_constructor([param('ns3::MgtDelBaHeader const &', 'arg0')])
## mgt-headers.h (module 'wifi'): ns3::MgtDelBaHeader::MgtDelBaHeader() [constructor]
cls.add_constructor([])
## mgt-headers.h (module 'wifi'): uint32_t ns3::MgtDelBaHeader::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## mgt-headers.h (module 'wifi'): ns3::TypeId ns3::MgtDelBaHeader::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): uint32_t ns3::MgtDelBaHeader::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): uint8_t ns3::MgtDelBaHeader::GetTid() const [member function]
cls.add_method('GetTid',
'uint8_t',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): static ns3::TypeId ns3::MgtDelBaHeader::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## mgt-headers.h (module 'wifi'): bool ns3::MgtDelBaHeader::IsByOriginator() const [member function]
cls.add_method('IsByOriginator',
'bool',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtDelBaHeader::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtDelBaHeader::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtDelBaHeader::SetByOriginator() [member function]
cls.add_method('SetByOriginator',
'void',
[])
## mgt-headers.h (module 'wifi'): void ns3::MgtDelBaHeader::SetByRecipient() [member function]
cls.add_method('SetByRecipient',
'void',
[])
## mgt-headers.h (module 'wifi'): void ns3::MgtDelBaHeader::SetTid(uint8_t arg0) [member function]
cls.add_method('SetTid',
'void',
[param('uint8_t', 'arg0')])
return
def register_Ns3MgtProbeRequestHeader_methods(root_module, cls):
## mgt-headers.h (module 'wifi'): ns3::MgtProbeRequestHeader::MgtProbeRequestHeader() [constructor]
cls.add_constructor([])
## mgt-headers.h (module 'wifi'): ns3::MgtProbeRequestHeader::MgtProbeRequestHeader(ns3::MgtProbeRequestHeader const & arg0) [copy constructor]
cls.add_constructor([param('ns3::MgtProbeRequestHeader const &', 'arg0')])
## mgt-headers.h (module 'wifi'): uint32_t ns3::MgtProbeRequestHeader::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## mgt-headers.h (module 'wifi'): ns3::TypeId ns3::MgtProbeRequestHeader::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): uint32_t ns3::MgtProbeRequestHeader::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): ns3::Ssid ns3::MgtProbeRequestHeader::GetSsid() const [member function]
cls.add_method('GetSsid',
'ns3::Ssid',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): ns3::SupportedRates ns3::MgtProbeRequestHeader::GetSupportedRates() const [member function]
cls.add_method('GetSupportedRates',
'ns3::SupportedRates',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): static ns3::TypeId ns3::MgtProbeRequestHeader::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtProbeRequestHeader::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtProbeRequestHeader::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtProbeRequestHeader::SetSsid(ns3::Ssid ssid) [member function]
cls.add_method('SetSsid',
'void',
[param('ns3::Ssid', 'ssid')])
## mgt-headers.h (module 'wifi'): void ns3::MgtProbeRequestHeader::SetSupportedRates(ns3::SupportedRates rates) [member function]
cls.add_method('SetSupportedRates',
'void',
[param('ns3::SupportedRates', 'rates')])
return
def register_Ns3MgtProbeResponseHeader_methods(root_module, cls):
## mgt-headers.h (module 'wifi'): ns3::MgtProbeResponseHeader::MgtProbeResponseHeader(ns3::MgtProbeResponseHeader const & arg0) [copy constructor]
cls.add_constructor([param('ns3::MgtProbeResponseHeader const &', 'arg0')])
## mgt-headers.h (module 'wifi'): ns3::MgtProbeResponseHeader::MgtProbeResponseHeader() [constructor]
cls.add_constructor([])
## mgt-headers.h (module 'wifi'): uint32_t ns3::MgtProbeResponseHeader::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## mgt-headers.h (module 'wifi'): uint64_t ns3::MgtProbeResponseHeader::GetBeaconIntervalUs() const [member function]
cls.add_method('GetBeaconIntervalUs',
'uint64_t',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): ns3::TypeId ns3::MgtProbeResponseHeader::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): uint32_t ns3::MgtProbeResponseHeader::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): ns3::Ssid ns3::MgtProbeResponseHeader::GetSsid() const [member function]
cls.add_method('GetSsid',
'ns3::Ssid',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): ns3::SupportedRates ns3::MgtProbeResponseHeader::GetSupportedRates() const [member function]
cls.add_method('GetSupportedRates',
'ns3::SupportedRates',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): uint64_t ns3::MgtProbeResponseHeader::GetTimestamp() [member function]
cls.add_method('GetTimestamp',
'uint64_t',
[])
## mgt-headers.h (module 'wifi'): static ns3::TypeId ns3::MgtProbeResponseHeader::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtProbeResponseHeader::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtProbeResponseHeader::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtProbeResponseHeader::SetBeaconIntervalUs(uint64_t us) [member function]
cls.add_method('SetBeaconIntervalUs',
'void',
[param('uint64_t', 'us')])
## mgt-headers.h (module 'wifi'): void ns3::MgtProbeResponseHeader::SetSsid(ns3::Ssid ssid) [member function]
cls.add_method('SetSsid',
'void',
[param('ns3::Ssid', 'ssid')])
## mgt-headers.h (module 'wifi'): void ns3::MgtProbeResponseHeader::SetSupportedRates(ns3::SupportedRates rates) [member function]
cls.add_method('SetSupportedRates',
'void',
[param('ns3::SupportedRates', 'rates')])
return
def register_Ns3NormalVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::NormalVariable::NormalVariable(ns3::NormalVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::NormalVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::NormalVariable::NormalVariable() [constructor]
cls.add_constructor([])
## random-variable.h (module 'core'): ns3::NormalVariable::NormalVariable(double m, double v) [constructor]
cls.add_constructor([param('double', 'm'), param('double', 'v')])
## random-variable.h (module 'core'): ns3::NormalVariable::NormalVariable(double m, double v, double b) [constructor]
cls.add_constructor([param('double', 'm'), param('double', 'v'), param('double', 'b')])
return
def register_Ns3NqosWifiMacHelper_methods(root_module, cls):
## nqos-wifi-mac-helper.h (module 'wifi'): ns3::NqosWifiMacHelper::NqosWifiMacHelper(ns3::NqosWifiMacHelper const & arg0) [copy constructor]
cls.add_constructor([param('ns3::NqosWifiMacHelper const &', 'arg0')])
## nqos-wifi-mac-helper.h (module 'wifi'): ns3::NqosWifiMacHelper::NqosWifiMacHelper() [constructor]
cls.add_constructor([])
## nqos-wifi-mac-helper.h (module 'wifi'): static ns3::NqosWifiMacHelper ns3::NqosWifiMacHelper::Default() [member function]
cls.add_method('Default',
'ns3::NqosWifiMacHelper',
[],
is_static=True)
## nqos-wifi-mac-helper.h (module 'wifi'): void ns3::NqosWifiMacHelper::SetType(std::string type, std::string n0="", ns3::AttributeValue const & v0=ns3::EmptyAttributeValue(), std::string n1="", ns3::AttributeValue const & v1=ns3::EmptyAttributeValue(), std::string n2="", ns3::AttributeValue const & v2=ns3::EmptyAttributeValue(), std::string n3="", ns3::AttributeValue const & v3=ns3::EmptyAttributeValue(), std::string n4="", ns3::AttributeValue const & v4=ns3::EmptyAttributeValue(), std::string n5="", ns3::AttributeValue const & v5=ns3::EmptyAttributeValue(), std::string n6="", ns3::AttributeValue const & v6=ns3::EmptyAttributeValue(), std::string n7="", ns3::AttributeValue const & v7=ns3::EmptyAttributeValue()) [member function]
cls.add_method('SetType',
'void',
[param('std::string', 'type'), param('std::string', 'n0', default_value='""'), param('ns3::AttributeValue const &', 'v0', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n1', default_value='""'), param('ns3::AttributeValue const &', 'v1', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n2', default_value='""'), param('ns3::AttributeValue const &', 'v2', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n3', default_value='""'), param('ns3::AttributeValue const &', 'v3', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n4', default_value='""'), param('ns3::AttributeValue const &', 'v4', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n5', default_value='""'), param('ns3::AttributeValue const &', 'v5', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n6', default_value='""'), param('ns3::AttributeValue const &', 'v6', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n7', default_value='""'), param('ns3::AttributeValue const &', 'v7', default_value='ns3::EmptyAttributeValue()')])
## nqos-wifi-mac-helper.h (module 'wifi'): ns3::Ptr<ns3::WifiMac> ns3::NqosWifiMacHelper::Create() const [member function]
cls.add_method('Create',
'ns3::Ptr< ns3::WifiMac >',
[],
is_const=True, visibility='private', is_virtual=True)
return
def register_Ns3Object_methods(root_module, cls):
## object.h (module 'core'): ns3::Object::Object() [constructor]
cls.add_constructor([])
## object.h (module 'core'): void ns3::Object::AggregateObject(ns3::Ptr<ns3::Object> other) [member function]
cls.add_method('AggregateObject',
'void',
[param('ns3::Ptr< ns3::Object >', 'other')])
## object.h (module 'core'): void ns3::Object::Dispose() [member function]
cls.add_method('Dispose',
'void',
[])
## object.h (module 'core'): ns3::Object::AggregateIterator ns3::Object::GetAggregateIterator() const [member function]
cls.add_method('GetAggregateIterator',
'ns3::Object::AggregateIterator',
[],
is_const=True)
## object.h (module 'core'): ns3::TypeId ns3::Object::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## object.h (module 'core'): static ns3::TypeId ns3::Object::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## object.h (module 'core'): void ns3::Object::Start() [member function]
cls.add_method('Start',
'void',
[])
## object.h (module 'core'): ns3::Object::Object(ns3::Object const & o) [copy constructor]
cls.add_constructor([param('ns3::Object const &', 'o')],
visibility='protected')
## object.h (module 'core'): void ns3::Object::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='protected', is_virtual=True)
## object.h (module 'core'): void ns3::Object::DoStart() [member function]
cls.add_method('DoStart',
'void',
[],
visibility='protected', is_virtual=True)
## object.h (module 'core'): void ns3::Object::NotifyNewAggregate() [member function]
cls.add_method('NotifyNewAggregate',
'void',
[],
visibility='protected', is_virtual=True)
return
def register_Ns3ObjectAggregateIterator_methods(root_module, cls):
## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator(ns3::Object::AggregateIterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Object::AggregateIterator const &', 'arg0')])
## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator() [constructor]
cls.add_constructor([])
## object.h (module 'core'): bool ns3::Object::AggregateIterator::HasNext() const [member function]
cls.add_method('HasNext',
'bool',
[],
is_const=True)
## object.h (module 'core'): ns3::Ptr<ns3::Object const> ns3::Object::AggregateIterator::Next() [member function]
cls.add_method('Next',
'ns3::Ptr< ns3::Object const >',
[])
return
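# Note: Object.AggregateObject() and the AggregateIterator registered above expose
# the ns-3 object-aggregation system to Python.  A minimal sketch, assuming the
# usual `import ns.core` / `import ns.network` bindings layout (the concrete class
# used here is only illustrative):
#
#   import ns.network
#   node = ns.network.Node()        # Node derives from ns3::Object
#   it = node.GetAggregateIterator()
#   while it.HasNext():
#       obj = it.Next()             # Ptr<const Object> wrapped as a Python object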
def register_Ns3ParetoVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::ParetoVariable::ParetoVariable(ns3::ParetoVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ParetoVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::ParetoVariable::ParetoVariable() [constructor]
cls.add_constructor([])
## random-variable.h (module 'core'): ns3::ParetoVariable::ParetoVariable(double m) [constructor]
cls.add_constructor([param('double', 'm')])
## random-variable.h (module 'core'): ns3::ParetoVariable::ParetoVariable(double m, double s) [constructor]
cls.add_constructor([param('double', 'm'), param('double', 's')])
## random-variable.h (module 'core'): ns3::ParetoVariable::ParetoVariable(double m, double s, double b) [constructor]
cls.add_constructor([param('double', 'm'), param('double', 's'), param('double', 'b')])
## random-variable.h (module 'core'): ns3::ParetoVariable::ParetoVariable(std::pair<double,double> params) [constructor]
cls.add_constructor([param('std::pair< double, double >', 'params')])
## random-variable.h (module 'core'): ns3::ParetoVariable::ParetoVariable(std::pair<double,double> params, double b) [constructor]
cls.add_constructor([param('std::pair< double, double >', 'params'), param('double', 'b')])
return
def register_Ns3PcapFileWrapper_methods(root_module, cls):
## pcap-file-wrapper.h (module 'network'): static ns3::TypeId ns3::PcapFileWrapper::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## pcap-file-wrapper.h (module 'network'): ns3::PcapFileWrapper::PcapFileWrapper() [constructor]
cls.add_constructor([])
## pcap-file-wrapper.h (module 'network'): bool ns3::PcapFileWrapper::Fail() const [member function]
cls.add_method('Fail',
'bool',
[],
is_const=True)
## pcap-file-wrapper.h (module 'network'): bool ns3::PcapFileWrapper::Eof() const [member function]
cls.add_method('Eof',
'bool',
[],
is_const=True)
## pcap-file-wrapper.h (module 'network'): void ns3::PcapFileWrapper::Clear() [member function]
cls.add_method('Clear',
'void',
[])
## pcap-file-wrapper.h (module 'network'): void ns3::PcapFileWrapper::Open(std::string const & filename, std::_Ios_Openmode mode) [member function]
cls.add_method('Open',
'void',
[param('std::string const &', 'filename'), param('std::_Ios_Openmode', 'mode')])
## pcap-file-wrapper.h (module 'network'): void ns3::PcapFileWrapper::Close() [member function]
cls.add_method('Close',
'void',
[])
## pcap-file-wrapper.h (module 'network'): void ns3::PcapFileWrapper::Init(uint32_t dataLinkType, uint32_t snapLen=std::numeric_limits<unsigned int>::max(), int32_t tzCorrection=ns3::PcapFile::ZONE_DEFAULT) [member function]
cls.add_method('Init',
'void',
[param('uint32_t', 'dataLinkType'), param('uint32_t', 'snapLen', default_value='std::numeric_limits<unsigned int>::max()'), param('int32_t', 'tzCorrection', default_value='ns3::PcapFile::ZONE_DEFAULT')])
## pcap-file-wrapper.h (module 'network'): void ns3::PcapFileWrapper::Write(ns3::Time t, ns3::Ptr<ns3::Packet const> p) [member function]
cls.add_method('Write',
'void',
[param('ns3::Time', 't'), param('ns3::Ptr< ns3::Packet const >', 'p')])
## pcap-file-wrapper.h (module 'network'): void ns3::PcapFileWrapper::Write(ns3::Time t, ns3::Header & header, ns3::Ptr<ns3::Packet const> p) [member function]
cls.add_method('Write',
'void',
[param('ns3::Time', 't'), param('ns3::Header &', 'header'), param('ns3::Ptr< ns3::Packet const >', 'p')])
## pcap-file-wrapper.h (module 'network'): void ns3::PcapFileWrapper::Write(ns3::Time t, uint8_t const * buffer, uint32_t length) [member function]
cls.add_method('Write',
'void',
[param('ns3::Time', 't'), param('uint8_t const *', 'buffer'), param('uint32_t', 'length')])
## pcap-file-wrapper.h (module 'network'): uint32_t ns3::PcapFileWrapper::GetMagic() [member function]
cls.add_method('GetMagic',
'uint32_t',
[])
## pcap-file-wrapper.h (module 'network'): uint16_t ns3::PcapFileWrapper::GetVersionMajor() [member function]
cls.add_method('GetVersionMajor',
'uint16_t',
[])
## pcap-file-wrapper.h (module 'network'): uint16_t ns3::PcapFileWrapper::GetVersionMinor() [member function]
cls.add_method('GetVersionMinor',
'uint16_t',
[])
## pcap-file-wrapper.h (module 'network'): int32_t ns3::PcapFileWrapper::GetTimeZoneOffset() [member function]
cls.add_method('GetTimeZoneOffset',
'int32_t',
[])
## pcap-file-wrapper.h (module 'network'): uint32_t ns3::PcapFileWrapper::GetSigFigs() [member function]
cls.add_method('GetSigFigs',
'uint32_t',
[])
## pcap-file-wrapper.h (module 'network'): uint32_t ns3::PcapFileWrapper::GetSnapLen() [member function]
cls.add_method('GetSnapLen',
'uint32_t',
[])
## pcap-file-wrapper.h (module 'network'): uint32_t ns3::PcapFileWrapper::GetDataLinkType() [member function]
cls.add_method('GetDataLinkType',
'uint32_t',
[])
return
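# Note: PcapFileWrapper wraps the low-level PcapFile behind an ns3::Object
# interface; the Open()/Init() pair registered above prepares the file (data link
# type, snap length, timezone correction) and the three Write() overloads accept a
# packet, a header plus packet, or a raw buffer, each stamped with an ns3::Time.
# This is the object that the pcap tracing helpers drive internally, so scripts
# rarely need to call it directly.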
def register_Ns3PropagationDelayModel_methods(root_module, cls):
## propagation-delay-model.h (module 'propagation'): ns3::PropagationDelayModel::PropagationDelayModel() [constructor]
cls.add_constructor([])
## propagation-delay-model.h (module 'propagation'): ns3::PropagationDelayModel::PropagationDelayModel(ns3::PropagationDelayModel const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PropagationDelayModel const &', 'arg0')])
## propagation-delay-model.h (module 'propagation'): ns3::Time ns3::PropagationDelayModel::GetDelay(ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function]
cls.add_method('GetDelay',
'ns3::Time',
[param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## propagation-delay-model.h (module 'propagation'): static ns3::TypeId ns3::PropagationDelayModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
return
def register_Ns3PropagationLossModel_methods(root_module, cls):
## propagation-loss-model.h (module 'propagation'): static ns3::TypeId ns3::PropagationLossModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## propagation-loss-model.h (module 'propagation'): ns3::PropagationLossModel::PropagationLossModel() [constructor]
cls.add_constructor([])
## propagation-loss-model.h (module 'propagation'): void ns3::PropagationLossModel::SetNext(ns3::Ptr<ns3::PropagationLossModel> next) [member function]
cls.add_method('SetNext',
'void',
[param('ns3::Ptr< ns3::PropagationLossModel >', 'next')])
## propagation-loss-model.h (module 'propagation'): double ns3::PropagationLossModel::CalcRxPower(double txPowerDbm, ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function]
cls.add_method('CalcRxPower',
'double',
[param('double', 'txPowerDbm'), param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')],
is_const=True)
## propagation-loss-model.h (module 'propagation'): double ns3::PropagationLossModel::DoCalcRxPower(double txPowerDbm, ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function]
cls.add_method('DoCalcRxPower',
'double',
[param('double', 'txPowerDbm'), param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')],
is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True)
return
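# Note: PropagationLossModel is the abstract base of the loss-model chain.
# CalcRxPower() walks the chain built with SetNext() and delegates the actual
# attenuation of each link to the private virtual DoCalcRxPower() of every
# concrete model.  A minimal chaining sketch, assuming `import ns.propagation`
# is available and with the concrete models named only for illustration:
#
#   import ns.propagation
#   loss = ns.propagation.LogDistancePropagationLossModel()
#   loss.SetNext(ns.propagation.RangePropagationLossModel())
#   # rx_dbm = loss.CalcRxPower(tx_dbm, mobility_a, mobility_b)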
def register_Ns3QosTag_methods(root_module, cls):
## qos-tag.h (module 'wifi'): ns3::QosTag::QosTag(ns3::QosTag const & arg0) [copy constructor]
cls.add_constructor([param('ns3::QosTag const &', 'arg0')])
## qos-tag.h (module 'wifi'): ns3::QosTag::QosTag() [constructor]
cls.add_constructor([])
## qos-tag.h (module 'wifi'): ns3::QosTag::QosTag(uint8_t tid) [constructor]
cls.add_constructor([param('uint8_t', 'tid')])
## qos-tag.h (module 'wifi'): void ns3::QosTag::Deserialize(ns3::TagBuffer i) [member function]
cls.add_method('Deserialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_virtual=True)
## qos-tag.h (module 'wifi'): ns3::TypeId ns3::QosTag::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## qos-tag.h (module 'wifi'): uint32_t ns3::QosTag::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## qos-tag.h (module 'wifi'): uint8_t ns3::QosTag::GetTid() const [member function]
cls.add_method('GetTid',
'uint8_t',
[],
is_const=True)
## qos-tag.h (module 'wifi'): static ns3::TypeId ns3::QosTag::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## qos-tag.h (module 'wifi'): void ns3::QosTag::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## qos-tag.h (module 'wifi'): void ns3::QosTag::Serialize(ns3::TagBuffer i) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_const=True, is_virtual=True)
## qos-tag.h (module 'wifi'): void ns3::QosTag::SetTid(uint8_t tid) [member function]
cls.add_method('SetTid',
'void',
[param('uint8_t', 'tid')])
## qos-tag.h (module 'wifi'): void ns3::QosTag::SetUserPriority(ns3::UserPriority up) [member function]
cls.add_method('SetUserPriority',
'void',
[param('ns3::UserPriority', 'up')])
return
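# Note: QosTag is a packet tag carrying an 802.11e TID; SetTid()/SetUserPriority()
# select the access category used by the QoS MAC when the packet is enqueued.
# A minimal sketch, assuming the usual `import ns.network` / `import ns.wifi`
# bindings layout:
#
#   import ns.network
#   import ns.wifi
#   pkt = ns.network.Packet(100)
#   tag = ns.wifi.QosTag()
#   tag.SetTid(6)                # voice-like TID; the value is illustrative
#   pkt.AddPacketTag(tag)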
def register_Ns3QosWifiMacHelper_methods(root_module, cls):
## qos-wifi-mac-helper.h (module 'wifi'): ns3::QosWifiMacHelper::QosWifiMacHelper(ns3::QosWifiMacHelper const & arg0) [copy constructor]
cls.add_constructor([param('ns3::QosWifiMacHelper const &', 'arg0')])
## qos-wifi-mac-helper.h (module 'wifi'): ns3::QosWifiMacHelper::QosWifiMacHelper() [constructor]
cls.add_constructor([])
## qos-wifi-mac-helper.h (module 'wifi'): static ns3::QosWifiMacHelper ns3::QosWifiMacHelper::Default() [member function]
cls.add_method('Default',
'ns3::QosWifiMacHelper',
[],
is_static=True)
## qos-wifi-mac-helper.h (module 'wifi'): void ns3::QosWifiMacHelper::SetBlockAckInactivityTimeoutForAc(ns3::AcIndex ac, uint16_t timeout) [member function]
cls.add_method('SetBlockAckInactivityTimeoutForAc',
'void',
[param('ns3::AcIndex', 'ac'), param('uint16_t', 'timeout')])
## qos-wifi-mac-helper.h (module 'wifi'): void ns3::QosWifiMacHelper::SetBlockAckThresholdForAc(ns3::AcIndex ac, uint8_t threshold) [member function]
cls.add_method('SetBlockAckThresholdForAc',
'void',
[param('ns3::AcIndex', 'ac'), param('uint8_t', 'threshold')])
## qos-wifi-mac-helper.h (module 'wifi'): void ns3::QosWifiMacHelper::SetMsduAggregatorForAc(ns3::AcIndex ac, std::string type, std::string n0="", ns3::AttributeValue const & v0=ns3::EmptyAttributeValue(), std::string n1="", ns3::AttributeValue const & v1=ns3::EmptyAttributeValue(), std::string n2="", ns3::AttributeValue const & v2=ns3::EmptyAttributeValue(), std::string n3="", ns3::AttributeValue const & v3=ns3::EmptyAttributeValue()) [member function]
cls.add_method('SetMsduAggregatorForAc',
'void',
[param('ns3::AcIndex', 'ac'), param('std::string', 'type'), param('std::string', 'n0', default_value='""'), param('ns3::AttributeValue const &', 'v0', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n1', default_value='""'), param('ns3::AttributeValue const &', 'v1', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n2', default_value='""'), param('ns3::AttributeValue const &', 'v2', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n3', default_value='""'), param('ns3::AttributeValue const &', 'v3', default_value='ns3::EmptyAttributeValue()')])
## qos-wifi-mac-helper.h (module 'wifi'): void ns3::QosWifiMacHelper::SetType(std::string type, std::string n0="", ns3::AttributeValue const & v0=ns3::EmptyAttributeValue(), std::string n1="", ns3::AttributeValue const & v1=ns3::EmptyAttributeValue(), std::string n2="", ns3::AttributeValue const & v2=ns3::EmptyAttributeValue(), std::string n3="", ns3::AttributeValue const & v3=ns3::EmptyAttributeValue(), std::string n4="", ns3::AttributeValue const & v4=ns3::EmptyAttributeValue(), std::string n5="", ns3::AttributeValue const & v5=ns3::EmptyAttributeValue(), std::string n6="", ns3::AttributeValue const & v6=ns3::EmptyAttributeValue(), std::string n7="", ns3::AttributeValue const & v7=ns3::EmptyAttributeValue()) [member function]
cls.add_method('SetType',
'void',
[param('std::string', 'type'), param('std::string', 'n0', default_value='""'), param('ns3::AttributeValue const &', 'v0', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n1', default_value='""'), param('ns3::AttributeValue const &', 'v1', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n2', default_value='""'), param('ns3::AttributeValue const &', 'v2', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n3', default_value='""'), param('ns3::AttributeValue const &', 'v3', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n4', default_value='""'), param('ns3::AttributeValue const &', 'v4', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n5', default_value='""'), param('ns3::AttributeValue const &', 'v5', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n6', default_value='""'), param('ns3::AttributeValue const &', 'v6', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n7', default_value='""'), param('ns3::AttributeValue const &', 'v7', default_value='ns3::EmptyAttributeValue()')])
## qos-wifi-mac-helper.h (module 'wifi'): ns3::Ptr<ns3::WifiMac> ns3::QosWifiMacHelper::Create() const [member function]
cls.add_method('Create',
'ns3::Ptr< ns3::WifiMac >',
[],
is_const=True, visibility='private', is_virtual=True)
return
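# Note: QosWifiMacHelper.Default() plus the attribute-pair style SetType()
# registered above mirror the usual C++ helper pattern.  A minimal sketch,
# assuming `import ns.wifi` and that the chosen MAC type exposes an Ssid
# attribute:
#
#   import ns.wifi
#   mac = ns.wifi.QosWifiMacHelper.Default()
#   mac.SetType("ns3::StaWifiMac",
#               "Ssid", ns.wifi.SsidValue(ns.wifi.Ssid("demo")))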
def register_Ns3RandomPropagationDelayModel_methods(root_module, cls):
## propagation-delay-model.h (module 'propagation'): ns3::RandomPropagationDelayModel::RandomPropagationDelayModel(ns3::RandomPropagationDelayModel const & arg0) [copy constructor]
cls.add_constructor([param('ns3::RandomPropagationDelayModel const &', 'arg0')])
## propagation-delay-model.h (module 'propagation'): ns3::RandomPropagationDelayModel::RandomPropagationDelayModel() [constructor]
cls.add_constructor([])
## propagation-delay-model.h (module 'propagation'): ns3::Time ns3::RandomPropagationDelayModel::GetDelay(ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function]
cls.add_method('GetDelay',
'ns3::Time',
[param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')],
is_const=True, is_virtual=True)
## propagation-delay-model.h (module 'propagation'): static ns3::TypeId ns3::RandomPropagationDelayModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
return
def register_Ns3RandomPropagationLossModel_methods(root_module, cls):
## propagation-loss-model.h (module 'propagation'): static ns3::TypeId ns3::RandomPropagationLossModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## propagation-loss-model.h (module 'propagation'): ns3::RandomPropagationLossModel::RandomPropagationLossModel() [constructor]
cls.add_constructor([])
## propagation-loss-model.h (module 'propagation'): double ns3::RandomPropagationLossModel::DoCalcRxPower(double txPowerDbm, ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function]
cls.add_method('DoCalcRxPower',
'double',
[param('double', 'txPowerDbm'), param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')],
is_const=True, visibility='private', is_virtual=True)
return
def register_Ns3RangePropagationLossModel_methods(root_module, cls):
## propagation-loss-model.h (module 'propagation'): static ns3::TypeId ns3::RangePropagationLossModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## propagation-loss-model.h (module 'propagation'): ns3::RangePropagationLossModel::RangePropagationLossModel() [constructor]
cls.add_constructor([])
## propagation-loss-model.h (module 'propagation'): double ns3::RangePropagationLossModel::DoCalcRxPower(double txPowerDbm, ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function]
cls.add_method('DoCalcRxPower',
'double',
[param('double', 'txPowerDbm'), param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')],
is_const=True, visibility='private', is_virtual=True)
return
def register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter< ns3::AttributeAccessor > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter< ns3::AttributeChecker > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter< ns3::AttributeValue > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount(ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter< ns3::CallbackImplBase > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3EventImpl_Ns3Empty_Ns3DefaultDeleter__lt__ns3EventImpl__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >::SimpleRefCount(ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter< ns3::EventImpl > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3InterferenceHelperEvent_Ns3Empty_Ns3DefaultDeleter__lt__ns3InterferenceHelperEvent__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::InterferenceHelper::Event, ns3::empty, ns3::DefaultDeleter<ns3::InterferenceHelper::Event> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::InterferenceHelper::Event, ns3::empty, ns3::DefaultDeleter<ns3::InterferenceHelper::Event> >::SimpleRefCount(ns3::SimpleRefCount<ns3::InterferenceHelper::Event, ns3::empty, ns3::DefaultDeleter<ns3::InterferenceHelper::Event> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::InterferenceHelper::Event, ns3::empty, ns3::DefaultDeleter< ns3::InterferenceHelper::Event > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::InterferenceHelper::Event, ns3::empty, ns3::DefaultDeleter<ns3::InterferenceHelper::Event> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3NixVector_Ns3Empty_Ns3DefaultDeleter__lt__ns3NixVector__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >::SimpleRefCount(ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter< ns3::NixVector > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3OutputStreamWrapper_Ns3Empty_Ns3DefaultDeleter__lt__ns3OutputStreamWrapper__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >::SimpleRefCount(ns3::SimpleRefCount<ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter< ns3::OutputStreamWrapper > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3Packet_Ns3Empty_Ns3DefaultDeleter__lt__ns3Packet__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >::SimpleRefCount(ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter< ns3::Packet > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::SimpleRefCount(ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter< ns3::TraceSourceAccessor > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3WifiInformationElement_Ns3Empty_Ns3DefaultDeleter__lt__ns3WifiInformationElement__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::WifiInformationElement, ns3::empty, ns3::DefaultDeleter<ns3::WifiInformationElement> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::WifiInformationElement, ns3::empty, ns3::DefaultDeleter<ns3::WifiInformationElement> >::SimpleRefCount(ns3::SimpleRefCount<ns3::WifiInformationElement, ns3::empty, ns3::DefaultDeleter<ns3::WifiInformationElement> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::WifiInformationElement, ns3::empty, ns3::DefaultDeleter< ns3::WifiInformationElement > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::WifiInformationElement, ns3::empty, ns3::DefaultDeleter<ns3::WifiInformationElement> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
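# Note: the register_Ns3SimpleRefCount__* functions above (AttributeAccessor,
# AttributeChecker, AttributeValue, CallbackImplBase, EventImpl,
# InterferenceHelper::Event, NixVector, OutputStreamWrapper, Packet,
# TraceSourceAccessor, WifiInformationElement) each register one instantiation of
# the SimpleRefCount<T, PARENT, DELETER> reference-counting base so that
# ns3::Ptr<T> values crossing the C++/Python boundary keep correct reference
# counts.  Only the default constructor, the copy constructor and the static
# Cleanup() hook are exposed; the templates carry no other public API.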
def register_Ns3ThreeLogDistancePropagationLossModel_methods(root_module, cls):
## propagation-loss-model.h (module 'propagation'): static ns3::TypeId ns3::ThreeLogDistancePropagationLossModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## propagation-loss-model.h (module 'propagation'): ns3::ThreeLogDistancePropagationLossModel::ThreeLogDistancePropagationLossModel() [constructor]
cls.add_constructor([])
## propagation-loss-model.h (module 'propagation'): double ns3::ThreeLogDistancePropagationLossModel::DoCalcRxPower(double txPowerDbm, ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function]
cls.add_method('DoCalcRxPower',
'double',
[param('double', 'txPowerDbm'), param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')],
is_const=True, visibility='private', is_virtual=True)
return
def register_Ns3Time_methods(root_module, cls):
cls.add_inplace_numeric_operator('+=', param('ns3::Time const &', 'right'))
cls.add_binary_comparison_operator('!=')
cls.add_binary_numeric_operator('+', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', 'right'))
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('>')
cls.add_inplace_numeric_operator('-=', param('ns3::Time const &', 'right'))
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('<=')
cls.add_binary_comparison_operator('==')
cls.add_binary_comparison_operator('>=')
## nstime.h (module 'core'): ns3::Time::Time() [constructor]
cls.add_constructor([])
## nstime.h (module 'core'): ns3::Time::Time(ns3::Time const & o) [copy constructor]
cls.add_constructor([param('ns3::Time const &', 'o')])
## nstime.h (module 'core'): ns3::Time::Time(double v) [constructor]
cls.add_constructor([param('double', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(int v) [constructor]
cls.add_constructor([param('int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(long int v) [constructor]
cls.add_constructor([param('long int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(long long int v) [constructor]
cls.add_constructor([param('long long int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(unsigned int v) [constructor]
cls.add_constructor([param('unsigned int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(long unsigned int v) [constructor]
cls.add_constructor([param('long unsigned int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(long long unsigned int v) [constructor]
cls.add_constructor([param('long long unsigned int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(std::string const & s) [constructor]
cls.add_constructor([param('std::string const &', 's')])
## nstime.h (module 'core'): ns3::Time::Time(ns3::int64x64_t const & value) [constructor]
cls.add_constructor([param('ns3::int64x64_t const &', 'value')])
## nstime.h (module 'core'): int ns3::Time::Compare(ns3::Time const & o) const [member function]
cls.add_method('Compare',
'int',
[param('ns3::Time const &', 'o')],
is_const=True)
## nstime.h (module 'core'): static ns3::Time ns3::Time::From(ns3::int64x64_t const & from, ns3::Time::Unit timeUnit) [member function]
cls.add_method('From',
'ns3::Time',
[param('ns3::int64x64_t const &', 'from'), param('ns3::Time::Unit', 'timeUnit')],
is_static=True)
## nstime.h (module 'core'): static ns3::Time ns3::Time::From(ns3::int64x64_t const & value) [member function]
cls.add_method('From',
'ns3::Time',
[param('ns3::int64x64_t const &', 'value')],
is_static=True)
## nstime.h (module 'core'): static ns3::Time ns3::Time::FromDouble(double value, ns3::Time::Unit timeUnit) [member function]
cls.add_method('FromDouble',
'ns3::Time',
[param('double', 'value'), param('ns3::Time::Unit', 'timeUnit')],
is_static=True)
## nstime.h (module 'core'): static ns3::Time ns3::Time::FromInteger(uint64_t value, ns3::Time::Unit timeUnit) [member function]
cls.add_method('FromInteger',
'ns3::Time',
[param('uint64_t', 'value'), param('ns3::Time::Unit', 'timeUnit')],
is_static=True)
## nstime.h (module 'core'): double ns3::Time::GetDouble() const [member function]
cls.add_method('GetDouble',
'double',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetFemtoSeconds() const [member function]
cls.add_method('GetFemtoSeconds',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetInteger() const [member function]
cls.add_method('GetInteger',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetMicroSeconds() const [member function]
cls.add_method('GetMicroSeconds',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetMilliSeconds() const [member function]
cls.add_method('GetMilliSeconds',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetNanoSeconds() const [member function]
cls.add_method('GetNanoSeconds',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetPicoSeconds() const [member function]
cls.add_method('GetPicoSeconds',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): static ns3::Time::Unit ns3::Time::GetResolution() [member function]
cls.add_method('GetResolution',
'ns3::Time::Unit',
[],
is_static=True)
## nstime.h (module 'core'): double ns3::Time::GetSeconds() const [member function]
cls.add_method('GetSeconds',
'double',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetTimeStep() const [member function]
cls.add_method('GetTimeStep',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): bool ns3::Time::IsNegative() const [member function]
cls.add_method('IsNegative',
'bool',
[],
is_const=True)
## nstime.h (module 'core'): bool ns3::Time::IsPositive() const [member function]
cls.add_method('IsPositive',
'bool',
[],
is_const=True)
## nstime.h (module 'core'): bool ns3::Time::IsStrictlyNegative() const [member function]
cls.add_method('IsStrictlyNegative',
'bool',
[],
is_const=True)
## nstime.h (module 'core'): bool ns3::Time::IsStrictlyPositive() const [member function]
cls.add_method('IsStrictlyPositive',
'bool',
[],
is_const=True)
## nstime.h (module 'core'): bool ns3::Time::IsZero() const [member function]
cls.add_method('IsZero',
'bool',
[],
is_const=True)
## nstime.h (module 'core'): static void ns3::Time::SetResolution(ns3::Time::Unit resolution) [member function]
cls.add_method('SetResolution',
'void',
[param('ns3::Time::Unit', 'resolution')],
is_static=True)
## nstime.h (module 'core'): ns3::int64x64_t ns3::Time::To(ns3::Time::Unit timeUnit) const [member function]
cls.add_method('To',
'ns3::int64x64_t',
[param('ns3::Time::Unit', 'timeUnit')],
is_const=True)
## nstime.h (module 'core'): double ns3::Time::ToDouble(ns3::Time::Unit timeUnit) const [member function]
cls.add_method('ToDouble',
'double',
[param('ns3::Time::Unit', 'timeUnit')],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::ToInteger(ns3::Time::Unit timeUnit) const [member function]
cls.add_method('ToInteger',
'int64_t',
[param('ns3::Time::Unit', 'timeUnit')],
is_const=True)
return
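# Note: together with the operators added at the top of register_Ns3Time_methods
# (+, -, the comparisons and the output-stream operator), these methods make
# ns3::Time behave like a value type in Python.  A minimal sketch, assuming the
# usual `import ns.core` layout and the Seconds()/MilliSeconds() helper functions
# exposed by the core module:
#
#   import ns.core
#   t = ns.core.Seconds(1.5) + ns.core.MilliSeconds(250)
#   assert t.GetMilliSeconds() == 1750
#   print(t.GetSeconds())        # 1.75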
def register_Ns3TraceSourceAccessor_methods(root_module, cls):
## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor(ns3::TraceSourceAccessor const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TraceSourceAccessor const &', 'arg0')])
## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor() [constructor]
cls.add_constructor([])
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Connect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
cls.add_method('Connect',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::ConnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
cls.add_method('ConnectWithoutContext',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Disconnect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
cls.add_method('Disconnect',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::DisconnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
cls.add_method('DisconnectWithoutContext',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3Trailer_methods(root_module, cls):
cls.add_output_stream_operator()
## trailer.h (module 'network'): ns3::Trailer::Trailer() [constructor]
cls.add_constructor([])
## trailer.h (module 'network'): ns3::Trailer::Trailer(ns3::Trailer const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Trailer const &', 'arg0')])
## trailer.h (module 'network'): uint32_t ns3::Trailer::Deserialize(ns3::Buffer::Iterator end) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'end')],
is_pure_virtual=True, is_virtual=True)
## trailer.h (module 'network'): uint32_t ns3::Trailer::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trailer.h (module 'network'): static ns3::TypeId ns3::Trailer::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## trailer.h (module 'network'): void ns3::Trailer::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trailer.h (module 'network'): void ns3::Trailer::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3TwoRayGroundPropagationLossModel_methods(root_module, cls):
## propagation-loss-model.h (module 'propagation'): static ns3::TypeId ns3::TwoRayGroundPropagationLossModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## propagation-loss-model.h (module 'propagation'): ns3::TwoRayGroundPropagationLossModel::TwoRayGroundPropagationLossModel() [constructor]
cls.add_constructor([])
## propagation-loss-model.h (module 'propagation'): void ns3::TwoRayGroundPropagationLossModel::SetLambda(double frequency, double speed) [member function]
cls.add_method('SetLambda',
'void',
[param('double', 'frequency'), param('double', 'speed')])
## propagation-loss-model.h (module 'propagation'): void ns3::TwoRayGroundPropagationLossModel::SetLambda(double lambda) [member function]
cls.add_method('SetLambda',
'void',
[param('double', 'lambda')])
## propagation-loss-model.h (module 'propagation'): void ns3::TwoRayGroundPropagationLossModel::SetSystemLoss(double systemLoss) [member function]
cls.add_method('SetSystemLoss',
'void',
[param('double', 'systemLoss')])
## propagation-loss-model.h (module 'propagation'): void ns3::TwoRayGroundPropagationLossModel::SetMinDistance(double minDistance) [member function]
cls.add_method('SetMinDistance',
'void',
[param('double', 'minDistance')])
## propagation-loss-model.h (module 'propagation'): double ns3::TwoRayGroundPropagationLossModel::GetMinDistance() const [member function]
cls.add_method('GetMinDistance',
'double',
[],
is_const=True)
## propagation-loss-model.h (module 'propagation'): double ns3::TwoRayGroundPropagationLossModel::GetLambda() const [member function]
cls.add_method('GetLambda',
'double',
[],
is_const=True)
## propagation-loss-model.h (module 'propagation'): double ns3::TwoRayGroundPropagationLossModel::GetSystemLoss() const [member function]
cls.add_method('GetSystemLoss',
'double',
[],
is_const=True)
## propagation-loss-model.h (module 'propagation'): void ns3::TwoRayGroundPropagationLossModel::SetHeightAboveZ(double heightAboveZ) [member function]
cls.add_method('SetHeightAboveZ',
'void',
[param('double', 'heightAboveZ')])
## propagation-loss-model.h (module 'propagation'): double ns3::TwoRayGroundPropagationLossModel::DoCalcRxPower(double txPowerDbm, ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function]
cls.add_method('DoCalcRxPower',
'double',
[param('double', 'txPowerDbm'), param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')],
is_const=True, visibility='private', is_virtual=True)
return
def register_Ns3WifiActionHeader_methods(root_module, cls):
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader::WifiActionHeader(ns3::WifiActionHeader const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WifiActionHeader const &', 'arg0')])
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader::WifiActionHeader() [constructor]
cls.add_constructor([])
## mgt-headers.h (module 'wifi'): uint32_t ns3::WifiActionHeader::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader::ActionValue ns3::WifiActionHeader::GetAction() [member function]
cls.add_method('GetAction',
'ns3::WifiActionHeader::ActionValue',
[])
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader::CategoryValue ns3::WifiActionHeader::GetCategory() [member function]
cls.add_method('GetCategory',
'ns3::WifiActionHeader::CategoryValue',
[])
## mgt-headers.h (module 'wifi'): ns3::TypeId ns3::WifiActionHeader::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): uint32_t ns3::WifiActionHeader::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): static ns3::TypeId ns3::WifiActionHeader::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## mgt-headers.h (module 'wifi'): void ns3::WifiActionHeader::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): void ns3::WifiActionHeader::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): void ns3::WifiActionHeader::SetAction(ns3::WifiActionHeader::CategoryValue type, ns3::WifiActionHeader::ActionValue action) [member function]
cls.add_method('SetAction',
'void',
[param('ns3::WifiActionHeader::CategoryValue', 'type'), param('ns3::WifiActionHeader::ActionValue', 'action')])
return
def register_Ns3WifiActionHeaderActionValue_methods(root_module, cls):
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader::ActionValue::ActionValue() [constructor]
cls.add_constructor([])
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader::ActionValue::ActionValue(ns3::WifiActionHeader::ActionValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WifiActionHeader::ActionValue const &', 'arg0')])
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader::ActionValue::blockAck [variable]
cls.add_instance_attribute('blockAck', 'ns3::WifiActionHeader::BlockAckActionValue', is_const=False)
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader::ActionValue::interwork [variable]
cls.add_instance_attribute('interwork', 'ns3::WifiActionHeader::InterworkActionValue', is_const=False)
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader::ActionValue::linkMetrtic [variable]
cls.add_instance_attribute('linkMetrtic', 'ns3::WifiActionHeader::LinkMetricActionValue', is_const=False)
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader::ActionValue::pathSelection [variable]
cls.add_instance_attribute('pathSelection', 'ns3::WifiActionHeader::PathSelectionActionValue', is_const=False)
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader::ActionValue::peerLink [variable]
cls.add_instance_attribute('peerLink', 'ns3::WifiActionHeader::PeerLinkMgtActionValue', is_const=False)
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader::ActionValue::resourceCoordination [variable]
cls.add_instance_attribute('resourceCoordination', 'ns3::WifiActionHeader::ResourceCoordinationActionValue', is_const=False)
return
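# Note: the 'linkMetrtic' member name registered above is not a transcription
# error in the bindings; it reproduces the spelling used by the scanned
# mgt-headers.h declaration of WifiActionHeader::ActionValue, so it must stay
# as-is for the attribute to resolve against the C++ union member.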
def register_Ns3WifiInformationElement_methods(root_module, cls):
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('==')
## wifi-information-element.h (module 'wifi'): ns3::WifiInformationElement::WifiInformationElement() [constructor]
cls.add_constructor([])
## wifi-information-element.h (module 'wifi'): ns3::WifiInformationElement::WifiInformationElement(ns3::WifiInformationElement const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WifiInformationElement const &', 'arg0')])
## wifi-information-element.h (module 'wifi'): ns3::Buffer::Iterator ns3::WifiInformationElement::Deserialize(ns3::Buffer::Iterator i) [member function]
cls.add_method('Deserialize',
'ns3::Buffer::Iterator',
[param('ns3::Buffer::Iterator', 'i')])
## wifi-information-element.h (module 'wifi'): ns3::Buffer::Iterator ns3::WifiInformationElement::DeserializeIfPresent(ns3::Buffer::Iterator i) [member function]
cls.add_method('DeserializeIfPresent',
'ns3::Buffer::Iterator',
[param('ns3::Buffer::Iterator', 'i')])
## wifi-information-element.h (module 'wifi'): uint8_t ns3::WifiInformationElement::DeserializeInformationField(ns3::Buffer::Iterator start, uint8_t length) [member function]
cls.add_method('DeserializeInformationField',
'uint8_t',
[param('ns3::Buffer::Iterator', 'start'), param('uint8_t', 'length')],
is_pure_virtual=True, is_virtual=True)
## wifi-information-element.h (module 'wifi'): ns3::WifiInformationElementId ns3::WifiInformationElement::ElementId() const [member function]
cls.add_method('ElementId',
'ns3::WifiInformationElementId',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-information-element.h (module 'wifi'): uint8_t ns3::WifiInformationElement::GetInformationFieldSize() const [member function]
cls.add_method('GetInformationFieldSize',
'uint8_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-information-element.h (module 'wifi'): uint16_t ns3::WifiInformationElement::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint16_t',
[],
is_const=True)
## wifi-information-element.h (module 'wifi'): void ns3::WifiInformationElement::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## wifi-information-element.h (module 'wifi'): ns3::Buffer::Iterator ns3::WifiInformationElement::Serialize(ns3::Buffer::Iterator i) const [member function]
cls.add_method('Serialize',
'ns3::Buffer::Iterator',
[param('ns3::Buffer::Iterator', 'i')],
is_const=True)
## wifi-information-element.h (module 'wifi'): void ns3::WifiInformationElement::SerializeInformationField(ns3::Buffer::Iterator start) const [member function]
cls.add_method('SerializeInformationField',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3WifiInformationElementVector_methods(root_module, cls):
cls.add_binary_comparison_operator('==')
## wifi-information-element-vector.h (module 'wifi'): ns3::WifiInformationElementVector::WifiInformationElementVector(ns3::WifiInformationElementVector const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WifiInformationElementVector const &', 'arg0')])
## wifi-information-element-vector.h (module 'wifi'): ns3::WifiInformationElementVector::WifiInformationElementVector() [constructor]
cls.add_constructor([])
## wifi-information-element-vector.h (module 'wifi'): bool ns3::WifiInformationElementVector::AddInformationElement(ns3::Ptr<ns3::WifiInformationElement> element) [member function]
cls.add_method('AddInformationElement',
'bool',
[param('ns3::Ptr< ns3::WifiInformationElement >', 'element')])
## wifi-information-element-vector.h (module 'wifi'): __gnu_cxx::__normal_iterator<ns3::Ptr<ns3::WifiInformationElement>*,std::vector<ns3::Ptr<ns3::WifiInformationElement>, std::allocator<ns3::Ptr<ns3::WifiInformationElement> > > > ns3::WifiInformationElementVector::Begin() [member function]
cls.add_method('Begin',
'__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::WifiInformationElement >, std::vector< ns3::Ptr< ns3::WifiInformationElement > > >',
[])
## wifi-information-element-vector.h (module 'wifi'): uint32_t ns3::WifiInformationElementVector::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## wifi-information-element-vector.h (module 'wifi'): uint32_t ns3::WifiInformationElementVector::DeserializeSingleIe(ns3::Buffer::Iterator start) [member function]
cls.add_method('DeserializeSingleIe',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## wifi-information-element-vector.h (module 'wifi'): __gnu_cxx::__normal_iterator<ns3::Ptr<ns3::WifiInformationElement>*,std::vector<ns3::Ptr<ns3::WifiInformationElement>, std::allocator<ns3::Ptr<ns3::WifiInformationElement> > > > ns3::WifiInformationElementVector::End() [member function]
cls.add_method('End',
'__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::WifiInformationElement >, std::vector< ns3::Ptr< ns3::WifiInformationElement > > >',
[])
## wifi-information-element-vector.h (module 'wifi'): ns3::Ptr<ns3::WifiInformationElement> ns3::WifiInformationElementVector::FindFirst(ns3::WifiInformationElementId id) const [member function]
cls.add_method('FindFirst',
'ns3::Ptr< ns3::WifiInformationElement >',
[param('ns3::WifiInformationElementId', 'id')],
is_const=True)
## wifi-information-element-vector.h (module 'wifi'): ns3::TypeId ns3::WifiInformationElementVector::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## wifi-information-element-vector.h (module 'wifi'): uint32_t ns3::WifiInformationElementVector::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## wifi-information-element-vector.h (module 'wifi'): static ns3::TypeId ns3::WifiInformationElementVector::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## wifi-information-element-vector.h (module 'wifi'): void ns3::WifiInformationElementVector::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## wifi-information-element-vector.h (module 'wifi'): void ns3::WifiInformationElementVector::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## wifi-information-element-vector.h (module 'wifi'): void ns3::WifiInformationElementVector::SetMaxSize(uint16_t size) [member function]
cls.add_method('SetMaxSize',
'void',
[param('uint16_t', 'size')])
## wifi-information-element-vector.h (module 'wifi'): uint32_t ns3::WifiInformationElementVector::GetSize() const [member function]
cls.add_method('GetSize',
'uint32_t',
[],
is_const=True, visibility='protected')
return
def register_Ns3WifiMac_methods(root_module, cls):
## wifi-mac.h (module 'wifi'): ns3::WifiMac::WifiMac() [constructor]
cls.add_constructor([])
## wifi-mac.h (module 'wifi'): ns3::WifiMac::WifiMac(ns3::WifiMac const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WifiMac const &', 'arg0')])
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::ConfigureStandard(ns3::WifiPhyStandard standard) [member function]
cls.add_method('ConfigureStandard',
'void',
[param('ns3::WifiPhyStandard', 'standard')])
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::Enqueue(ns3::Ptr<ns3::Packet const> packet, ns3::Mac48Address to, ns3::Mac48Address from) [member function]
cls.add_method('Enqueue',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet'), param('ns3::Mac48Address', 'to'), param('ns3::Mac48Address', 'from')],
is_pure_virtual=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::Enqueue(ns3::Ptr<ns3::Packet const> packet, ns3::Mac48Address to) [member function]
cls.add_method('Enqueue',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet'), param('ns3::Mac48Address', 'to')],
is_pure_virtual=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): ns3::Time ns3::WifiMac::GetAckTimeout() const [member function]
cls.add_method('GetAckTimeout',
'ns3::Time',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): ns3::Mac48Address ns3::WifiMac::GetAddress() const [member function]
cls.add_method('GetAddress',
'ns3::Mac48Address',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): ns3::Time ns3::WifiMac::GetBasicBlockAckTimeout() const [member function]
cls.add_method('GetBasicBlockAckTimeout',
'ns3::Time',
[],
is_const=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): ns3::Mac48Address ns3::WifiMac::GetBssid() const [member function]
cls.add_method('GetBssid',
'ns3::Mac48Address',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): ns3::Time ns3::WifiMac::GetCompressedBlockAckTimeout() const [member function]
cls.add_method('GetCompressedBlockAckTimeout',
'ns3::Time',
[],
is_const=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): ns3::Time ns3::WifiMac::GetCtsTimeout() const [member function]
cls.add_method('GetCtsTimeout',
'ns3::Time',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): ns3::Time ns3::WifiMac::GetEifsNoDifs() const [member function]
cls.add_method('GetEifsNoDifs',
'ns3::Time',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): ns3::Time ns3::WifiMac::GetMaxPropagationDelay() const [member function]
cls.add_method('GetMaxPropagationDelay',
'ns3::Time',
[],
is_const=True)
## wifi-mac.h (module 'wifi'): ns3::Time ns3::WifiMac::GetMsduLifetime() const [member function]
cls.add_method('GetMsduLifetime',
'ns3::Time',
[],
is_const=True)
## wifi-mac.h (module 'wifi'): ns3::Time ns3::WifiMac::GetPifs() const [member function]
cls.add_method('GetPifs',
'ns3::Time',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): ns3::Time ns3::WifiMac::GetSifs() const [member function]
cls.add_method('GetSifs',
'ns3::Time',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): ns3::Time ns3::WifiMac::GetSlot() const [member function]
cls.add_method('GetSlot',
'ns3::Time',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): ns3::Ssid ns3::WifiMac::GetSsid() const [member function]
cls.add_method('GetSsid',
'ns3::Ssid',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): static ns3::TypeId ns3::WifiMac::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::NotifyPromiscRx(ns3::Ptr<ns3::Packet const> packet) [member function]
cls.add_method('NotifyPromiscRx',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet')])
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::NotifyRx(ns3::Ptr<ns3::Packet const> packet) [member function]
cls.add_method('NotifyRx',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet')])
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::NotifyRxDrop(ns3::Ptr<ns3::Packet const> packet) [member function]
cls.add_method('NotifyRxDrop',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet')])
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::NotifyTx(ns3::Ptr<ns3::Packet const> packet) [member function]
cls.add_method('NotifyTx',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet')])
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::NotifyTxDrop(ns3::Ptr<ns3::Packet const> packet) [member function]
cls.add_method('NotifyTxDrop',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet')])
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::SetAckTimeout(ns3::Time ackTimeout) [member function]
cls.add_method('SetAckTimeout',
'void',
[param('ns3::Time', 'ackTimeout')],
is_pure_virtual=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::SetAddress(ns3::Mac48Address address) [member function]
cls.add_method('SetAddress',
'void',
[param('ns3::Mac48Address', 'address')],
is_pure_virtual=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::SetBasicBlockAckTimeout(ns3::Time blockAckTimeout) [member function]
cls.add_method('SetBasicBlockAckTimeout',
'void',
[param('ns3::Time', 'blockAckTimeout')],
is_virtual=True)
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::SetCompressedBlockAckTimeout(ns3::Time blockAckTimeout) [member function]
cls.add_method('SetCompressedBlockAckTimeout',
'void',
[param('ns3::Time', 'blockAckTimeout')],
is_virtual=True)
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::SetCtsTimeout(ns3::Time ctsTimeout) [member function]
cls.add_method('SetCtsTimeout',
'void',
[param('ns3::Time', 'ctsTimeout')],
is_pure_virtual=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::SetEifsNoDifs(ns3::Time eifsNoDifs) [member function]
cls.add_method('SetEifsNoDifs',
'void',
[param('ns3::Time', 'eifsNoDifs')],
is_pure_virtual=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::SetForwardUpCallback(ns3::Callback<void, ns3::Ptr<ns3::Packet>, ns3::Mac48Address, ns3::Mac48Address, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> upCallback) [member function]
cls.add_method('SetForwardUpCallback',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::Mac48Address, ns3::Mac48Address, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'upCallback')],
is_pure_virtual=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::SetLinkDownCallback(ns3::Callback<void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> linkDown) [member function]
cls.add_method('SetLinkDownCallback',
'void',
[param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'linkDown')],
is_pure_virtual=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::SetLinkUpCallback(ns3::Callback<void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> linkUp) [member function]
cls.add_method('SetLinkUpCallback',
'void',
[param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'linkUp')],
is_pure_virtual=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::SetMaxPropagationDelay(ns3::Time delay) [member function]
cls.add_method('SetMaxPropagationDelay',
'void',
[param('ns3::Time', 'delay')])
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::SetPifs(ns3::Time pifs) [member function]
cls.add_method('SetPifs',
'void',
[param('ns3::Time', 'pifs')],
is_pure_virtual=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::SetPromisc() [member function]
cls.add_method('SetPromisc',
'void',
[],
is_pure_virtual=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::SetSifs(ns3::Time sifs) [member function]
cls.add_method('SetSifs',
'void',
[param('ns3::Time', 'sifs')],
is_pure_virtual=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::SetSlot(ns3::Time slotTime) [member function]
cls.add_method('SetSlot',
'void',
[param('ns3::Time', 'slotTime')],
is_pure_virtual=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::SetSsid(ns3::Ssid ssid) [member function]
cls.add_method('SetSsid',
'void',
[param('ns3::Ssid', 'ssid')],
is_pure_virtual=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::SetWifiPhy(ns3::Ptr<ns3::WifiPhy> phy) [member function]
cls.add_method('SetWifiPhy',
'void',
[param('ns3::Ptr< ns3::WifiPhy >', 'phy')],
is_pure_virtual=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::SetWifiRemoteStationManager(ns3::Ptr<ns3::WifiRemoteStationManager> stationManager) [member function]
cls.add_method('SetWifiRemoteStationManager',
'void',
[param('ns3::Ptr< ns3::WifiRemoteStationManager >', 'stationManager')],
is_pure_virtual=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): bool ns3::WifiMac::SupportsSendFrom() const [member function]
cls.add_method('SupportsSendFrom',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::ConfigureCCHDcf(ns3::Ptr<ns3::Dcf> dcf, uint32_t cwmin, uint32_t cwmax, ns3::AcIndex ac) [member function]
cls.add_method('ConfigureCCHDcf',
'void',
[param('ns3::Ptr< ns3::Dcf >', 'dcf'), param('uint32_t', 'cwmin'), param('uint32_t', 'cwmax'), param('ns3::AcIndex', 'ac')],
visibility='protected')
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::ConfigureDcf(ns3::Ptr<ns3::Dcf> dcf, uint32_t cwmin, uint32_t cwmax, ns3::AcIndex ac) [member function]
cls.add_method('ConfigureDcf',
'void',
[param('ns3::Ptr< ns3::Dcf >', 'dcf'), param('uint32_t', 'cwmin'), param('uint32_t', 'cwmax'), param('ns3::AcIndex', 'ac')],
visibility='protected')
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::FinishConfigureStandard(ns3::WifiPhyStandard standard) [member function]
cls.add_method('FinishConfigureStandard',
'void',
[param('ns3::WifiPhyStandard', 'standard')],
is_pure_virtual=True, visibility='private', is_virtual=True)
return
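# WifiMac itself is abstract (most accessors above are registered as pure
# virtual), so in Python one normally works with a concrete MAC obtained
# elsewhere, e.g. from a wifi net device.  A minimal sketch that only uses
# methods registered in this block, taking such an object as a parameter:
def _example_query_wifi_mac(mac):
    # 'mac' is assumed to be an instance of any concrete ns3::WifiMac subclass.
    timings = {
        'sifs': mac.GetSifs(),
        'slot': mac.GetSlot(),
        'pifs': mac.GetPifs(),
        'ack_timeout': mac.GetAckTimeout(),
    }
    return mac.GetAddress(), mac.GetSsid(), timings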
def register_Ns3WifiMacHeader_methods(root_module, cls):
## wifi-mac-header.h (module 'wifi'): ns3::WifiMacHeader::WifiMacHeader(ns3::WifiMacHeader const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WifiMacHeader const &', 'arg0')])
## wifi-mac-header.h (module 'wifi'): ns3::WifiMacHeader::WifiMacHeader() [constructor]
cls.add_constructor([])
## wifi-mac-header.h (module 'wifi'): uint32_t ns3::WifiMacHeader::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## wifi-mac-header.h (module 'wifi'): ns3::Mac48Address ns3::WifiMacHeader::GetAddr1() const [member function]
cls.add_method('GetAddr1',
'ns3::Mac48Address',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): ns3::Mac48Address ns3::WifiMacHeader::GetAddr2() const [member function]
cls.add_method('GetAddr2',
'ns3::Mac48Address',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): ns3::Mac48Address ns3::WifiMacHeader::GetAddr3() const [member function]
cls.add_method('GetAddr3',
'ns3::Mac48Address',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): ns3::Mac48Address ns3::WifiMacHeader::GetAddr4() const [member function]
cls.add_method('GetAddr4',
'ns3::Mac48Address',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): ns3::Time ns3::WifiMacHeader::GetDuration() const [member function]
cls.add_method('GetDuration',
'ns3::Time',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): uint16_t ns3::WifiMacHeader::GetFragmentNumber() const [member function]
cls.add_method('GetFragmentNumber',
'uint16_t',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): ns3::TypeId ns3::WifiMacHeader::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## wifi-mac-header.h (module 'wifi'): ns3::WifiMacHeader::QosAckPolicy ns3::WifiMacHeader::GetQosAckPolicy() const [member function]
cls.add_method('GetQosAckPolicy',
'ns3::WifiMacHeader::QosAckPolicy',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): uint8_t ns3::WifiMacHeader::GetQosTid() const [member function]
cls.add_method('GetQosTid',
'uint8_t',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): uint8_t ns3::WifiMacHeader::GetQosTxopLimit() const [member function]
cls.add_method('GetQosTxopLimit',
'uint8_t',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): uint16_t ns3::WifiMacHeader::GetRawDuration() const [member function]
cls.add_method('GetRawDuration',
'uint16_t',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): uint16_t ns3::WifiMacHeader::GetSequenceControl() const [member function]
cls.add_method('GetSequenceControl',
'uint16_t',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): uint16_t ns3::WifiMacHeader::GetSequenceNumber() const [member function]
cls.add_method('GetSequenceNumber',
'uint16_t',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): uint32_t ns3::WifiMacHeader::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## wifi-mac-header.h (module 'wifi'): uint32_t ns3::WifiMacHeader::GetSize() const [member function]
cls.add_method('GetSize',
'uint32_t',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): ns3::WifiMacType ns3::WifiMacHeader::GetType() const [member function]
cls.add_method('GetType',
'ns3::WifiMacType',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): static ns3::TypeId ns3::WifiMacHeader::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## wifi-mac-header.h (module 'wifi'): char const * ns3::WifiMacHeader::GetTypeString() const [member function]
cls.add_method('GetTypeString',
'char const *',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsAck() const [member function]
cls.add_method('IsAck',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsAction() const [member function]
cls.add_method('IsAction',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsAssocReq() const [member function]
cls.add_method('IsAssocReq',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsAssocResp() const [member function]
cls.add_method('IsAssocResp',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsAuthentication() const [member function]
cls.add_method('IsAuthentication',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsBeacon() const [member function]
cls.add_method('IsBeacon',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsBlockAck() const [member function]
cls.add_method('IsBlockAck',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsBlockAckReq() const [member function]
cls.add_method('IsBlockAckReq',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsCfpoll() const [member function]
cls.add_method('IsCfpoll',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsCtl() const [member function]
cls.add_method('IsCtl',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsCts() const [member function]
cls.add_method('IsCts',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsData() const [member function]
cls.add_method('IsData',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsDeauthentication() const [member function]
cls.add_method('IsDeauthentication',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsDisassociation() const [member function]
cls.add_method('IsDisassociation',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsFromDs() const [member function]
cls.add_method('IsFromDs',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsMgt() const [member function]
cls.add_method('IsMgt',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsMoreFragments() const [member function]
cls.add_method('IsMoreFragments',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsMultihopAction() const [member function]
cls.add_method('IsMultihopAction',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsProbeReq() const [member function]
cls.add_method('IsProbeReq',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsProbeResp() const [member function]
cls.add_method('IsProbeResp',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsQosAck() const [member function]
cls.add_method('IsQosAck',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsQosAmsdu() const [member function]
cls.add_method('IsQosAmsdu',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsQosBlockAck() const [member function]
cls.add_method('IsQosBlockAck',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsQosData() const [member function]
cls.add_method('IsQosData',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsQosEosp() const [member function]
cls.add_method('IsQosEosp',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsQosNoAck() const [member function]
cls.add_method('IsQosNoAck',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsReassocReq() const [member function]
cls.add_method('IsReassocReq',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsReassocResp() const [member function]
cls.add_method('IsReassocResp',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsRetry() const [member function]
cls.add_method('IsRetry',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsRts() const [member function]
cls.add_method('IsRts',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsToDs() const [member function]
cls.add_method('IsToDs',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetAction() [member function]
cls.add_method('SetAction',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetAddr1(ns3::Mac48Address address) [member function]
cls.add_method('SetAddr1',
'void',
[param('ns3::Mac48Address', 'address')])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetAddr2(ns3::Mac48Address address) [member function]
cls.add_method('SetAddr2',
'void',
[param('ns3::Mac48Address', 'address')])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetAddr3(ns3::Mac48Address address) [member function]
cls.add_method('SetAddr3',
'void',
[param('ns3::Mac48Address', 'address')])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetAddr4(ns3::Mac48Address address) [member function]
cls.add_method('SetAddr4',
'void',
[param('ns3::Mac48Address', 'address')])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetAssocReq() [member function]
cls.add_method('SetAssocReq',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetAssocResp() [member function]
cls.add_method('SetAssocResp',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetBeacon() [member function]
cls.add_method('SetBeacon',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetBlockAck() [member function]
cls.add_method('SetBlockAck',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetBlockAckReq() [member function]
cls.add_method('SetBlockAckReq',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetDsFrom() [member function]
cls.add_method('SetDsFrom',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetDsNotFrom() [member function]
cls.add_method('SetDsNotFrom',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetDsNotTo() [member function]
cls.add_method('SetDsNotTo',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetDsTo() [member function]
cls.add_method('SetDsTo',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetDuration(ns3::Time duration) [member function]
cls.add_method('SetDuration',
'void',
[param('ns3::Time', 'duration')])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetFragmentNumber(uint8_t frag) [member function]
cls.add_method('SetFragmentNumber',
'void',
[param('uint8_t', 'frag')])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetId(uint16_t id) [member function]
cls.add_method('SetId',
'void',
[param('uint16_t', 'id')])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetMoreFragments() [member function]
cls.add_method('SetMoreFragments',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetMultihopAction() [member function]
cls.add_method('SetMultihopAction',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetNoMoreFragments() [member function]
cls.add_method('SetNoMoreFragments',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetNoRetry() [member function]
cls.add_method('SetNoRetry',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetProbeReq() [member function]
cls.add_method('SetProbeReq',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetProbeResp() [member function]
cls.add_method('SetProbeResp',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetQosAckPolicy(ns3::WifiMacHeader::QosAckPolicy arg0) [member function]
cls.add_method('SetQosAckPolicy',
'void',
[param('ns3::WifiMacHeader::QosAckPolicy', 'arg0')])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetQosAmsdu() [member function]
cls.add_method('SetQosAmsdu',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetQosBlockAck() [member function]
cls.add_method('SetQosBlockAck',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetQosEosp() [member function]
cls.add_method('SetQosEosp',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetQosNoAck() [member function]
cls.add_method('SetQosNoAck',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetQosNoAmsdu() [member function]
cls.add_method('SetQosNoAmsdu',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetQosNoEosp() [member function]
cls.add_method('SetQosNoEosp',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetQosNormalAck() [member function]
cls.add_method('SetQosNormalAck',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetQosTid(uint8_t tid) [member function]
cls.add_method('SetQosTid',
'void',
[param('uint8_t', 'tid')])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetQosTxopLimit(uint8_t txop) [member function]
cls.add_method('SetQosTxopLimit',
'void',
[param('uint8_t', 'txop')])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetRawDuration(uint16_t duration) [member function]
cls.add_method('SetRawDuration',
'void',
[param('uint16_t', 'duration')])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetRetry() [member function]
cls.add_method('SetRetry',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetSequenceNumber(uint16_t seq) [member function]
cls.add_method('SetSequenceNumber',
'void',
[param('uint16_t', 'seq')])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetType(ns3::WifiMacType type) [member function]
cls.add_method('SetType',
'void',
[param('ns3::WifiMacType', 'type')])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetTypeData() [member function]
cls.add_method('SetTypeData',
'void',
[])
return
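# A short sketch of driving the WifiMacHeader bindings registered above:
# build a plain data header and inspect it.  The 'ns.wifi' module name is an
# assumption; every method used is bound in this block.
def _example_wifi_mac_header():
    import ns.wifi  # assumed import path for the generated wifi bindings
    hdr = ns.wifi.WifiMacHeader()
    hdr.SetTypeData()        # mark the frame as a data frame
    hdr.SetDsNotFrom()       # clear the From DS flag
    hdr.SetDsNotTo()         # clear the To DS flag
    assert hdr.IsData()      # reflected by the IsData() accessor above
    return hdr.GetSerializedSize()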
def register_Ns3WifiMacQueue_methods(root_module, cls):
## wifi-mac-queue.h (module 'wifi'): ns3::WifiMacQueue::WifiMacQueue(ns3::WifiMacQueue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WifiMacQueue const &', 'arg0')])
## wifi-mac-queue.h (module 'wifi'): ns3::WifiMacQueue::WifiMacQueue() [constructor]
cls.add_constructor([])
## wifi-mac-queue.h (module 'wifi'): ns3::Ptr<ns3::Packet const> ns3::WifiMacQueue::Dequeue(ns3::WifiMacHeader * hdr) [member function]
cls.add_method('Dequeue',
'ns3::Ptr< ns3::Packet const >',
[param('ns3::WifiMacHeader *', 'hdr')])
## wifi-mac-queue.h (module 'wifi'): ns3::Ptr<ns3::Packet const> ns3::WifiMacQueue::DequeueByTidAndAddress(ns3::WifiMacHeader * hdr, uint8_t tid, ns3::WifiMacHeader::AddressType type, ns3::Mac48Address addr) [member function]
cls.add_method('DequeueByTidAndAddress',
'ns3::Ptr< ns3::Packet const >',
[param('ns3::WifiMacHeader *', 'hdr'), param('uint8_t', 'tid'), param('ns3::WifiMacHeader::AddressType', 'type'), param('ns3::Mac48Address', 'addr')])
## wifi-mac-queue.h (module 'wifi'): ns3::Ptr<ns3::Packet const> ns3::WifiMacQueue::DequeueFirstAvailable(ns3::WifiMacHeader * hdr, ns3::Time & tStamp, ns3::QosBlockedDestinations const * blockedPackets) [member function]
cls.add_method('DequeueFirstAvailable',
'ns3::Ptr< ns3::Packet const >',
[param('ns3::WifiMacHeader *', 'hdr'), param('ns3::Time &', 'tStamp'), param('ns3::QosBlockedDestinations const *', 'blockedPackets')])
## wifi-mac-queue.h (module 'wifi'): void ns3::WifiMacQueue::Enqueue(ns3::Ptr<ns3::Packet const> packet, ns3::WifiMacHeader const & hdr) [member function]
cls.add_method('Enqueue',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet'), param('ns3::WifiMacHeader const &', 'hdr')])
## wifi-mac-queue.h (module 'wifi'): void ns3::WifiMacQueue::Flush() [member function]
cls.add_method('Flush',
'void',
[])
## wifi-mac-queue.h (module 'wifi'): ns3::Time ns3::WifiMacQueue::GetMaxDelay() const [member function]
cls.add_method('GetMaxDelay',
'ns3::Time',
[],
is_const=True)
## wifi-mac-queue.h (module 'wifi'): uint32_t ns3::WifiMacQueue::GetMaxSize() const [member function]
cls.add_method('GetMaxSize',
'uint32_t',
[],
is_const=True)
## wifi-mac-queue.h (module 'wifi'): uint32_t ns3::WifiMacQueue::GetNPacketsByTidAndAddress(uint8_t tid, ns3::WifiMacHeader::AddressType type, ns3::Mac48Address addr) [member function]
cls.add_method('GetNPacketsByTidAndAddress',
'uint32_t',
[param('uint8_t', 'tid'), param('ns3::WifiMacHeader::AddressType', 'type'), param('ns3::Mac48Address', 'addr')])
## wifi-mac-queue.h (module 'wifi'): uint32_t ns3::WifiMacQueue::GetSize() [member function]
cls.add_method('GetSize',
'uint32_t',
[])
## wifi-mac-queue.h (module 'wifi'): static ns3::TypeId ns3::WifiMacQueue::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## wifi-mac-queue.h (module 'wifi'): bool ns3::WifiMacQueue::IsEmpty() [member function]
cls.add_method('IsEmpty',
'bool',
[])
## wifi-mac-queue.h (module 'wifi'): ns3::Ptr<ns3::Packet const> ns3::WifiMacQueue::Peek(ns3::WifiMacHeader * hdr) [member function]
cls.add_method('Peek',
'ns3::Ptr< ns3::Packet const >',
[param('ns3::WifiMacHeader *', 'hdr')])
## wifi-mac-queue.h (module 'wifi'): ns3::Ptr<ns3::Packet const> ns3::WifiMacQueue::PeekByTidAndAddress(ns3::WifiMacHeader * hdr, uint8_t tid, ns3::WifiMacHeader::AddressType type, ns3::Mac48Address addr) [member function]
cls.add_method('PeekByTidAndAddress',
'ns3::Ptr< ns3::Packet const >',
[param('ns3::WifiMacHeader *', 'hdr'), param('uint8_t', 'tid'), param('ns3::WifiMacHeader::AddressType', 'type'), param('ns3::Mac48Address', 'addr')])
## wifi-mac-queue.h (module 'wifi'): ns3::Ptr<ns3::Packet const> ns3::WifiMacQueue::PeekFirstAvailable(ns3::WifiMacHeader * hdr, ns3::Time & tStamp, ns3::QosBlockedDestinations const * blockedPackets) [member function]
cls.add_method('PeekFirstAvailable',
'ns3::Ptr< ns3::Packet const >',
[param('ns3::WifiMacHeader *', 'hdr'), param('ns3::Time &', 'tStamp'), param('ns3::QosBlockedDestinations const *', 'blockedPackets')])
## wifi-mac-queue.h (module 'wifi'): void ns3::WifiMacQueue::PushFront(ns3::Ptr<ns3::Packet const> packet, ns3::WifiMacHeader const & hdr) [member function]
cls.add_method('PushFront',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet'), param('ns3::WifiMacHeader const &', 'hdr')])
## wifi-mac-queue.h (module 'wifi'): bool ns3::WifiMacQueue::Remove(ns3::Ptr<ns3::Packet const> packet) [member function]
cls.add_method('Remove',
'bool',
[param('ns3::Ptr< ns3::Packet const >', 'packet')])
## wifi-mac-queue.h (module 'wifi'): void ns3::WifiMacQueue::SetMaxDelay(ns3::Time delay) [member function]
cls.add_method('SetMaxDelay',
'void',
[param('ns3::Time', 'delay')])
## wifi-mac-queue.h (module 'wifi'): void ns3::WifiMacQueue::SetMaxSize(uint32_t maxSize) [member function]
cls.add_method('SetMaxSize',
'void',
[param('uint32_t', 'maxSize')])
return
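# Sketch of the WifiMacQueue API registered above: bound the queue, enqueue a
# packet with its MAC header, and inspect the occupancy.  The ns.network
# import and Packet construction are assumptions (they come from the network
# module, not this file); everything called on the queue itself is bound above.
def _example_wifi_mac_queue():
    import ns.wifi
    import ns.network  # assumed: provides ns3::Packet to Python
    queue = ns.wifi.WifiMacQueue()
    queue.SetMaxSize(400)                    # at most 400 queued packets
    hdr = ns.wifi.WifiMacHeader()
    hdr.SetTypeData()
    queue.Enqueue(ns.network.Packet(), hdr)  # Enqueue(packet, header)
    size = queue.GetSize()                   # -> 1
    queue.Flush()                            # drop everything again
    return size, queue.IsEmpty()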
def register_Ns3WifiPhy_methods(root_module, cls):
## wifi-phy.h (module 'wifi'): ns3::WifiPhy::WifiPhy(ns3::WifiPhy const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WifiPhy const &', 'arg0')])
## wifi-phy.h (module 'wifi'): ns3::WifiPhy::WifiPhy() [constructor]
cls.add_constructor([])
## wifi-phy.h (module 'wifi'): double ns3::WifiPhy::CalculateSnr(ns3::WifiMode txMode, double ber) const [member function]
cls.add_method('CalculateSnr',
'double',
[param('ns3::WifiMode', 'txMode'), param('double', 'ber')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): static ns3::Time ns3::WifiPhy::CalculateTxDuration(uint32_t size, ns3::WifiMode payloadMode, ns3::WifiPreamble preamble) [member function]
cls.add_method('CalculateTxDuration',
'ns3::Time',
[param('uint32_t', 'size'), param('ns3::WifiMode', 'payloadMode'), param('ns3::WifiPreamble', 'preamble')],
is_static=True)
## wifi-phy.h (module 'wifi'): void ns3::WifiPhy::ConfigureStandard(ns3::WifiPhyStandard standard) [member function]
cls.add_method('ConfigureStandard',
'void',
[param('ns3::WifiPhyStandard', 'standard')],
is_pure_virtual=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): ns3::Ptr<ns3::WifiChannel> ns3::WifiPhy::GetChannel() const [member function]
cls.add_method('GetChannel',
'ns3::Ptr< ns3::WifiChannel >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): uint16_t ns3::WifiPhy::GetChannelNumber() const [member function]
cls.add_method('GetChannelNumber',
'uint16_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): ns3::Time ns3::WifiPhy::GetDelayUntilIdle() [member function]
cls.add_method('GetDelayUntilIdle',
'ns3::Time',
[],
is_pure_virtual=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetDsssRate11Mbps() [member function]
cls.add_method('GetDsssRate11Mbps',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetDsssRate1Mbps() [member function]
cls.add_method('GetDsssRate1Mbps',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetDsssRate2Mbps() [member function]
cls.add_method('GetDsssRate2Mbps',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetDsssRate5_5Mbps() [member function]
cls.add_method('GetDsssRate5_5Mbps',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetErpOfdmRate12Mbps() [member function]
cls.add_method('GetErpOfdmRate12Mbps',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetErpOfdmRate18Mbps() [member function]
cls.add_method('GetErpOfdmRate18Mbps',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetErpOfdmRate24Mbps() [member function]
cls.add_method('GetErpOfdmRate24Mbps',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetErpOfdmRate36Mbps() [member function]
cls.add_method('GetErpOfdmRate36Mbps',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetErpOfdmRate48Mbps() [member function]
cls.add_method('GetErpOfdmRate48Mbps',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetErpOfdmRate54Mbps() [member function]
cls.add_method('GetErpOfdmRate54Mbps',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetErpOfdmRate6Mbps() [member function]
cls.add_method('GetErpOfdmRate6Mbps',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetErpOfdmRate9Mbps() [member function]
cls.add_method('GetErpOfdmRate9Mbps',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): ns3::Time ns3::WifiPhy::GetLastRxStartTime() const [member function]
cls.add_method('GetLastRxStartTime',
'ns3::Time',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): ns3::WifiMode ns3::WifiPhy::GetMode(uint32_t mode) const [member function]
cls.add_method('GetMode',
'ns3::WifiMode',
[param('uint32_t', 'mode')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): uint32_t ns3::WifiPhy::GetNModes() const [member function]
cls.add_method('GetNModes',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): uint32_t ns3::WifiPhy::GetNTxPower() const [member function]
cls.add_method('GetNTxPower',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate12Mbps() [member function]
cls.add_method('GetOfdmRate12Mbps',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate12MbpsBW10MHz() [member function]
cls.add_method('GetOfdmRate12MbpsBW10MHz',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate12MbpsBW5MHz() [member function]
cls.add_method('GetOfdmRate12MbpsBW5MHz',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate13_5MbpsBW5MHz() [member function]
cls.add_method('GetOfdmRate13_5MbpsBW5MHz',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate18Mbps() [member function]
cls.add_method('GetOfdmRate18Mbps',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate18MbpsBW10MHz() [member function]
cls.add_method('GetOfdmRate18MbpsBW10MHz',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate1_5MbpsBW5MHz() [member function]
cls.add_method('GetOfdmRate1_5MbpsBW5MHz',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate24Mbps() [member function]
cls.add_method('GetOfdmRate24Mbps',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate24MbpsBW10MHz() [member function]
cls.add_method('GetOfdmRate24MbpsBW10MHz',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate27MbpsBW10MHz() [member function]
cls.add_method('GetOfdmRate27MbpsBW10MHz',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate2_25MbpsBW5MHz() [member function]
cls.add_method('GetOfdmRate2_25MbpsBW5MHz',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate36Mbps() [member function]
cls.add_method('GetOfdmRate36Mbps',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate3MbpsBW10MHz() [member function]
cls.add_method('GetOfdmRate3MbpsBW10MHz',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate3MbpsBW5MHz() [member function]
cls.add_method('GetOfdmRate3MbpsBW5MHz',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate48Mbps() [member function]
cls.add_method('GetOfdmRate48Mbps',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate4_5MbpsBW10MHz() [member function]
cls.add_method('GetOfdmRate4_5MbpsBW10MHz',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate4_5MbpsBW5MHz() [member function]
cls.add_method('GetOfdmRate4_5MbpsBW5MHz',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate54Mbps() [member function]
cls.add_method('GetOfdmRate54Mbps',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate6Mbps() [member function]
cls.add_method('GetOfdmRate6Mbps',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate6MbpsBW10MHz() [member function]
cls.add_method('GetOfdmRate6MbpsBW10MHz',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate6MbpsBW5MHz() [member function]
cls.add_method('GetOfdmRate6MbpsBW5MHz',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate9Mbps() [member function]
cls.add_method('GetOfdmRate9Mbps',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate9MbpsBW10MHz() [member function]
cls.add_method('GetOfdmRate9MbpsBW10MHz',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate9MbpsBW5MHz() [member function]
cls.add_method('GetOfdmRate9MbpsBW5MHz',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static uint32_t ns3::WifiPhy::GetPayloadDurationMicroSeconds(uint32_t size, ns3::WifiMode payloadMode) [member function]
cls.add_method('GetPayloadDurationMicroSeconds',
'uint32_t',
[param('uint32_t', 'size'), param('ns3::WifiMode', 'payloadMode')],
is_static=True)
## wifi-phy.h (module 'wifi'): static uint32_t ns3::WifiPhy::GetPlcpHeaderDurationMicroSeconds(ns3::WifiMode payloadMode, ns3::WifiPreamble preamble) [member function]
cls.add_method('GetPlcpHeaderDurationMicroSeconds',
'uint32_t',
[param('ns3::WifiMode', 'payloadMode'), param('ns3::WifiPreamble', 'preamble')],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetPlcpHeaderMode(ns3::WifiMode payloadMode, ns3::WifiPreamble preamble) [member function]
cls.add_method('GetPlcpHeaderMode',
'ns3::WifiMode',
[param('ns3::WifiMode', 'payloadMode'), param('ns3::WifiPreamble', 'preamble')],
is_static=True)
## wifi-phy.h (module 'wifi'): static uint32_t ns3::WifiPhy::GetPlcpPreambleDurationMicroSeconds(ns3::WifiMode payloadMode, ns3::WifiPreamble preamble) [member function]
cls.add_method('GetPlcpPreambleDurationMicroSeconds',
'uint32_t',
[param('ns3::WifiMode', 'payloadMode'), param('ns3::WifiPreamble', 'preamble')],
is_static=True)
## wifi-phy.h (module 'wifi'): ns3::Time ns3::WifiPhy::GetStateDuration() [member function]
cls.add_method('GetStateDuration',
'ns3::Time',
[],
is_pure_virtual=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): double ns3::WifiPhy::GetTxPowerEnd() const [member function]
cls.add_method('GetTxPowerEnd',
'double',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): double ns3::WifiPhy::GetTxPowerStart() const [member function]
cls.add_method('GetTxPowerStart',
'double',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): static ns3::TypeId ns3::WifiPhy::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): bool ns3::WifiPhy::IsStateBusy() [member function]
cls.add_method('IsStateBusy',
'bool',
[],
is_pure_virtual=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): bool ns3::WifiPhy::IsStateCcaBusy() [member function]
cls.add_method('IsStateCcaBusy',
'bool',
[],
is_pure_virtual=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): bool ns3::WifiPhy::IsStateIdle() [member function]
cls.add_method('IsStateIdle',
'bool',
[],
is_pure_virtual=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): bool ns3::WifiPhy::IsStateRx() [member function]
cls.add_method('IsStateRx',
'bool',
[],
is_pure_virtual=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): bool ns3::WifiPhy::IsStateSwitching() [member function]
cls.add_method('IsStateSwitching',
'bool',
[],
is_pure_virtual=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): bool ns3::WifiPhy::IsStateTx() [member function]
cls.add_method('IsStateTx',
'bool',
[],
is_pure_virtual=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): void ns3::WifiPhy::NotifyMonitorSniffRx(ns3::Ptr<ns3::Packet const> packet, uint16_t channelFreqMhz, uint16_t channelNumber, uint32_t rate, bool isShortPreamble, double signalDbm, double noiseDbm) [member function]
cls.add_method('NotifyMonitorSniffRx',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet'), param('uint16_t', 'channelFreqMhz'), param('uint16_t', 'channelNumber'), param('uint32_t', 'rate'), param('bool', 'isShortPreamble'), param('double', 'signalDbm'), param('double', 'noiseDbm')])
## wifi-phy.h (module 'wifi'): void ns3::WifiPhy::NotifyMonitorSniffTx(ns3::Ptr<ns3::Packet const> packet, uint16_t channelFreqMhz, uint16_t channelNumber, uint32_t rate, bool isShortPreamble) [member function]
cls.add_method('NotifyMonitorSniffTx',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet'), param('uint16_t', 'channelFreqMhz'), param('uint16_t', 'channelNumber'), param('uint32_t', 'rate'), param('bool', 'isShortPreamble')])
## wifi-phy.h (module 'wifi'): void ns3::WifiPhy::NotifyRxBegin(ns3::Ptr<ns3::Packet const> packet) [member function]
cls.add_method('NotifyRxBegin',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet')])
## wifi-phy.h (module 'wifi'): void ns3::WifiPhy::NotifyRxDrop(ns3::Ptr<ns3::Packet const> packet) [member function]
cls.add_method('NotifyRxDrop',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet')])
## wifi-phy.h (module 'wifi'): void ns3::WifiPhy::NotifyRxEnd(ns3::Ptr<ns3::Packet const> packet) [member function]
cls.add_method('NotifyRxEnd',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet')])
## wifi-phy.h (module 'wifi'): void ns3::WifiPhy::NotifyTxBegin(ns3::Ptr<ns3::Packet const> packet) [member function]
cls.add_method('NotifyTxBegin',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet')])
## wifi-phy.h (module 'wifi'): void ns3::WifiPhy::NotifyTxDrop(ns3::Ptr<ns3::Packet const> packet) [member function]
cls.add_method('NotifyTxDrop',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet')])
## wifi-phy.h (module 'wifi'): void ns3::WifiPhy::NotifyTxEnd(ns3::Ptr<ns3::Packet const> packet) [member function]
cls.add_method('NotifyTxEnd',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet')])
## wifi-phy.h (module 'wifi'): void ns3::WifiPhy::RegisterListener(ns3::WifiPhyListener * listener) [member function]
cls.add_method('RegisterListener',
'void',
[param('ns3::WifiPhyListener *', 'listener')],
is_pure_virtual=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): void ns3::WifiPhy::SendPacket(ns3::Ptr<ns3::Packet const> packet, ns3::WifiMode mode, ns3::WifiPreamble preamble, uint8_t txPowerLevel) [member function]
cls.add_method('SendPacket',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet'), param('ns3::WifiMode', 'mode'), param('ns3::WifiPreamble', 'preamble'), param('uint8_t', 'txPowerLevel')],
is_pure_virtual=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): void ns3::WifiPhy::SetChannelNumber(uint16_t id) [member function]
cls.add_method('SetChannelNumber',
'void',
[param('uint16_t', 'id')],
is_pure_virtual=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): void ns3::WifiPhy::SetReceiveErrorCallback(ns3::Callback<void,ns3::Ptr<const ns3::Packet>,double,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> callback) [member function]
cls.add_method('SetReceiveErrorCallback',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::Packet const >, double, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')],
is_pure_virtual=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): void ns3::WifiPhy::SetReceiveOkCallback(ns3::Callback<void,ns3::Ptr<ns3::Packet>,double,ns3::WifiMode,ns3::WifiPreamble,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> callback) [member function]
cls.add_method('SetReceiveOkCallback',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::Packet >, double, ns3::WifiMode, ns3::WifiPreamble, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')],
is_pure_virtual=True, is_virtual=True)
return
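# The WifiPhy block above is mostly pure virtual, but it also registers a set
# of static helpers (the Get*Rate* mode factories and the duration
# calculators).  A minimal sketch using only those statics, assuming the
# generated 'ns.wifi' module name:
def _example_wifi_phy_statics():
    import ns.wifi  # assumed import path for the generated wifi bindings
    mode = ns.wifi.WifiPhy.GetOfdmRate6Mbps()  # 802.11a base OFDM rate
    # Payload airtime, in microseconds, for a 1500-byte frame at that rate.
    return ns.wifi.WifiPhy.GetPayloadDurationMicroSeconds(1500, mode)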
def register_Ns3WifiRemoteStationManager_methods(root_module, cls):
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStationManager::WifiRemoteStationManager(ns3::WifiRemoteStationManager const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WifiRemoteStationManager const &', 'arg0')])
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStationManager::WifiRemoteStationManager() [constructor]
cls.add_constructor([])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::AddBasicMode(ns3::WifiMode mode) [member function]
cls.add_method('AddBasicMode',
'void',
[param('ns3::WifiMode', 'mode')])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::AddSupportedMode(ns3::Mac48Address address, ns3::WifiMode mode) [member function]
cls.add_method('AddSupportedMode',
'void',
[param('ns3::Mac48Address', 'address'), param('ns3::WifiMode', 'mode')])
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiMode ns3::WifiRemoteStationManager::GetAckMode(ns3::Mac48Address address, ns3::WifiMode dataMode) [member function]
cls.add_method('GetAckMode',
'ns3::WifiMode',
[param('ns3::Mac48Address', 'address'), param('ns3::WifiMode', 'dataMode')])
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiMode ns3::WifiRemoteStationManager::GetBasicMode(uint32_t i) const [member function]
cls.add_method('GetBasicMode',
'ns3::WifiMode',
[param('uint32_t', 'i')],
is_const=True)
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiMode ns3::WifiRemoteStationManager::GetCtsMode(ns3::Mac48Address address, ns3::WifiMode rtsMode) [member function]
cls.add_method('GetCtsMode',
'ns3::WifiMode',
[param('ns3::Mac48Address', 'address'), param('ns3::WifiMode', 'rtsMode')])
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiMode ns3::WifiRemoteStationManager::GetDataMode(ns3::Mac48Address address, ns3::WifiMacHeader const * header, ns3::Ptr<ns3::Packet const> packet, uint32_t fullPacketSize) [member function]
cls.add_method('GetDataMode',
'ns3::WifiMode',
[param('ns3::Mac48Address', 'address'), param('ns3::WifiMacHeader const *', 'header'), param('ns3::Ptr< ns3::Packet const >', 'packet'), param('uint32_t', 'fullPacketSize')])
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiMode ns3::WifiRemoteStationManager::GetDefaultMode() const [member function]
cls.add_method('GetDefaultMode',
'ns3::WifiMode',
[],
is_const=True)
## wifi-remote-station-manager.h (module 'wifi'): uint32_t ns3::WifiRemoteStationManager::GetFragmentOffset(ns3::Mac48Address address, ns3::WifiMacHeader const * header, ns3::Ptr<ns3::Packet const> packet, uint32_t fragmentNumber) [member function]
cls.add_method('GetFragmentOffset',
'uint32_t',
[param('ns3::Mac48Address', 'address'), param('ns3::WifiMacHeader const *', 'header'), param('ns3::Ptr< ns3::Packet const >', 'packet'), param('uint32_t', 'fragmentNumber')])
## wifi-remote-station-manager.h (module 'wifi'): uint32_t ns3::WifiRemoteStationManager::GetFragmentSize(ns3::Mac48Address address, ns3::WifiMacHeader const * header, ns3::Ptr<ns3::Packet const> packet, uint32_t fragmentNumber) [member function]
cls.add_method('GetFragmentSize',
'uint32_t',
[param('ns3::Mac48Address', 'address'), param('ns3::WifiMacHeader const *', 'header'), param('ns3::Ptr< ns3::Packet const >', 'packet'), param('uint32_t', 'fragmentNumber')])
## wifi-remote-station-manager.h (module 'wifi'): uint32_t ns3::WifiRemoteStationManager::GetFragmentationThreshold() const [member function]
cls.add_method('GetFragmentationThreshold',
'uint32_t',
[],
is_const=True)
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStationInfo ns3::WifiRemoteStationManager::GetInfo(ns3::Mac48Address address) [member function]
cls.add_method('GetInfo',
'ns3::WifiRemoteStationInfo',
[param('ns3::Mac48Address', 'address')])
## wifi-remote-station-manager.h (module 'wifi'): uint32_t ns3::WifiRemoteStationManager::GetMaxSlrc() const [member function]
cls.add_method('GetMaxSlrc',
'uint32_t',
[],
is_const=True)
## wifi-remote-station-manager.h (module 'wifi'): uint32_t ns3::WifiRemoteStationManager::GetMaxSsrc() const [member function]
cls.add_method('GetMaxSsrc',
'uint32_t',
[],
is_const=True)
## wifi-remote-station-manager.h (module 'wifi'): uint32_t ns3::WifiRemoteStationManager::GetNBasicModes() const [member function]
cls.add_method('GetNBasicModes',
'uint32_t',
[],
is_const=True)
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiMode ns3::WifiRemoteStationManager::GetNonUnicastMode() const [member function]
cls.add_method('GetNonUnicastMode',
'ns3::WifiMode',
[],
is_const=True)
## wifi-remote-station-manager.h (module 'wifi'): uint32_t ns3::WifiRemoteStationManager::GetRtsCtsThreshold() const [member function]
cls.add_method('GetRtsCtsThreshold',
'uint32_t',
[],
is_const=True)
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiMode ns3::WifiRemoteStationManager::GetRtsMode(ns3::Mac48Address address, ns3::WifiMacHeader const * header, ns3::Ptr<ns3::Packet const> packet) [member function]
cls.add_method('GetRtsMode',
'ns3::WifiMode',
[param('ns3::Mac48Address', 'address'), param('ns3::WifiMacHeader const *', 'header'), param('ns3::Ptr< ns3::Packet const >', 'packet')])
## wifi-remote-station-manager.h (module 'wifi'): static ns3::TypeId ns3::WifiRemoteStationManager::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## wifi-remote-station-manager.h (module 'wifi'): bool ns3::WifiRemoteStationManager::IsAssociated(ns3::Mac48Address address) const [member function]
cls.add_method('IsAssociated',
'bool',
[param('ns3::Mac48Address', 'address')],
is_const=True)
## wifi-remote-station-manager.h (module 'wifi'): bool ns3::WifiRemoteStationManager::IsBrandNew(ns3::Mac48Address address) const [member function]
cls.add_method('IsBrandNew',
'bool',
[param('ns3::Mac48Address', 'address')],
is_const=True)
## wifi-remote-station-manager.h (module 'wifi'): bool ns3::WifiRemoteStationManager::IsLastFragment(ns3::Mac48Address address, ns3::WifiMacHeader const * header, ns3::Ptr<ns3::Packet const> packet, uint32_t fragmentNumber) [member function]
cls.add_method('IsLastFragment',
'bool',
[param('ns3::Mac48Address', 'address'), param('ns3::WifiMacHeader const *', 'header'), param('ns3::Ptr< ns3::Packet const >', 'packet'), param('uint32_t', 'fragmentNumber')])
## wifi-remote-station-manager.h (module 'wifi'): bool ns3::WifiRemoteStationManager::IsWaitAssocTxOk(ns3::Mac48Address address) const [member function]
cls.add_method('IsWaitAssocTxOk',
'bool',
[param('ns3::Mac48Address', 'address')],
is_const=True)
## wifi-remote-station-manager.h (module 'wifi'): bool ns3::WifiRemoteStationManager::NeedDataRetransmission(ns3::Mac48Address address, ns3::WifiMacHeader const * header, ns3::Ptr<ns3::Packet const> packet) [member function]
cls.add_method('NeedDataRetransmission',
'bool',
[param('ns3::Mac48Address', 'address'), param('ns3::WifiMacHeader const *', 'header'), param('ns3::Ptr< ns3::Packet const >', 'packet')])
## wifi-remote-station-manager.h (module 'wifi'): bool ns3::WifiRemoteStationManager::NeedFragmentation(ns3::Mac48Address address, ns3::WifiMacHeader const * header, ns3::Ptr<ns3::Packet const> packet) [member function]
cls.add_method('NeedFragmentation',
'bool',
[param('ns3::Mac48Address', 'address'), param('ns3::WifiMacHeader const *', 'header'), param('ns3::Ptr< ns3::Packet const >', 'packet')])
## wifi-remote-station-manager.h (module 'wifi'): bool ns3::WifiRemoteStationManager::NeedRts(ns3::Mac48Address address, ns3::WifiMacHeader const * header, ns3::Ptr<ns3::Packet const> packet) [member function]
cls.add_method('NeedRts',
'bool',
[param('ns3::Mac48Address', 'address'), param('ns3::WifiMacHeader const *', 'header'), param('ns3::Ptr< ns3::Packet const >', 'packet')])
## wifi-remote-station-manager.h (module 'wifi'): bool ns3::WifiRemoteStationManager::NeedRtsRetransmission(ns3::Mac48Address address, ns3::WifiMacHeader const * header, ns3::Ptr<ns3::Packet const> packet) [member function]
cls.add_method('NeedRtsRetransmission',
'bool',
[param('ns3::Mac48Address', 'address'), param('ns3::WifiMacHeader const *', 'header'), param('ns3::Ptr< ns3::Packet const >', 'packet')])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::PrepareForQueue(ns3::Mac48Address address, ns3::WifiMacHeader const * header, ns3::Ptr<ns3::Packet const> packet, uint32_t fullPacketSize) [member function]
cls.add_method('PrepareForQueue',
'void',
[param('ns3::Mac48Address', 'address'), param('ns3::WifiMacHeader const *', 'header'), param('ns3::Ptr< ns3::Packet const >', 'packet'), param('uint32_t', 'fullPacketSize')])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::RecordDisassociated(ns3::Mac48Address address) [member function]
cls.add_method('RecordDisassociated',
'void',
[param('ns3::Mac48Address', 'address')])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::RecordGotAssocTxFailed(ns3::Mac48Address address) [member function]
cls.add_method('RecordGotAssocTxFailed',
'void',
[param('ns3::Mac48Address', 'address')])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::RecordGotAssocTxOk(ns3::Mac48Address address) [member function]
cls.add_method('RecordGotAssocTxOk',
'void',
[param('ns3::Mac48Address', 'address')])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::RecordWaitAssocTxOk(ns3::Mac48Address address) [member function]
cls.add_method('RecordWaitAssocTxOk',
'void',
[param('ns3::Mac48Address', 'address')])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::ReportDataFailed(ns3::Mac48Address address, ns3::WifiMacHeader const * header) [member function]
cls.add_method('ReportDataFailed',
'void',
[param('ns3::Mac48Address', 'address'), param('ns3::WifiMacHeader const *', 'header')])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::ReportDataOk(ns3::Mac48Address address, ns3::WifiMacHeader const * header, double ackSnr, ns3::WifiMode ackMode, double dataSnr) [member function]
cls.add_method('ReportDataOk',
'void',
[param('ns3::Mac48Address', 'address'), param('ns3::WifiMacHeader const *', 'header'), param('double', 'ackSnr'), param('ns3::WifiMode', 'ackMode'), param('double', 'dataSnr')])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::ReportFinalDataFailed(ns3::Mac48Address address, ns3::WifiMacHeader const * header) [member function]
cls.add_method('ReportFinalDataFailed',
'void',
[param('ns3::Mac48Address', 'address'), param('ns3::WifiMacHeader const *', 'header')])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::ReportFinalRtsFailed(ns3::Mac48Address address, ns3::WifiMacHeader const * header) [member function]
cls.add_method('ReportFinalRtsFailed',
'void',
[param('ns3::Mac48Address', 'address'), param('ns3::WifiMacHeader const *', 'header')])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::ReportRtsFailed(ns3::Mac48Address address, ns3::WifiMacHeader const * header) [member function]
cls.add_method('ReportRtsFailed',
'void',
[param('ns3::Mac48Address', 'address'), param('ns3::WifiMacHeader const *', 'header')])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::ReportRtsOk(ns3::Mac48Address address, ns3::WifiMacHeader const * header, double ctsSnr, ns3::WifiMode ctsMode, double rtsSnr) [member function]
cls.add_method('ReportRtsOk',
'void',
[param('ns3::Mac48Address', 'address'), param('ns3::WifiMacHeader const *', 'header'), param('double', 'ctsSnr'), param('ns3::WifiMode', 'ctsMode'), param('double', 'rtsSnr')])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::ReportRxOk(ns3::Mac48Address address, ns3::WifiMacHeader const * header, double rxSnr, ns3::WifiMode txMode) [member function]
cls.add_method('ReportRxOk',
'void',
[param('ns3::Mac48Address', 'address'), param('ns3::WifiMacHeader const *', 'header'), param('double', 'rxSnr'), param('ns3::WifiMode', 'txMode')])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::Reset() [member function]
cls.add_method('Reset',
'void',
[])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::Reset(ns3::Mac48Address address) [member function]
cls.add_method('Reset',
'void',
[param('ns3::Mac48Address', 'address')])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::SetFragmentationThreshold(uint32_t threshold) [member function]
cls.add_method('SetFragmentationThreshold',
'void',
[param('uint32_t', 'threshold')])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::SetMaxSlrc(uint32_t maxSlrc) [member function]
cls.add_method('SetMaxSlrc',
'void',
[param('uint32_t', 'maxSlrc')])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::SetMaxSsrc(uint32_t maxSsrc) [member function]
cls.add_method('SetMaxSsrc',
'void',
[param('uint32_t', 'maxSsrc')])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::SetRtsCtsThreshold(uint32_t threshold) [member function]
cls.add_method('SetRtsCtsThreshold',
'void',
[param('uint32_t', 'threshold')])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::SetupPhy(ns3::Ptr<ns3::WifiPhy> phy) [member function]
cls.add_method('SetupPhy',
'void',
[param('ns3::Ptr< ns3::WifiPhy >', 'phy')],
is_virtual=True)
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='protected', is_virtual=True)
## wifi-remote-station-manager.h (module 'wifi'): uint32_t ns3::WifiRemoteStationManager::GetNSupported(ns3::WifiRemoteStation const * station) const [member function]
cls.add_method('GetNSupported',
'uint32_t',
[param('ns3::WifiRemoteStation const *', 'station')],
is_const=True, visibility='protected')
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiMode ns3::WifiRemoteStationManager::GetSupported(ns3::WifiRemoteStation const * station, uint32_t i) const [member function]
cls.add_method('GetSupported',
'ns3::WifiMode',
[param('ns3::WifiRemoteStation const *', 'station'), param('uint32_t', 'i')],
is_const=True, visibility='protected')
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStation * ns3::WifiRemoteStationManager::DoCreateStation() const [member function]
cls.add_method('DoCreateStation',
'ns3::WifiRemoteStation *',
[],
is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True)
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiMode ns3::WifiRemoteStationManager::DoGetDataMode(ns3::WifiRemoteStation * station, uint32_t size) [member function]
cls.add_method('DoGetDataMode',
'ns3::WifiMode',
[param('ns3::WifiRemoteStation *', 'station'), param('uint32_t', 'size')],
is_pure_virtual=True, visibility='private', is_virtual=True)
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiMode ns3::WifiRemoteStationManager::DoGetRtsMode(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoGetRtsMode',
'ns3::WifiMode',
[param('ns3::WifiRemoteStation *', 'station')],
is_pure_virtual=True, visibility='private', is_virtual=True)
## wifi-remote-station-manager.h (module 'wifi'): bool ns3::WifiRemoteStationManager::DoNeedDataRetransmission(ns3::WifiRemoteStation * station, ns3::Ptr<ns3::Packet const> packet, bool normally) [member function]
cls.add_method('DoNeedDataRetransmission',
'bool',
[param('ns3::WifiRemoteStation *', 'station'), param('ns3::Ptr< ns3::Packet const >', 'packet'), param('bool', 'normally')],
visibility='private', is_virtual=True)
## wifi-remote-station-manager.h (module 'wifi'): bool ns3::WifiRemoteStationManager::DoNeedFragmentation(ns3::WifiRemoteStation * station, ns3::Ptr<ns3::Packet const> packet, bool normally) [member function]
cls.add_method('DoNeedFragmentation',
'bool',
[param('ns3::WifiRemoteStation *', 'station'), param('ns3::Ptr< ns3::Packet const >', 'packet'), param('bool', 'normally')],
visibility='private', is_virtual=True)
## wifi-remote-station-manager.h (module 'wifi'): bool ns3::WifiRemoteStationManager::DoNeedRts(ns3::WifiRemoteStation * station, ns3::Ptr<ns3::Packet const> packet, bool normally) [member function]
cls.add_method('DoNeedRts',
'bool',
[param('ns3::WifiRemoteStation *', 'station'), param('ns3::Ptr< ns3::Packet const >', 'packet'), param('bool', 'normally')],
visibility='private', is_virtual=True)
## wifi-remote-station-manager.h (module 'wifi'): bool ns3::WifiRemoteStationManager::DoNeedRtsRetransmission(ns3::WifiRemoteStation * station, ns3::Ptr<ns3::Packet const> packet, bool normally) [member function]
cls.add_method('DoNeedRtsRetransmission',
'bool',
[param('ns3::WifiRemoteStation *', 'station'), param('ns3::Ptr< ns3::Packet const >', 'packet'), param('bool', 'normally')],
visibility='private', is_virtual=True)
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::DoReportDataFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportDataFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
is_pure_virtual=True, visibility='private', is_virtual=True)
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::DoReportDataOk(ns3::WifiRemoteStation * station, double ackSnr, ns3::WifiMode ackMode, double dataSnr) [member function]
cls.add_method('DoReportDataOk',
'void',
[param('ns3::WifiRemoteStation *', 'station'), param('double', 'ackSnr'), param('ns3::WifiMode', 'ackMode'), param('double', 'dataSnr')],
is_pure_virtual=True, visibility='private', is_virtual=True)
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::DoReportFinalDataFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportFinalDataFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
is_pure_virtual=True, visibility='private', is_virtual=True)
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::DoReportFinalRtsFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportFinalRtsFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
is_pure_virtual=True, visibility='private', is_virtual=True)
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::DoReportRtsFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportRtsFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
is_pure_virtual=True, visibility='private', is_virtual=True)
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::DoReportRtsOk(ns3::WifiRemoteStation * station, double ctsSnr, ns3::WifiMode ctsMode, double rtsSnr) [member function]
cls.add_method('DoReportRtsOk',
'void',
[param('ns3::WifiRemoteStation *', 'station'), param('double', 'ctsSnr'), param('ns3::WifiMode', 'ctsMode'), param('double', 'rtsSnr')],
is_pure_virtual=True, visibility='private', is_virtual=True)
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::DoReportRxOk(ns3::WifiRemoteStation * station, double rxSnr, ns3::WifiMode txMode) [member function]
cls.add_method('DoReportRxOk',
'void',
[param('ns3::WifiRemoteStation *', 'station'), param('double', 'rxSnr'), param('ns3::WifiMode', 'txMode')],
is_pure_virtual=True, visibility='private', is_virtual=True)
## wifi-remote-station-manager.h (module 'wifi'): bool ns3::WifiRemoteStationManager::IsLowLatency() const [member function]
cls.add_method('IsLowLatency',
'bool',
[],
is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True)
return
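# Editorial note: the block above mirrors the WifiRemoteStationManager API
# one-to-one; every cls.add_method() call corresponds to the C++ declaration
# quoted in the '##' comment that precedes it.  The helper below is a
# hypothetical sketch (never called by the generated code) showing how an
# additional member function would be exposed with the same pybindgen calls;
# 'ReportExample' and its signature are invented for illustration only.
def _sketch_register_extra_method(cls):
    # hypothetical: void ns3::WifiRemoteStationManager::ReportExample(ns3::Mac48Address address, uint32_t count)
    cls.add_method('ReportExample',
                   'void',
                   [param('ns3::Mac48Address', 'address'), param('uint32_t', 'count')])
    return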
def register_Ns3YansWifiPhy_methods(root_module, cls):
## yans-wifi-phy.h (module 'wifi'): static ns3::TypeId ns3::YansWifiPhy::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## yans-wifi-phy.h (module 'wifi'): ns3::YansWifiPhy::YansWifiPhy() [constructor]
cls.add_constructor([])
## yans-wifi-phy.h (module 'wifi'): void ns3::YansWifiPhy::SetChannel(ns3::Ptr<ns3::YansWifiChannel> channel) [member function]
cls.add_method('SetChannel',
'void',
[param('ns3::Ptr< ns3::YansWifiChannel >', 'channel')])
## yans-wifi-phy.h (module 'wifi'): void ns3::YansWifiPhy::SetChannelNumber(uint16_t id) [member function]
cls.add_method('SetChannelNumber',
'void',
[param('uint16_t', 'id')],
is_virtual=True)
## yans-wifi-phy.h (module 'wifi'): uint16_t ns3::YansWifiPhy::GetChannelNumber() const [member function]
cls.add_method('GetChannelNumber',
'uint16_t',
[],
is_const=True, is_virtual=True)
## yans-wifi-phy.h (module 'wifi'): double ns3::YansWifiPhy::GetChannelFrequencyMhz() const [member function]
cls.add_method('GetChannelFrequencyMhz',
'double',
[],
is_const=True)
## yans-wifi-phy.h (module 'wifi'): void ns3::YansWifiPhy::StartReceivePacket(ns3::Ptr<ns3::Packet> packet, double rxPowerDbm, ns3::WifiMode mode, ns3::WifiPreamble preamble) [member function]
cls.add_method('StartReceivePacket',
'void',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('double', 'rxPowerDbm'), param('ns3::WifiMode', 'mode'), param('ns3::WifiPreamble', 'preamble')])
## yans-wifi-phy.h (module 'wifi'): void ns3::YansWifiPhy::SetRxNoiseFigure(double noiseFigureDb) [member function]
cls.add_method('SetRxNoiseFigure',
'void',
[param('double', 'noiseFigureDb')])
## yans-wifi-phy.h (module 'wifi'): void ns3::YansWifiPhy::SetTxPowerStart(double start) [member function]
cls.add_method('SetTxPowerStart',
'void',
[param('double', 'start')])
## yans-wifi-phy.h (module 'wifi'): void ns3::YansWifiPhy::SetTxPowerEnd(double end) [member function]
cls.add_method('SetTxPowerEnd',
'void',
[param('double', 'end')])
## yans-wifi-phy.h (module 'wifi'): void ns3::YansWifiPhy::SetNTxPower(uint32_t n) [member function]
cls.add_method('SetNTxPower',
'void',
[param('uint32_t', 'n')])
## yans-wifi-phy.h (module 'wifi'): void ns3::YansWifiPhy::SetTxGain(double gain) [member function]
cls.add_method('SetTxGain',
'void',
[param('double', 'gain')])
## yans-wifi-phy.h (module 'wifi'): void ns3::YansWifiPhy::SetRxGain(double gain) [member function]
cls.add_method('SetRxGain',
'void',
[param('double', 'gain')])
## yans-wifi-phy.h (module 'wifi'): void ns3::YansWifiPhy::SetEdThreshold(double threshold) [member function]
cls.add_method('SetEdThreshold',
'void',
[param('double', 'threshold')])
## yans-wifi-phy.h (module 'wifi'): void ns3::YansWifiPhy::SetCcaMode1Threshold(double threshold) [member function]
cls.add_method('SetCcaMode1Threshold',
'void',
[param('double', 'threshold')])
## yans-wifi-phy.h (module 'wifi'): void ns3::YansWifiPhy::SetErrorRateModel(ns3::Ptr<ns3::ErrorRateModel> rate) [member function]
cls.add_method('SetErrorRateModel',
'void',
[param('ns3::Ptr< ns3::ErrorRateModel >', 'rate')])
## yans-wifi-phy.h (module 'wifi'): void ns3::YansWifiPhy::SetDevice(ns3::Ptr<ns3::Object> device) [member function]
cls.add_method('SetDevice',
'void',
[param('ns3::Ptr< ns3::Object >', 'device')])
## yans-wifi-phy.h (module 'wifi'): void ns3::YansWifiPhy::SetMobility(ns3::Ptr<ns3::Object> mobility) [member function]
cls.add_method('SetMobility',
'void',
[param('ns3::Ptr< ns3::Object >', 'mobility')])
## yans-wifi-phy.h (module 'wifi'): double ns3::YansWifiPhy::GetRxNoiseFigure() const [member function]
cls.add_method('GetRxNoiseFigure',
'double',
[],
is_const=True)
## yans-wifi-phy.h (module 'wifi'): double ns3::YansWifiPhy::GetTxGain() const [member function]
cls.add_method('GetTxGain',
'double',
[],
is_const=True)
## yans-wifi-phy.h (module 'wifi'): double ns3::YansWifiPhy::GetRxGain() const [member function]
cls.add_method('GetRxGain',
'double',
[],
is_const=True)
## yans-wifi-phy.h (module 'wifi'): double ns3::YansWifiPhy::GetEdThreshold() const [member function]
cls.add_method('GetEdThreshold',
'double',
[],
is_const=True)
## yans-wifi-phy.h (module 'wifi'): double ns3::YansWifiPhy::GetCcaMode1Threshold() const [member function]
cls.add_method('GetCcaMode1Threshold',
'double',
[],
is_const=True)
## yans-wifi-phy.h (module 'wifi'): ns3::Ptr<ns3::ErrorRateModel> ns3::YansWifiPhy::GetErrorRateModel() const [member function]
cls.add_method('GetErrorRateModel',
'ns3::Ptr< ns3::ErrorRateModel >',
[],
is_const=True)
## yans-wifi-phy.h (module 'wifi'): ns3::Ptr<ns3::Object> ns3::YansWifiPhy::GetDevice() const [member function]
cls.add_method('GetDevice',
'ns3::Ptr< ns3::Object >',
[],
is_const=True)
## yans-wifi-phy.h (module 'wifi'): ns3::Ptr<ns3::Object> ns3::YansWifiPhy::GetMobility() [member function]
cls.add_method('GetMobility',
'ns3::Ptr< ns3::Object >',
[])
## yans-wifi-phy.h (module 'wifi'): double ns3::YansWifiPhy::GetTxPowerStart() const [member function]
cls.add_method('GetTxPowerStart',
'double',
[],
is_const=True, is_virtual=True)
## yans-wifi-phy.h (module 'wifi'): double ns3::YansWifiPhy::GetTxPowerEnd() const [member function]
cls.add_method('GetTxPowerEnd',
'double',
[],
is_const=True, is_virtual=True)
## yans-wifi-phy.h (module 'wifi'): uint32_t ns3::YansWifiPhy::GetNTxPower() const [member function]
cls.add_method('GetNTxPower',
'uint32_t',
[],
is_const=True, is_virtual=True)
## yans-wifi-phy.h (module 'wifi'): void ns3::YansWifiPhy::SetReceiveOkCallback(ns3::Callback<void,ns3::Ptr<ns3::Packet>,double,ns3::WifiMode,ns3::WifiPreamble,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> callback) [member function]
cls.add_method('SetReceiveOkCallback',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::Packet >, double, ns3::WifiMode, ns3::WifiPreamble, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')],
is_virtual=True)
## yans-wifi-phy.h (module 'wifi'): void ns3::YansWifiPhy::SetReceiveErrorCallback(ns3::Callback<void,ns3::Ptr<const ns3::Packet>,double,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> callback) [member function]
cls.add_method('SetReceiveErrorCallback',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::Packet const >, double, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')],
is_virtual=True)
## yans-wifi-phy.h (module 'wifi'): void ns3::YansWifiPhy::SendPacket(ns3::Ptr<ns3::Packet const> packet, ns3::WifiMode mode, ns3::WifiPreamble preamble, uint8_t txPowerLevel) [member function]
cls.add_method('SendPacket',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet'), param('ns3::WifiMode', 'mode'), param('ns3::WifiPreamble', 'preamble'), param('uint8_t', 'txPowerLevel')],
is_virtual=True)
## yans-wifi-phy.h (module 'wifi'): void ns3::YansWifiPhy::RegisterListener(ns3::WifiPhyListener * listener) [member function]
cls.add_method('RegisterListener',
'void',
[param('ns3::WifiPhyListener *', 'listener')],
is_virtual=True)
## yans-wifi-phy.h (module 'wifi'): bool ns3::YansWifiPhy::IsStateCcaBusy() [member function]
cls.add_method('IsStateCcaBusy',
'bool',
[],
is_virtual=True)
## yans-wifi-phy.h (module 'wifi'): bool ns3::YansWifiPhy::IsStateIdle() [member function]
cls.add_method('IsStateIdle',
'bool',
[],
is_virtual=True)
## yans-wifi-phy.h (module 'wifi'): bool ns3::YansWifiPhy::IsStateBusy() [member function]
cls.add_method('IsStateBusy',
'bool',
[],
is_virtual=True)
## yans-wifi-phy.h (module 'wifi'): bool ns3::YansWifiPhy::IsStateRx() [member function]
cls.add_method('IsStateRx',
'bool',
[],
is_virtual=True)
## yans-wifi-phy.h (module 'wifi'): bool ns3::YansWifiPhy::IsStateTx() [member function]
cls.add_method('IsStateTx',
'bool',
[],
is_virtual=True)
## yans-wifi-phy.h (module 'wifi'): bool ns3::YansWifiPhy::IsStateSwitching() [member function]
cls.add_method('IsStateSwitching',
'bool',
[],
is_virtual=True)
## yans-wifi-phy.h (module 'wifi'): ns3::Time ns3::YansWifiPhy::GetStateDuration() [member function]
cls.add_method('GetStateDuration',
'ns3::Time',
[],
is_virtual=True)
## yans-wifi-phy.h (module 'wifi'): ns3::Time ns3::YansWifiPhy::GetDelayUntilIdle() [member function]
cls.add_method('GetDelayUntilIdle',
'ns3::Time',
[],
is_virtual=True)
## yans-wifi-phy.h (module 'wifi'): ns3::Time ns3::YansWifiPhy::GetLastRxStartTime() const [member function]
cls.add_method('GetLastRxStartTime',
'ns3::Time',
[],
is_const=True, is_virtual=True)
## yans-wifi-phy.h (module 'wifi'): uint32_t ns3::YansWifiPhy::GetNModes() const [member function]
cls.add_method('GetNModes',
'uint32_t',
[],
is_const=True, is_virtual=True)
## yans-wifi-phy.h (module 'wifi'): ns3::WifiMode ns3::YansWifiPhy::GetMode(uint32_t mode) const [member function]
cls.add_method('GetMode',
'ns3::WifiMode',
[param('uint32_t', 'mode')],
is_const=True, is_virtual=True)
## yans-wifi-phy.h (module 'wifi'): double ns3::YansWifiPhy::CalculateSnr(ns3::WifiMode txMode, double ber) const [member function]
cls.add_method('CalculateSnr',
'double',
[param('ns3::WifiMode', 'txMode'), param('double', 'ber')],
is_const=True, is_virtual=True)
## yans-wifi-phy.h (module 'wifi'): ns3::Ptr<ns3::WifiChannel> ns3::YansWifiPhy::GetChannel() const [member function]
cls.add_method('GetChannel',
'ns3::Ptr< ns3::WifiChannel >',
[],
is_const=True, is_virtual=True)
## yans-wifi-phy.h (module 'wifi'): void ns3::YansWifiPhy::ConfigureStandard(ns3::WifiPhyStandard standard) [member function]
cls.add_method('ConfigureStandard',
'void',
[param('ns3::WifiPhyStandard', 'standard')],
is_virtual=True)
## yans-wifi-phy.h (module 'wifi'): void ns3::YansWifiPhy::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='private', is_virtual=True)
return
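# Hedged usage sketch (not part of the generated bindings): once this module
# definition is compiled into the Python extension, the YansWifiPhy methods
# registered above become ordinary Python calls.  The 'ns.wifi' import path is
# an assumption about the modular ns-3 build layout; adjust it if your build
# exposes the bindings under a different name.
def _sketch_use_yans_wifi_phy():
    import ns.wifi                     # assumption: built modular python bindings
    phy = ns.wifi.YansWifiPhy()        # default constructor registered above
    phy.SetTxGain(1.0)                 # -> YansWifiPhy::SetTxGain(double)
    phy.SetChannelNumber(6)            # -> YansWifiPhy::SetChannelNumber(uint16_t)
    return phy.GetChannelNumber()      # -> const getter registered above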
def register_Ns3AarfWifiManager_methods(root_module, cls):
## aarf-wifi-manager.h (module 'wifi'): ns3::AarfWifiManager::AarfWifiManager(ns3::AarfWifiManager const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AarfWifiManager const &', 'arg0')])
## aarf-wifi-manager.h (module 'wifi'): ns3::AarfWifiManager::AarfWifiManager() [constructor]
cls.add_constructor([])
## aarf-wifi-manager.h (module 'wifi'): static ns3::TypeId ns3::AarfWifiManager::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## aarf-wifi-manager.h (module 'wifi'): ns3::WifiRemoteStation * ns3::AarfWifiManager::DoCreateStation() const [member function]
cls.add_method('DoCreateStation',
'ns3::WifiRemoteStation *',
[],
is_const=True, visibility='private', is_virtual=True)
## aarf-wifi-manager.h (module 'wifi'): ns3::WifiMode ns3::AarfWifiManager::DoGetDataMode(ns3::WifiRemoteStation * station, uint32_t size) [member function]
cls.add_method('DoGetDataMode',
'ns3::WifiMode',
[param('ns3::WifiRemoteStation *', 'station'), param('uint32_t', 'size')],
visibility='private', is_virtual=True)
## aarf-wifi-manager.h (module 'wifi'): ns3::WifiMode ns3::AarfWifiManager::DoGetRtsMode(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoGetRtsMode',
'ns3::WifiMode',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## aarf-wifi-manager.h (module 'wifi'): void ns3::AarfWifiManager::DoReportDataFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportDataFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## aarf-wifi-manager.h (module 'wifi'): void ns3::AarfWifiManager::DoReportDataOk(ns3::WifiRemoteStation * station, double ackSnr, ns3::WifiMode ackMode, double dataSnr) [member function]
cls.add_method('DoReportDataOk',
'void',
[param('ns3::WifiRemoteStation *', 'station'), param('double', 'ackSnr'), param('ns3::WifiMode', 'ackMode'), param('double', 'dataSnr')],
visibility='private', is_virtual=True)
## aarf-wifi-manager.h (module 'wifi'): void ns3::AarfWifiManager::DoReportFinalDataFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportFinalDataFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## aarf-wifi-manager.h (module 'wifi'): void ns3::AarfWifiManager::DoReportFinalRtsFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportFinalRtsFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## aarf-wifi-manager.h (module 'wifi'): void ns3::AarfWifiManager::DoReportRtsFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportRtsFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## aarf-wifi-manager.h (module 'wifi'): void ns3::AarfWifiManager::DoReportRtsOk(ns3::WifiRemoteStation * station, double ctsSnr, ns3::WifiMode ctsMode, double rtsSnr) [member function]
cls.add_method('DoReportRtsOk',
'void',
[param('ns3::WifiRemoteStation *', 'station'), param('double', 'ctsSnr'), param('ns3::WifiMode', 'ctsMode'), param('double', 'rtsSnr')],
visibility='private', is_virtual=True)
## aarf-wifi-manager.h (module 'wifi'): void ns3::AarfWifiManager::DoReportRxOk(ns3::WifiRemoteStation * station, double rxSnr, ns3::WifiMode txMode) [member function]
cls.add_method('DoReportRxOk',
'void',
[param('ns3::WifiRemoteStation *', 'station'), param('double', 'rxSnr'), param('ns3::WifiMode', 'txMode')],
visibility='private', is_virtual=True)
## aarf-wifi-manager.h (module 'wifi'): bool ns3::AarfWifiManager::IsLowLatency() const [member function]
cls.add_method('IsLowLatency',
'bool',
[],
is_const=True, visibility='private', is_virtual=True)
return
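# Note: the rate-control managers registered below (AarfcdWifiManager,
# AmrrWifiManager, ArfWifiManager, CaraWifiManager, ConstantRateWifiManager)
# follow the same shape as AarfWifiManager above: each one binds the copy and
# default constructors, GetTypeId, and the private virtual hooks inherited
# from WifiRemoteStationManager (DoCreateStation, DoGetDataMode, DoGetRtsMode,
# the DoReport* notifications and IsLowLatency).  AarfcdWifiManager and
# CaraWifiManager additionally override DoNeedRts.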
def register_Ns3AarfcdWifiManager_methods(root_module, cls):
## aarfcd-wifi-manager.h (module 'wifi'): ns3::AarfcdWifiManager::AarfcdWifiManager(ns3::AarfcdWifiManager const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AarfcdWifiManager const &', 'arg0')])
## aarfcd-wifi-manager.h (module 'wifi'): ns3::AarfcdWifiManager::AarfcdWifiManager() [constructor]
cls.add_constructor([])
## aarfcd-wifi-manager.h (module 'wifi'): static ns3::TypeId ns3::AarfcdWifiManager::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## aarfcd-wifi-manager.h (module 'wifi'): ns3::WifiRemoteStation * ns3::AarfcdWifiManager::DoCreateStation() const [member function]
cls.add_method('DoCreateStation',
'ns3::WifiRemoteStation *',
[],
is_const=True, visibility='private', is_virtual=True)
## aarfcd-wifi-manager.h (module 'wifi'): ns3::WifiMode ns3::AarfcdWifiManager::DoGetDataMode(ns3::WifiRemoteStation * station, uint32_t size) [member function]
cls.add_method('DoGetDataMode',
'ns3::WifiMode',
[param('ns3::WifiRemoteStation *', 'station'), param('uint32_t', 'size')],
visibility='private', is_virtual=True)
## aarfcd-wifi-manager.h (module 'wifi'): ns3::WifiMode ns3::AarfcdWifiManager::DoGetRtsMode(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoGetRtsMode',
'ns3::WifiMode',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## aarfcd-wifi-manager.h (module 'wifi'): bool ns3::AarfcdWifiManager::DoNeedRts(ns3::WifiRemoteStation * station, ns3::Ptr<ns3::Packet const> packet, bool normally) [member function]
cls.add_method('DoNeedRts',
'bool',
[param('ns3::WifiRemoteStation *', 'station'), param('ns3::Ptr< ns3::Packet const >', 'packet'), param('bool', 'normally')],
visibility='private', is_virtual=True)
## aarfcd-wifi-manager.h (module 'wifi'): void ns3::AarfcdWifiManager::DoReportDataFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportDataFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## aarfcd-wifi-manager.h (module 'wifi'): void ns3::AarfcdWifiManager::DoReportDataOk(ns3::WifiRemoteStation * station, double ackSnr, ns3::WifiMode ackMode, double dataSnr) [member function]
cls.add_method('DoReportDataOk',
'void',
[param('ns3::WifiRemoteStation *', 'station'), param('double', 'ackSnr'), param('ns3::WifiMode', 'ackMode'), param('double', 'dataSnr')],
visibility='private', is_virtual=True)
## aarfcd-wifi-manager.h (module 'wifi'): void ns3::AarfcdWifiManager::DoReportFinalDataFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportFinalDataFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## aarfcd-wifi-manager.h (module 'wifi'): void ns3::AarfcdWifiManager::DoReportFinalRtsFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportFinalRtsFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## aarfcd-wifi-manager.h (module 'wifi'): void ns3::AarfcdWifiManager::DoReportRtsFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportRtsFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## aarfcd-wifi-manager.h (module 'wifi'): void ns3::AarfcdWifiManager::DoReportRtsOk(ns3::WifiRemoteStation * station, double ctsSnr, ns3::WifiMode ctsMode, double rtsSnr) [member function]
cls.add_method('DoReportRtsOk',
'void',
[param('ns3::WifiRemoteStation *', 'station'), param('double', 'ctsSnr'), param('ns3::WifiMode', 'ctsMode'), param('double', 'rtsSnr')],
visibility='private', is_virtual=True)
## aarfcd-wifi-manager.h (module 'wifi'): void ns3::AarfcdWifiManager::DoReportRxOk(ns3::WifiRemoteStation * station, double rxSnr, ns3::WifiMode txMode) [member function]
cls.add_method('DoReportRxOk',
'void',
[param('ns3::WifiRemoteStation *', 'station'), param('double', 'rxSnr'), param('ns3::WifiMode', 'txMode')],
visibility='private', is_virtual=True)
## aarfcd-wifi-manager.h (module 'wifi'): bool ns3::AarfcdWifiManager::IsLowLatency() const [member function]
cls.add_method('IsLowLatency',
'bool',
[],
is_const=True, visibility='private', is_virtual=True)
return
def register_Ns3AmrrWifiManager_methods(root_module, cls):
## amrr-wifi-manager.h (module 'wifi'): ns3::AmrrWifiManager::AmrrWifiManager(ns3::AmrrWifiManager const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AmrrWifiManager const &', 'arg0')])
## amrr-wifi-manager.h (module 'wifi'): ns3::AmrrWifiManager::AmrrWifiManager() [constructor]
cls.add_constructor([])
## amrr-wifi-manager.h (module 'wifi'): static ns3::TypeId ns3::AmrrWifiManager::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## amrr-wifi-manager.h (module 'wifi'): ns3::WifiRemoteStation * ns3::AmrrWifiManager::DoCreateStation() const [member function]
cls.add_method('DoCreateStation',
'ns3::WifiRemoteStation *',
[],
is_const=True, visibility='private', is_virtual=True)
## amrr-wifi-manager.h (module 'wifi'): ns3::WifiMode ns3::AmrrWifiManager::DoGetDataMode(ns3::WifiRemoteStation * station, uint32_t size) [member function]
cls.add_method('DoGetDataMode',
'ns3::WifiMode',
[param('ns3::WifiRemoteStation *', 'station'), param('uint32_t', 'size')],
visibility='private', is_virtual=True)
## amrr-wifi-manager.h (module 'wifi'): ns3::WifiMode ns3::AmrrWifiManager::DoGetRtsMode(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoGetRtsMode',
'ns3::WifiMode',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## amrr-wifi-manager.h (module 'wifi'): void ns3::AmrrWifiManager::DoReportDataFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportDataFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## amrr-wifi-manager.h (module 'wifi'): void ns3::AmrrWifiManager::DoReportDataOk(ns3::WifiRemoteStation * station, double ackSnr, ns3::WifiMode ackMode, double dataSnr) [member function]
cls.add_method('DoReportDataOk',
'void',
[param('ns3::WifiRemoteStation *', 'station'), param('double', 'ackSnr'), param('ns3::WifiMode', 'ackMode'), param('double', 'dataSnr')],
visibility='private', is_virtual=True)
## amrr-wifi-manager.h (module 'wifi'): void ns3::AmrrWifiManager::DoReportFinalDataFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportFinalDataFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## amrr-wifi-manager.h (module 'wifi'): void ns3::AmrrWifiManager::DoReportFinalRtsFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportFinalRtsFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## amrr-wifi-manager.h (module 'wifi'): void ns3::AmrrWifiManager::DoReportRtsFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportRtsFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## amrr-wifi-manager.h (module 'wifi'): void ns3::AmrrWifiManager::DoReportRtsOk(ns3::WifiRemoteStation * station, double ctsSnr, ns3::WifiMode ctsMode, double rtsSnr) [member function]
cls.add_method('DoReportRtsOk',
'void',
[param('ns3::WifiRemoteStation *', 'station'), param('double', 'ctsSnr'), param('ns3::WifiMode', 'ctsMode'), param('double', 'rtsSnr')],
visibility='private', is_virtual=True)
## amrr-wifi-manager.h (module 'wifi'): void ns3::AmrrWifiManager::DoReportRxOk(ns3::WifiRemoteStation * station, double rxSnr, ns3::WifiMode txMode) [member function]
cls.add_method('DoReportRxOk',
'void',
[param('ns3::WifiRemoteStation *', 'station'), param('double', 'rxSnr'), param('ns3::WifiMode', 'txMode')],
visibility='private', is_virtual=True)
## amrr-wifi-manager.h (module 'wifi'): bool ns3::AmrrWifiManager::IsLowLatency() const [member function]
cls.add_method('IsLowLatency',
'bool',
[],
is_const=True, visibility='private', is_virtual=True)
return
def register_Ns3AmsduSubframeHeader_methods(root_module, cls):
## amsdu-subframe-header.h (module 'wifi'): ns3::AmsduSubframeHeader::AmsduSubframeHeader(ns3::AmsduSubframeHeader const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AmsduSubframeHeader const &', 'arg0')])
## amsdu-subframe-header.h (module 'wifi'): ns3::AmsduSubframeHeader::AmsduSubframeHeader() [constructor]
cls.add_constructor([])
## amsdu-subframe-header.h (module 'wifi'): uint32_t ns3::AmsduSubframeHeader::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## amsdu-subframe-header.h (module 'wifi'): ns3::Mac48Address ns3::AmsduSubframeHeader::GetDestinationAddr() const [member function]
cls.add_method('GetDestinationAddr',
'ns3::Mac48Address',
[],
is_const=True)
## amsdu-subframe-header.h (module 'wifi'): ns3::TypeId ns3::AmsduSubframeHeader::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## amsdu-subframe-header.h (module 'wifi'): uint16_t ns3::AmsduSubframeHeader::GetLength() const [member function]
cls.add_method('GetLength',
'uint16_t',
[],
is_const=True)
## amsdu-subframe-header.h (module 'wifi'): uint32_t ns3::AmsduSubframeHeader::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## amsdu-subframe-header.h (module 'wifi'): ns3::Mac48Address ns3::AmsduSubframeHeader::GetSourceAddr() const [member function]
cls.add_method('GetSourceAddr',
'ns3::Mac48Address',
[],
is_const=True)
## amsdu-subframe-header.h (module 'wifi'): static ns3::TypeId ns3::AmsduSubframeHeader::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## amsdu-subframe-header.h (module 'wifi'): void ns3::AmsduSubframeHeader::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## amsdu-subframe-header.h (module 'wifi'): void ns3::AmsduSubframeHeader::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## amsdu-subframe-header.h (module 'wifi'): void ns3::AmsduSubframeHeader::SetDestinationAddr(ns3::Mac48Address to) [member function]
cls.add_method('SetDestinationAddr',
'void',
[param('ns3::Mac48Address', 'to')])
## amsdu-subframe-header.h (module 'wifi'): void ns3::AmsduSubframeHeader::SetLength(uint16_t arg0) [member function]
cls.add_method('SetLength',
'void',
[param('uint16_t', 'arg0')])
## amsdu-subframe-header.h (module 'wifi'): void ns3::AmsduSubframeHeader::SetSourceAddr(ns3::Mac48Address to) [member function]
cls.add_method('SetSourceAddr',
'void',
[param('ns3::Mac48Address', 'to')])
return
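# AmsduSubframeHeader implements the standard ns3::Header interface
# (GetTypeId / GetInstanceTypeId / GetSerializedSize / Serialize /
# Deserialize / Print) in addition to the A-MSDU specific accessors for the
# source address, destination address and length, so once bound it can be
# attached to and parsed from ns3::Packet objects like any other header.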
def register_Ns3ArfWifiManager_methods(root_module, cls):
## arf-wifi-manager.h (module 'wifi'): ns3::ArfWifiManager::ArfWifiManager(ns3::ArfWifiManager const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ArfWifiManager const &', 'arg0')])
## arf-wifi-manager.h (module 'wifi'): ns3::ArfWifiManager::ArfWifiManager() [constructor]
cls.add_constructor([])
## arf-wifi-manager.h (module 'wifi'): static ns3::TypeId ns3::ArfWifiManager::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## arf-wifi-manager.h (module 'wifi'): ns3::WifiRemoteStation * ns3::ArfWifiManager::DoCreateStation() const [member function]
cls.add_method('DoCreateStation',
'ns3::WifiRemoteStation *',
[],
is_const=True, visibility='private', is_virtual=True)
## arf-wifi-manager.h (module 'wifi'): ns3::WifiMode ns3::ArfWifiManager::DoGetDataMode(ns3::WifiRemoteStation * station, uint32_t size) [member function]
cls.add_method('DoGetDataMode',
'ns3::WifiMode',
[param('ns3::WifiRemoteStation *', 'station'), param('uint32_t', 'size')],
visibility='private', is_virtual=True)
## arf-wifi-manager.h (module 'wifi'): ns3::WifiMode ns3::ArfWifiManager::DoGetRtsMode(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoGetRtsMode',
'ns3::WifiMode',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## arf-wifi-manager.h (module 'wifi'): void ns3::ArfWifiManager::DoReportDataFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportDataFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## arf-wifi-manager.h (module 'wifi'): void ns3::ArfWifiManager::DoReportDataOk(ns3::WifiRemoteStation * station, double ackSnr, ns3::WifiMode ackMode, double dataSnr) [member function]
cls.add_method('DoReportDataOk',
'void',
[param('ns3::WifiRemoteStation *', 'station'), param('double', 'ackSnr'), param('ns3::WifiMode', 'ackMode'), param('double', 'dataSnr')],
visibility='private', is_virtual=True)
## arf-wifi-manager.h (module 'wifi'): void ns3::ArfWifiManager::DoReportFinalDataFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportFinalDataFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## arf-wifi-manager.h (module 'wifi'): void ns3::ArfWifiManager::DoReportFinalRtsFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportFinalRtsFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## arf-wifi-manager.h (module 'wifi'): void ns3::ArfWifiManager::DoReportRtsFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportRtsFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## arf-wifi-manager.h (module 'wifi'): void ns3::ArfWifiManager::DoReportRtsOk(ns3::WifiRemoteStation * station, double ctsSnr, ns3::WifiMode ctsMode, double rtsSnr) [member function]
cls.add_method('DoReportRtsOk',
'void',
[param('ns3::WifiRemoteStation *', 'station'), param('double', 'ctsSnr'), param('ns3::WifiMode', 'ctsMode'), param('double', 'rtsSnr')],
visibility='private', is_virtual=True)
## arf-wifi-manager.h (module 'wifi'): void ns3::ArfWifiManager::DoReportRxOk(ns3::WifiRemoteStation * station, double rxSnr, ns3::WifiMode txMode) [member function]
cls.add_method('DoReportRxOk',
'void',
[param('ns3::WifiRemoteStation *', 'station'), param('double', 'rxSnr'), param('ns3::WifiMode', 'txMode')],
visibility='private', is_virtual=True)
## arf-wifi-manager.h (module 'wifi'): bool ns3::ArfWifiManager::IsLowLatency() const [member function]
cls.add_method('IsLowLatency',
'bool',
[],
is_const=True, visibility='private', is_virtual=True)
return
def register_Ns3AthstatsWifiTraceSink_methods(root_module, cls):
## athstats-helper.h (module 'wifi'): ns3::AthstatsWifiTraceSink::AthstatsWifiTraceSink(ns3::AthstatsWifiTraceSink const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AthstatsWifiTraceSink const &', 'arg0')])
## athstats-helper.h (module 'wifi'): ns3::AthstatsWifiTraceSink::AthstatsWifiTraceSink() [constructor]
cls.add_constructor([])
## athstats-helper.h (module 'wifi'): void ns3::AthstatsWifiTraceSink::DevRxTrace(std::string context, ns3::Ptr<ns3::Packet const> p) [member function]
cls.add_method('DevRxTrace',
'void',
[param('std::string', 'context'), param('ns3::Ptr< ns3::Packet const >', 'p')])
## athstats-helper.h (module 'wifi'): void ns3::AthstatsWifiTraceSink::DevTxTrace(std::string context, ns3::Ptr<ns3::Packet const> p) [member function]
cls.add_method('DevTxTrace',
'void',
[param('std::string', 'context'), param('ns3::Ptr< ns3::Packet const >', 'p')])
## athstats-helper.h (module 'wifi'): static ns3::TypeId ns3::AthstatsWifiTraceSink::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## athstats-helper.h (module 'wifi'): void ns3::AthstatsWifiTraceSink::Open(std::string const & name) [member function]
cls.add_method('Open',
'void',
[param('std::string const &', 'name')])
## athstats-helper.h (module 'wifi'): void ns3::AthstatsWifiTraceSink::PhyRxErrorTrace(std::string context, ns3::Ptr<ns3::Packet const> packet, double snr) [member function]
cls.add_method('PhyRxErrorTrace',
'void',
[param('std::string', 'context'), param('ns3::Ptr< ns3::Packet const >', 'packet'), param('double', 'snr')])
## athstats-helper.h (module 'wifi'): void ns3::AthstatsWifiTraceSink::PhyRxOkTrace(std::string context, ns3::Ptr<ns3::Packet const> packet, double snr, ns3::WifiMode mode, ns3::WifiPreamble preamble) [member function]
cls.add_method('PhyRxOkTrace',
'void',
[param('std::string', 'context'), param('ns3::Ptr< ns3::Packet const >', 'packet'), param('double', 'snr'), param('ns3::WifiMode', 'mode'), param('ns3::WifiPreamble', 'preamble')])
## athstats-helper.h (module 'wifi'): void ns3::AthstatsWifiTraceSink::PhyStateTrace(std::string context, ns3::Time start, ns3::Time duration, ns3::WifiPhy::State state) [member function]
cls.add_method('PhyStateTrace',
'void',
[param('std::string', 'context'), param('ns3::Time', 'start'), param('ns3::Time', 'duration'), param('ns3::WifiPhy::State', 'state')])
## athstats-helper.h (module 'wifi'): void ns3::AthstatsWifiTraceSink::PhyTxTrace(std::string context, ns3::Ptr<ns3::Packet const> packet, ns3::WifiMode mode, ns3::WifiPreamble preamble, uint8_t txPower) [member function]
cls.add_method('PhyTxTrace',
'void',
[param('std::string', 'context'), param('ns3::Ptr< ns3::Packet const >', 'packet'), param('ns3::WifiMode', 'mode'), param('ns3::WifiPreamble', 'preamble'), param('uint8_t', 'txPower')])
## athstats-helper.h (module 'wifi'): void ns3::AthstatsWifiTraceSink::TxDataFailedTrace(std::string context, ns3::Mac48Address address) [member function]
cls.add_method('TxDataFailedTrace',
'void',
[param('std::string', 'context'), param('ns3::Mac48Address', 'address')])
## athstats-helper.h (module 'wifi'): void ns3::AthstatsWifiTraceSink::TxFinalDataFailedTrace(std::string context, ns3::Mac48Address address) [member function]
cls.add_method('TxFinalDataFailedTrace',
'void',
[param('std::string', 'context'), param('ns3::Mac48Address', 'address')])
## athstats-helper.h (module 'wifi'): void ns3::AthstatsWifiTraceSink::TxFinalRtsFailedTrace(std::string context, ns3::Mac48Address address) [member function]
cls.add_method('TxFinalRtsFailedTrace',
'void',
[param('std::string', 'context'), param('ns3::Mac48Address', 'address')])
## athstats-helper.h (module 'wifi'): void ns3::AthstatsWifiTraceSink::TxRtsFailedTrace(std::string context, ns3::Mac48Address address) [member function]
cls.add_method('TxRtsFailedTrace',
'void',
[param('std::string', 'context'), param('ns3::Mac48Address', 'address')])
return
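# AthstatsWifiTraceSink is a pure trace consumer: each *Trace method
# registered above takes a std::string context as its first argument, which is
# the usual signature for callbacks hooked up through ns-3's Config::Connect
# mechanism (the context string identifies which traced object fired the
# callback).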
def register_Ns3AttributeAccessor_methods(root_module, cls):
## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor(ns3::AttributeAccessor const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeAccessor const &', 'arg0')])
## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): bool ns3::AttributeAccessor::Get(ns3::ObjectBase const * object, ns3::AttributeValue & attribute) const [member function]
cls.add_method('Get',
'bool',
[param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'attribute')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasGetter() const [member function]
cls.add_method('HasGetter',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasSetter() const [member function]
cls.add_method('HasSetter',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeAccessor::Set(ns3::ObjectBase * object, ns3::AttributeValue const & value) const [member function]
cls.add_method('Set',
'bool',
[param('ns3::ObjectBase *', 'object', transfer_ownership=False), param('ns3::AttributeValue const &', 'value')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3AttributeChecker_methods(root_module, cls):
## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker(ns3::AttributeChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeChecker const &', 'arg0')])
## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): bool ns3::AttributeChecker::Check(ns3::AttributeValue const & value) const [member function]
cls.add_method('Check',
'bool',
[param('ns3::AttributeValue const &', 'value')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeChecker::Copy(ns3::AttributeValue const & source, ns3::AttributeValue & destination) const [member function]
cls.add_method('Copy',
'bool',
[param('ns3::AttributeValue const &', 'source'), param('ns3::AttributeValue &', 'destination')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::Create() const [member function]
cls.add_method('Create',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::CreateValidValue(ns3::AttributeValue const & value) const [member function]
cls.add_method('CreateValidValue',
'ns3::Ptr< ns3::AttributeValue >',
[param('ns3::AttributeValue const &', 'value')],
is_const=True)
## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetUnderlyingTypeInformation() const [member function]
cls.add_method('GetUnderlyingTypeInformation',
'std::string',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetValueTypeName() const [member function]
cls.add_method('GetValueTypeName',
'std::string',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeChecker::HasUnderlyingTypeInformation() const [member function]
cls.add_method('HasUnderlyingTypeInformation',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3AttributeValue_methods(root_module, cls):
## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue(ns3::AttributeValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeValue const &', 'arg0')])
## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_pure_virtual=True, is_virtual=True)
## attribute.h (module 'core'): std::string ns3::AttributeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
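# AttributeAccessor, AttributeChecker and AttributeValue (attribute.h, module
# 'core') form the abstract backbone of the ns-3 attribute system: the
# accessor reads and writes an attribute on an ObjectBase, the checker
# validates and copies values, and the value carries the data itself.  They
# appear in this wifi bindings file because wifi-level APIs (for example
# CallbackValue below) take or return these types.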
def register_Ns3CallbackChecker_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackChecker::CallbackChecker() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::CallbackChecker::CallbackChecker(ns3::CallbackChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackChecker const &', 'arg0')])
return
def register_Ns3CallbackImplBase_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase(ns3::CallbackImplBase const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackImplBase const &', 'arg0')])
## callback.h (module 'core'): bool ns3::CallbackImplBase::IsEqual(ns3::Ptr<ns3::CallbackImplBase const> other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ptr< ns3::CallbackImplBase const >', 'other')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3CallbackValue_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackValue const &', 'arg0')])
## callback.h (module 'core'): ns3::CallbackValue::CallbackValue() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackBase const & base) [constructor]
cls.add_constructor([param('ns3::CallbackBase const &', 'base')])
## callback.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::CallbackValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## callback.h (module 'core'): bool ns3::CallbackValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## callback.h (module 'core'): std::string ns3::CallbackValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## callback.h (module 'core'): void ns3::CallbackValue::Set(ns3::CallbackBase base) [member function]
cls.add_method('Set',
'void',
[param('ns3::CallbackBase', 'base')])
return
def register_Ns3CaraWifiManager_methods(root_module, cls):
## cara-wifi-manager.h (module 'wifi'): ns3::CaraWifiManager::CaraWifiManager(ns3::CaraWifiManager const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CaraWifiManager const &', 'arg0')])
## cara-wifi-manager.h (module 'wifi'): ns3::CaraWifiManager::CaraWifiManager() [constructor]
cls.add_constructor([])
## cara-wifi-manager.h (module 'wifi'): static ns3::TypeId ns3::CaraWifiManager::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## cara-wifi-manager.h (module 'wifi'): ns3::WifiRemoteStation * ns3::CaraWifiManager::DoCreateStation() const [member function]
cls.add_method('DoCreateStation',
'ns3::WifiRemoteStation *',
[],
is_const=True, visibility='private', is_virtual=True)
## cara-wifi-manager.h (module 'wifi'): ns3::WifiMode ns3::CaraWifiManager::DoGetDataMode(ns3::WifiRemoteStation * station, uint32_t size) [member function]
cls.add_method('DoGetDataMode',
'ns3::WifiMode',
[param('ns3::WifiRemoteStation *', 'station'), param('uint32_t', 'size')],
visibility='private', is_virtual=True)
## cara-wifi-manager.h (module 'wifi'): ns3::WifiMode ns3::CaraWifiManager::DoGetRtsMode(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoGetRtsMode',
'ns3::WifiMode',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## cara-wifi-manager.h (module 'wifi'): bool ns3::CaraWifiManager::DoNeedRts(ns3::WifiRemoteStation * station, ns3::Ptr<ns3::Packet const> packet, bool normally) [member function]
cls.add_method('DoNeedRts',
'bool',
[param('ns3::WifiRemoteStation *', 'station'), param('ns3::Ptr< ns3::Packet const >', 'packet'), param('bool', 'normally')],
visibility='private', is_virtual=True)
## cara-wifi-manager.h (module 'wifi'): void ns3::CaraWifiManager::DoReportDataFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportDataFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## cara-wifi-manager.h (module 'wifi'): void ns3::CaraWifiManager::DoReportDataOk(ns3::WifiRemoteStation * station, double ackSnr, ns3::WifiMode ackMode, double dataSnr) [member function]
cls.add_method('DoReportDataOk',
'void',
[param('ns3::WifiRemoteStation *', 'station'), param('double', 'ackSnr'), param('ns3::WifiMode', 'ackMode'), param('double', 'dataSnr')],
visibility='private', is_virtual=True)
## cara-wifi-manager.h (module 'wifi'): void ns3::CaraWifiManager::DoReportFinalDataFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportFinalDataFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## cara-wifi-manager.h (module 'wifi'): void ns3::CaraWifiManager::DoReportFinalRtsFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportFinalRtsFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## cara-wifi-manager.h (module 'wifi'): void ns3::CaraWifiManager::DoReportRtsFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportRtsFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## cara-wifi-manager.h (module 'wifi'): void ns3::CaraWifiManager::DoReportRtsOk(ns3::WifiRemoteStation * station, double ctsSnr, ns3::WifiMode ctsMode, double rtsSnr) [member function]
cls.add_method('DoReportRtsOk',
'void',
[param('ns3::WifiRemoteStation *', 'station'), param('double', 'ctsSnr'), param('ns3::WifiMode', 'ctsMode'), param('double', 'rtsSnr')],
visibility='private', is_virtual=True)
## cara-wifi-manager.h (module 'wifi'): void ns3::CaraWifiManager::DoReportRxOk(ns3::WifiRemoteStation * station, double rxSnr, ns3::WifiMode txMode) [member function]
cls.add_method('DoReportRxOk',
'void',
[param('ns3::WifiRemoteStation *', 'station'), param('double', 'rxSnr'), param('ns3::WifiMode', 'txMode')],
visibility='private', is_virtual=True)
## cara-wifi-manager.h (module 'wifi'): bool ns3::CaraWifiManager::IsLowLatency() const [member function]
cls.add_method('IsLowLatency',
'bool',
[],
is_const=True, visibility='private', is_virtual=True)
return
def register_Ns3Channel_methods(root_module, cls):
## channel.h (module 'network'): ns3::Channel::Channel(ns3::Channel const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Channel const &', 'arg0')])
## channel.h (module 'network'): ns3::Channel::Channel() [constructor]
cls.add_constructor([])
## channel.h (module 'network'): ns3::Ptr<ns3::NetDevice> ns3::Channel::GetDevice(uint32_t i) const [member function]
cls.add_method('GetDevice',
'ns3::Ptr< ns3::NetDevice >',
[param('uint32_t', 'i')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## channel.h (module 'network'): uint32_t ns3::Channel::GetId() const [member function]
cls.add_method('GetId',
'uint32_t',
[],
is_const=True)
## channel.h (module 'network'): uint32_t ns3::Channel::GetNDevices() const [member function]
cls.add_method('GetNDevices',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## channel.h (module 'network'): static ns3::TypeId ns3::Channel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
return
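# Channel (channel.h, module 'network') is the abstract base class for all
# ns-3 channels; GetDevice and GetNDevices are pure virtual here and are
# implemented by concrete channels such as the wifi channel returned by
# YansWifiPhy::GetChannel above.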
def register_Ns3ConstantRateWifiManager_methods(root_module, cls):
## constant-rate-wifi-manager.h (module 'wifi'): ns3::ConstantRateWifiManager::ConstantRateWifiManager(ns3::ConstantRateWifiManager const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ConstantRateWifiManager const &', 'arg0')])
## constant-rate-wifi-manager.h (module 'wifi'): ns3::ConstantRateWifiManager::ConstantRateWifiManager() [constructor]
cls.add_constructor([])
## constant-rate-wifi-manager.h (module 'wifi'): static ns3::TypeId ns3::ConstantRateWifiManager::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## constant-rate-wifi-manager.h (module 'wifi'): ns3::WifiRemoteStation * ns3::ConstantRateWifiManager::DoCreateStation() const [member function]
cls.add_method('DoCreateStation',
'ns3::WifiRemoteStation *',
[],
is_const=True, visibility='private', is_virtual=True)
## constant-rate-wifi-manager.h (module 'wifi'): ns3::WifiMode ns3::ConstantRateWifiManager::DoGetDataMode(ns3::WifiRemoteStation * station, uint32_t size) [member function]
cls.add_method('DoGetDataMode',
'ns3::WifiMode',
[param('ns3::WifiRemoteStation *', 'station'), param('uint32_t', 'size')],
visibility='private', is_virtual=True)
## constant-rate-wifi-manager.h (module 'wifi'): ns3::WifiMode ns3::ConstantRateWifiManager::DoGetRtsMode(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoGetRtsMode',
'ns3::WifiMode',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## constant-rate-wifi-manager.h (module 'wifi'): void ns3::ConstantRateWifiManager::DoReportDataFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportDataFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## constant-rate-wifi-manager.h (module 'wifi'): void ns3::ConstantRateWifiManager::DoReportDataOk(ns3::WifiRemoteStation * station, double ackSnr, ns3::WifiMode ackMode, double dataSnr) [member function]
cls.add_method('DoReportDataOk',
'void',
[param('ns3::WifiRemoteStation *', 'station'), param('double', 'ackSnr'), param('ns3::WifiMode', 'ackMode'), param('double', 'dataSnr')],
visibility='private', is_virtual=True)
## constant-rate-wifi-manager.h (module 'wifi'): void ns3::ConstantRateWifiManager::DoReportFinalDataFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportFinalDataFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## constant-rate-wifi-manager.h (module 'wifi'): void ns3::ConstantRateWifiManager::DoReportFinalRtsFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportFinalRtsFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## constant-rate-wifi-manager.h (module 'wifi'): void ns3::ConstantRateWifiManager::DoReportRtsFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportRtsFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## constant-rate-wifi-manager.h (module 'wifi'): void ns3::ConstantRateWifiManager::DoReportRtsOk(ns3::WifiRemoteStation * station, double ctsSnr, ns3::WifiMode ctsMode, double rtsSnr) [member function]
cls.add_method('DoReportRtsOk',
'void',
[param('ns3::WifiRemoteStation *', 'station'), param('double', 'ctsSnr'), param('ns3::WifiMode', 'ctsMode'), param('double', 'rtsSnr')],
visibility='private', is_virtual=True)
## constant-rate-wifi-manager.h (module 'wifi'): void ns3::ConstantRateWifiManager::DoReportRxOk(ns3::WifiRemoteStation * station, double rxSnr, ns3::WifiMode txMode) [member function]
cls.add_method('DoReportRxOk',
'void',
[param('ns3::WifiRemoteStation *', 'station'), param('double', 'rxSnr'), param('ns3::WifiMode', 'txMode')],
visibility='private', is_virtual=True)
## constant-rate-wifi-manager.h (module 'wifi'): bool ns3::ConstantRateWifiManager::IsLowLatency() const [member function]
cls.add_method('IsLowLatency',
'bool',
[],
is_const=True, visibility='private', is_virtual=True)
return
def register_Ns3ConstantSpeedPropagationDelayModel_methods(root_module, cls):
## propagation-delay-model.h (module 'propagation'): ns3::ConstantSpeedPropagationDelayModel::ConstantSpeedPropagationDelayModel(ns3::ConstantSpeedPropagationDelayModel const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ConstantSpeedPropagationDelayModel const &', 'arg0')])
## propagation-delay-model.h (module 'propagation'): ns3::ConstantSpeedPropagationDelayModel::ConstantSpeedPropagationDelayModel() [constructor]
cls.add_constructor([])
## propagation-delay-model.h (module 'propagation'): ns3::Time ns3::ConstantSpeedPropagationDelayModel::GetDelay(ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function]
cls.add_method('GetDelay',
'ns3::Time',
[param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')],
is_const=True, is_virtual=True)
## propagation-delay-model.h (module 'propagation'): double ns3::ConstantSpeedPropagationDelayModel::GetSpeed() const [member function]
cls.add_method('GetSpeed',
'double',
[],
is_const=True)
## propagation-delay-model.h (module 'propagation'): static ns3::TypeId ns3::ConstantSpeedPropagationDelayModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## propagation-delay-model.h (module 'propagation'): void ns3::ConstantSpeedPropagationDelayModel::SetSpeed(double speed) [member function]
cls.add_method('SetSpeed',
'void',
[param('double', 'speed')])
return
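## ConstantSpeedPropagationDelayModel computes the propagation delay as
## distance(a, b) / speed, with the speed defaulting to the speed of light.
## Illustrative usage sketch of the binding registered above, assuming the
## generated ns3 extension module has been built (mobility_a / mobility_b are
## hypothetical MobilityModel instances, not defined in this file):
##   delay_model = ns3.ConstantSpeedPropagationDelayModel()
##   delay_model.SetSpeed(3.0e8)                        # metres per second
##   t = delay_model.GetDelay(mobility_a, mobility_b)   # returns an ns3.Time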
def register_Ns3Cost231PropagationLossModel_methods(root_module, cls):
## cost231-propagation-loss-model.h (module 'propagation'): static ns3::TypeId ns3::Cost231PropagationLossModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## cost231-propagation-loss-model.h (module 'propagation'): ns3::Cost231PropagationLossModel::Cost231PropagationLossModel() [constructor]
cls.add_constructor([])
## cost231-propagation-loss-model.h (module 'propagation'): double ns3::Cost231PropagationLossModel::GetLoss(ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function]
cls.add_method('GetLoss',
'double',
[param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')],
is_const=True)
## cost231-propagation-loss-model.h (module 'propagation'): void ns3::Cost231PropagationLossModel::SetBSAntennaHeight(double height) [member function]
cls.add_method('SetBSAntennaHeight',
'void',
[param('double', 'height')])
## cost231-propagation-loss-model.h (module 'propagation'): void ns3::Cost231PropagationLossModel::SetSSAntennaHeight(double height) [member function]
cls.add_method('SetSSAntennaHeight',
'void',
[param('double', 'height')])
## cost231-propagation-loss-model.h (module 'propagation'): void ns3::Cost231PropagationLossModel::SetEnvironment(ns3::Cost231PropagationLossModel::Environment env) [member function]
cls.add_method('SetEnvironment',
'void',
[param('ns3::Cost231PropagationLossModel::Environment', 'env')])
## cost231-propagation-loss-model.h (module 'propagation'): void ns3::Cost231PropagationLossModel::SetLambda(double lambda) [member function]
cls.add_method('SetLambda',
'void',
[param('double', 'lambda')])
## cost231-propagation-loss-model.h (module 'propagation'): void ns3::Cost231PropagationLossModel::SetMinDistance(double minDistance) [member function]
cls.add_method('SetMinDistance',
'void',
[param('double', 'minDistance')])
## cost231-propagation-loss-model.h (module 'propagation'): double ns3::Cost231PropagationLossModel::GetBSAntennaHeight() const [member function]
cls.add_method('GetBSAntennaHeight',
'double',
[],
is_const=True)
## cost231-propagation-loss-model.h (module 'propagation'): double ns3::Cost231PropagationLossModel::GetSSAntennaHeight() const [member function]
cls.add_method('GetSSAntennaHeight',
'double',
[],
is_const=True)
## cost231-propagation-loss-model.h (module 'propagation'): ns3::Cost231PropagationLossModel::Environment ns3::Cost231PropagationLossModel::GetEnvironment() const [member function]
cls.add_method('GetEnvironment',
'ns3::Cost231PropagationLossModel::Environment',
[],
is_const=True)
## cost231-propagation-loss-model.h (module 'propagation'): double ns3::Cost231PropagationLossModel::GetMinDistance() const [member function]
cls.add_method('GetMinDistance',
'double',
[],
is_const=True)
## cost231-propagation-loss-model.h (module 'propagation'): double ns3::Cost231PropagationLossModel::GetLambda() const [member function]
cls.add_method('GetLambda',
'double',
[],
is_const=True)
## cost231-propagation-loss-model.h (module 'propagation'): void ns3::Cost231PropagationLossModel::SetLambda(double frequency, double speed) [member function]
cls.add_method('SetLambda',
'void',
[param('double', 'frequency'), param('double', 'speed')])
## cost231-propagation-loss-model.h (module 'propagation'): double ns3::Cost231PropagationLossModel::GetShadowing() [member function]
cls.add_method('GetShadowing',
'double',
[])
## cost231-propagation-loss-model.h (module 'propagation'): void ns3::Cost231PropagationLossModel::SetShadowing(double shadowing) [member function]
cls.add_method('SetShadowing',
'void',
[param('double', 'shadowing')])
## cost231-propagation-loss-model.h (module 'propagation'): double ns3::Cost231PropagationLossModel::DoCalcRxPower(double txPowerDbm, ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function]
cls.add_method('DoCalcRxPower',
'double',
[param('double', 'txPowerDbm'), param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')],
is_const=True, visibility='private', is_virtual=True)
return
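## Cost231PropagationLossModel implements the empirical COST 231 (Hata extension)
## urban path-loss model. Illustrative sketch of the public setters/getters bound
## above, assuming the generated ns3 module is importable (mobility_a / mobility_b
## are hypothetical MobilityModel instances):
##   loss = ns3.Cost231PropagationLossModel()
##   loss.SetBSAntennaHeight(50.0)      # base-station antenna height, metres
##   loss.SetSSAntennaHeight(3.0)       # subscriber-station antenna height, metres
##   loss.SetLambda(2.3e9, 3.0e8)       # wavelength derived from frequency and speed
##   rx_loss_db = loss.GetLoss(mobility_a, mobility_b)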
def register_Ns3CtrlBAckRequestHeader_methods(root_module, cls):
## ctrl-headers.h (module 'wifi'): ns3::CtrlBAckRequestHeader::CtrlBAckRequestHeader(ns3::CtrlBAckRequestHeader const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CtrlBAckRequestHeader const &', 'arg0')])
## ctrl-headers.h (module 'wifi'): ns3::CtrlBAckRequestHeader::CtrlBAckRequestHeader() [constructor]
cls.add_constructor([])
## ctrl-headers.h (module 'wifi'): uint32_t ns3::CtrlBAckRequestHeader::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## ctrl-headers.h (module 'wifi'): ns3::TypeId ns3::CtrlBAckRequestHeader::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## ctrl-headers.h (module 'wifi'): uint32_t ns3::CtrlBAckRequestHeader::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## ctrl-headers.h (module 'wifi'): uint16_t ns3::CtrlBAckRequestHeader::GetStartingSequence() const [member function]
cls.add_method('GetStartingSequence',
'uint16_t',
[],
is_const=True)
## ctrl-headers.h (module 'wifi'): uint16_t ns3::CtrlBAckRequestHeader::GetStartingSequenceControl() const [member function]
cls.add_method('GetStartingSequenceControl',
'uint16_t',
[],
is_const=True)
## ctrl-headers.h (module 'wifi'): uint8_t ns3::CtrlBAckRequestHeader::GetTidInfo() const [member function]
cls.add_method('GetTidInfo',
'uint8_t',
[],
is_const=True)
## ctrl-headers.h (module 'wifi'): static ns3::TypeId ns3::CtrlBAckRequestHeader::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## ctrl-headers.h (module 'wifi'): bool ns3::CtrlBAckRequestHeader::IsBasic() const [member function]
cls.add_method('IsBasic',
'bool',
[],
is_const=True)
## ctrl-headers.h (module 'wifi'): bool ns3::CtrlBAckRequestHeader::IsCompressed() const [member function]
cls.add_method('IsCompressed',
'bool',
[],
is_const=True)
## ctrl-headers.h (module 'wifi'): bool ns3::CtrlBAckRequestHeader::IsMultiTid() const [member function]
cls.add_method('IsMultiTid',
'bool',
[],
is_const=True)
## ctrl-headers.h (module 'wifi'): bool ns3::CtrlBAckRequestHeader::MustSendHtImmediateAck() const [member function]
cls.add_method('MustSendHtImmediateAck',
'bool',
[],
is_const=True)
## ctrl-headers.h (module 'wifi'): void ns3::CtrlBAckRequestHeader::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## ctrl-headers.h (module 'wifi'): void ns3::CtrlBAckRequestHeader::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## ctrl-headers.h (module 'wifi'): void ns3::CtrlBAckRequestHeader::SetHtImmediateAck(bool immediateAck) [member function]
cls.add_method('SetHtImmediateAck',
'void',
[param('bool', 'immediateAck')])
## ctrl-headers.h (module 'wifi'): void ns3::CtrlBAckRequestHeader::SetStartingSequence(uint16_t seq) [member function]
cls.add_method('SetStartingSequence',
'void',
[param('uint16_t', 'seq')])
## ctrl-headers.h (module 'wifi'): void ns3::CtrlBAckRequestHeader::SetTidInfo(uint8_t tid) [member function]
cls.add_method('SetTidInfo',
'void',
[param('uint8_t', 'tid')])
## ctrl-headers.h (module 'wifi'): void ns3::CtrlBAckRequestHeader::SetType(ns3::BlockAckType type) [member function]
cls.add_method('SetType',
'void',
[param('ns3::BlockAckType', 'type')])
return
def register_Ns3CtrlBAckResponseHeader_methods(root_module, cls):
## ctrl-headers.h (module 'wifi'): ns3::CtrlBAckResponseHeader::CtrlBAckResponseHeader(ns3::CtrlBAckResponseHeader const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CtrlBAckResponseHeader const &', 'arg0')])
## ctrl-headers.h (module 'wifi'): ns3::CtrlBAckResponseHeader::CtrlBAckResponseHeader() [constructor]
cls.add_constructor([])
## ctrl-headers.h (module 'wifi'): uint32_t ns3::CtrlBAckResponseHeader::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## ctrl-headers.h (module 'wifi'): uint16_t const * ns3::CtrlBAckResponseHeader::GetBitmap() const [member function]
cls.add_method('GetBitmap',
'uint16_t const *',
[],
is_const=True)
## ctrl-headers.h (module 'wifi'): uint64_t ns3::CtrlBAckResponseHeader::GetCompressedBitmap() const [member function]
cls.add_method('GetCompressedBitmap',
'uint64_t',
[],
is_const=True)
## ctrl-headers.h (module 'wifi'): ns3::TypeId ns3::CtrlBAckResponseHeader::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## ctrl-headers.h (module 'wifi'): uint32_t ns3::CtrlBAckResponseHeader::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## ctrl-headers.h (module 'wifi'): uint16_t ns3::CtrlBAckResponseHeader::GetStartingSequence() const [member function]
cls.add_method('GetStartingSequence',
'uint16_t',
[],
is_const=True)
## ctrl-headers.h (module 'wifi'): uint16_t ns3::CtrlBAckResponseHeader::GetStartingSequenceControl() const [member function]
cls.add_method('GetStartingSequenceControl',
'uint16_t',
[],
is_const=True)
## ctrl-headers.h (module 'wifi'): uint8_t ns3::CtrlBAckResponseHeader::GetTidInfo() const [member function]
cls.add_method('GetTidInfo',
'uint8_t',
[],
is_const=True)
## ctrl-headers.h (module 'wifi'): static ns3::TypeId ns3::CtrlBAckResponseHeader::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## ctrl-headers.h (module 'wifi'): bool ns3::CtrlBAckResponseHeader::IsBasic() const [member function]
cls.add_method('IsBasic',
'bool',
[],
is_const=True)
## ctrl-headers.h (module 'wifi'): bool ns3::CtrlBAckResponseHeader::IsCompressed() const [member function]
cls.add_method('IsCompressed',
'bool',
[],
is_const=True)
## ctrl-headers.h (module 'wifi'): bool ns3::CtrlBAckResponseHeader::IsFragmentReceived(uint16_t seq, uint8_t frag) const [member function]
cls.add_method('IsFragmentReceived',
'bool',
[param('uint16_t', 'seq'), param('uint8_t', 'frag')],
is_const=True)
## ctrl-headers.h (module 'wifi'): bool ns3::CtrlBAckResponseHeader::IsMultiTid() const [member function]
cls.add_method('IsMultiTid',
'bool',
[],
is_const=True)
## ctrl-headers.h (module 'wifi'): bool ns3::CtrlBAckResponseHeader::IsPacketReceived(uint16_t seq) const [member function]
cls.add_method('IsPacketReceived',
'bool',
[param('uint16_t', 'seq')],
is_const=True)
## ctrl-headers.h (module 'wifi'): bool ns3::CtrlBAckResponseHeader::MustSendHtImmediateAck() const [member function]
cls.add_method('MustSendHtImmediateAck',
'bool',
[],
is_const=True)
## ctrl-headers.h (module 'wifi'): void ns3::CtrlBAckResponseHeader::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## ctrl-headers.h (module 'wifi'): void ns3::CtrlBAckResponseHeader::ResetBitmap() [member function]
cls.add_method('ResetBitmap',
'void',
[])
## ctrl-headers.h (module 'wifi'): void ns3::CtrlBAckResponseHeader::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## ctrl-headers.h (module 'wifi'): void ns3::CtrlBAckResponseHeader::SetHtImmediateAck(bool immeadiateAck) [member function]
cls.add_method('SetHtImmediateAck',
'void',
[param('bool', 'immeadiateAck')])
## ctrl-headers.h (module 'wifi'): void ns3::CtrlBAckResponseHeader::SetReceivedFragment(uint16_t seq, uint8_t frag) [member function]
cls.add_method('SetReceivedFragment',
'void',
[param('uint16_t', 'seq'), param('uint8_t', 'frag')])
## ctrl-headers.h (module 'wifi'): void ns3::CtrlBAckResponseHeader::SetReceivedPacket(uint16_t seq) [member function]
cls.add_method('SetReceivedPacket',
'void',
[param('uint16_t', 'seq')])
## ctrl-headers.h (module 'wifi'): void ns3::CtrlBAckResponseHeader::SetStartingSequence(uint16_t seq) [member function]
cls.add_method('SetStartingSequence',
'void',
[param('uint16_t', 'seq')])
## ctrl-headers.h (module 'wifi'): void ns3::CtrlBAckResponseHeader::SetStartingSequenceControl(uint16_t seqControl) [member function]
cls.add_method('SetStartingSequenceControl',
'void',
[param('uint16_t', 'seqControl')])
## ctrl-headers.h (module 'wifi'): void ns3::CtrlBAckResponseHeader::SetTidInfo(uint8_t tid) [member function]
cls.add_method('SetTidInfo',
'void',
[param('uint8_t', 'tid')])
## ctrl-headers.h (module 'wifi'): void ns3::CtrlBAckResponseHeader::SetType(ns3::BlockAckType type) [member function]
cls.add_method('SetType',
'void',
[param('ns3::BlockAckType', 'type')])
return
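## CtrlBAckResponseHeader models the 802.11 Block Ack response frame: a TID, a
## starting sequence number and a bitmap of received packets/fragments. Illustrative
## sketch, assuming the generated ns3 module is importable (the BlockAckType enum
## member name is an assumption taken from ctrl-headers.h, not from this file):
##   ba = ns3.CtrlBAckResponseHeader()
##   ba.SetType(ns3.COMPRESSED_BLOCK_ACK)
##   ba.SetStartingSequence(42)
##   ba.SetReceivedPacket(43)
##   assert ba.IsPacketReceived(43) and not ba.IsPacketReceived(44)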
def register_Ns3Dcf_methods(root_module, cls):
## dcf.h (module 'wifi'): ns3::Dcf::Dcf() [constructor]
cls.add_constructor([])
## dcf.h (module 'wifi'): ns3::Dcf::Dcf(ns3::Dcf const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Dcf const &', 'arg0')])
## dcf.h (module 'wifi'): uint32_t ns3::Dcf::GetAifsn() const [member function]
cls.add_method('GetAifsn',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## dcf.h (module 'wifi'): uint32_t ns3::Dcf::GetMaxCw() const [member function]
cls.add_method('GetMaxCw',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## dcf.h (module 'wifi'): uint32_t ns3::Dcf::GetMinCw() const [member function]
cls.add_method('GetMinCw',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## dcf.h (module 'wifi'): static ns3::TypeId ns3::Dcf::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## dcf.h (module 'wifi'): void ns3::Dcf::SetAifsn(uint32_t aifsn) [member function]
cls.add_method('SetAifsn',
'void',
[param('uint32_t', 'aifsn')],
is_pure_virtual=True, is_virtual=True)
## dcf.h (module 'wifi'): void ns3::Dcf::SetMaxCw(uint32_t maxCw) [member function]
cls.add_method('SetMaxCw',
'void',
[param('uint32_t', 'maxCw')],
is_pure_virtual=True, is_virtual=True)
## dcf.h (module 'wifi'): void ns3::Dcf::SetMinCw(uint32_t minCw) [member function]
cls.add_method('SetMinCw',
'void',
[param('uint32_t', 'minCw')],
is_pure_virtual=True, is_virtual=True)
return
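## Dcf is the abstract interface for 802.11 contention parameters: the arbitration
## inter-frame space number (AIFSN) and the minimum/maximum contention window
## (CWmin/CWmax). All accessors above are pure virtual; concrete implementations
## such as EdcaTxopN (registered later in this file) provide the actual state.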
def register_Ns3DoubleValue_methods(root_module, cls):
## double.h (module 'core'): ns3::DoubleValue::DoubleValue() [constructor]
cls.add_constructor([])
## double.h (module 'core'): ns3::DoubleValue::DoubleValue(ns3::DoubleValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::DoubleValue const &', 'arg0')])
## double.h (module 'core'): ns3::DoubleValue::DoubleValue(double const & value) [constructor]
cls.add_constructor([param('double const &', 'value')])
## double.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::DoubleValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## double.h (module 'core'): bool ns3::DoubleValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## double.h (module 'core'): double ns3::DoubleValue::Get() const [member function]
cls.add_method('Get',
'double',
[],
is_const=True)
## double.h (module 'core'): std::string ns3::DoubleValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## double.h (module 'core'): void ns3::DoubleValue::Set(double const & value) [member function]
cls.add_method('Set',
'void',
[param('double const &', 'value')])
return
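## DoubleValue is the attribute-system wrapper for a C++ double. Illustrative
## sketch, assuming the generated ns3 module is importable (the attribute path
## passed to Config.SetDefault is illustrative only):
##   v = ns3.DoubleValue(5.0)
##   ns3.Config.SetDefault("ns3::FriisPropagationLossModel::SystemLoss", v)
##   print(v.Get())   # 5.0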
def register_Ns3EdcaTxopN_methods(root_module, cls):
## edca-txop-n.h (module 'wifi'): static ns3::TypeId ns3::EdcaTxopN::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## edca-txop-n.h (module 'wifi'): ns3::EdcaTxopN::EdcaTxopN() [constructor]
cls.add_constructor([])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
is_virtual=True)
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::SetLow(ns3::Ptr<ns3::MacLow> low) [member function]
cls.add_method('SetLow',
'void',
[param('ns3::Ptr< ns3::MacLow >', 'low')])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::SetTxMiddle(ns3::MacTxMiddle * txMiddle) [member function]
cls.add_method('SetTxMiddle',
'void',
[param('ns3::MacTxMiddle *', 'txMiddle')])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::SetManager(ns3::DcfManager * manager) [member function]
cls.add_method('SetManager',
'void',
[param('ns3::DcfManager *', 'manager')])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::SetTxOkCallback(ns3::Callback<void, ns3::WifiMacHeader const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> callback) [member function]
cls.add_method('SetTxOkCallback',
'void',
[param('ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::SetTxFailedCallback(ns3::Callback<void, ns3::WifiMacHeader const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> callback) [member function]
cls.add_method('SetTxFailedCallback',
'void',
[param('ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::SetWifiRemoteStationManager(ns3::Ptr<ns3::WifiRemoteStationManager> remoteManager) [member function]
cls.add_method('SetWifiRemoteStationManager',
'void',
[param('ns3::Ptr< ns3::WifiRemoteStationManager >', 'remoteManager')])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::SetTypeOfStation(ns3::TypeOfStation type) [member function]
cls.add_method('SetTypeOfStation',
'void',
[param('ns3::TypeOfStation', 'type')])
## edca-txop-n.h (module 'wifi'): ns3::TypeOfStation ns3::EdcaTxopN::GetTypeOfStation() const [member function]
cls.add_method('GetTypeOfStation',
'ns3::TypeOfStation',
[],
is_const=True)
## edca-txop-n.h (module 'wifi'): ns3::Ptr<ns3::WifiMacQueue> ns3::EdcaTxopN::GetQueue() const [member function]
cls.add_method('GetQueue',
'ns3::Ptr< ns3::WifiMacQueue >',
[],
is_const=True)
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::SetMinCw(uint32_t minCw) [member function]
cls.add_method('SetMinCw',
'void',
[param('uint32_t', 'minCw')],
is_virtual=True)
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::SetMaxCw(uint32_t maxCw) [member function]
cls.add_method('SetMaxCw',
'void',
[param('uint32_t', 'maxCw')],
is_virtual=True)
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::SetAifsn(uint32_t aifsn) [member function]
cls.add_method('SetAifsn',
'void',
[param('uint32_t', 'aifsn')],
is_virtual=True)
## edca-txop-n.h (module 'wifi'): uint32_t ns3::EdcaTxopN::GetMinCw() const [member function]
cls.add_method('GetMinCw',
'uint32_t',
[],
is_const=True, is_virtual=True)
## edca-txop-n.h (module 'wifi'): uint32_t ns3::EdcaTxopN::GetMaxCw() const [member function]
cls.add_method('GetMaxCw',
'uint32_t',
[],
is_const=True, is_virtual=True)
## edca-txop-n.h (module 'wifi'): uint32_t ns3::EdcaTxopN::GetAifsn() const [member function]
cls.add_method('GetAifsn',
'uint32_t',
[],
is_const=True, is_virtual=True)
## edca-txop-n.h (module 'wifi'): ns3::Ptr<ns3::MacLow> ns3::EdcaTxopN::Low() [member function]
cls.add_method('Low',
'ns3::Ptr< ns3::MacLow >',
[])
## edca-txop-n.h (module 'wifi'): ns3::Ptr<ns3::MsduAggregator> ns3::EdcaTxopN::GetMsduAggregator() const [member function]
cls.add_method('GetMsduAggregator',
'ns3::Ptr< ns3::MsduAggregator >',
[],
is_const=True)
## edca-txop-n.h (module 'wifi'): bool ns3::EdcaTxopN::NeedsAccess() const [member function]
cls.add_method('NeedsAccess',
'bool',
[],
is_const=True)
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::NotifyAccessGranted() [member function]
cls.add_method('NotifyAccessGranted',
'void',
[])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::NotifyInternalCollision() [member function]
cls.add_method('NotifyInternalCollision',
'void',
[])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::NotifyCollision() [member function]
cls.add_method('NotifyCollision',
'void',
[])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::NotifyChannelSwitching() [member function]
cls.add_method('NotifyChannelSwitching',
'void',
[])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::GotCts(double snr, ns3::WifiMode txMode) [member function]
cls.add_method('GotCts',
'void',
[param('double', 'snr'), param('ns3::WifiMode', 'txMode')])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::MissedCts() [member function]
cls.add_method('MissedCts',
'void',
[])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::GotAck(double snr, ns3::WifiMode txMode) [member function]
cls.add_method('GotAck',
'void',
[param('double', 'snr'), param('ns3::WifiMode', 'txMode')])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::GotBlockAck(ns3::CtrlBAckResponseHeader const * blockAck, ns3::Mac48Address recipient) [member function]
cls.add_method('GotBlockAck',
'void',
[param('ns3::CtrlBAckResponseHeader const *', 'blockAck'), param('ns3::Mac48Address', 'recipient')])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::MissedBlockAck() [member function]
cls.add_method('MissedBlockAck',
'void',
[])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::GotAddBaResponse(ns3::MgtAddBaResponseHeader const * respHdr, ns3::Mac48Address recipient) [member function]
cls.add_method('GotAddBaResponse',
'void',
[param('ns3::MgtAddBaResponseHeader const *', 'respHdr'), param('ns3::Mac48Address', 'recipient')])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::GotDelBaFrame(ns3::MgtDelBaHeader const * delBaHdr, ns3::Mac48Address recipient) [member function]
cls.add_method('GotDelBaFrame',
'void',
[param('ns3::MgtDelBaHeader const *', 'delBaHdr'), param('ns3::Mac48Address', 'recipient')])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::MissedAck() [member function]
cls.add_method('MissedAck',
'void',
[])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::StartNext() [member function]
cls.add_method('StartNext',
'void',
[])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::Cancel() [member function]
cls.add_method('Cancel',
'void',
[])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::RestartAccessIfNeeded() [member function]
cls.add_method('RestartAccessIfNeeded',
'void',
[])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::StartAccessIfNeeded() [member function]
cls.add_method('StartAccessIfNeeded',
'void',
[])
## edca-txop-n.h (module 'wifi'): bool ns3::EdcaTxopN::NeedRts() [member function]
cls.add_method('NeedRts',
'bool',
[])
## edca-txop-n.h (module 'wifi'): bool ns3::EdcaTxopN::NeedRtsRetransmission() [member function]
cls.add_method('NeedRtsRetransmission',
'bool',
[])
## edca-txop-n.h (module 'wifi'): bool ns3::EdcaTxopN::NeedDataRetransmission() [member function]
cls.add_method('NeedDataRetransmission',
'bool',
[])
## edca-txop-n.h (module 'wifi'): bool ns3::EdcaTxopN::NeedFragmentation() const [member function]
cls.add_method('NeedFragmentation',
'bool',
[],
is_const=True)
## edca-txop-n.h (module 'wifi'): uint32_t ns3::EdcaTxopN::GetNextFragmentSize() [member function]
cls.add_method('GetNextFragmentSize',
'uint32_t',
[])
## edca-txop-n.h (module 'wifi'): uint32_t ns3::EdcaTxopN::GetFragmentSize() [member function]
cls.add_method('GetFragmentSize',
'uint32_t',
[])
## edca-txop-n.h (module 'wifi'): uint32_t ns3::EdcaTxopN::GetFragmentOffset() [member function]
cls.add_method('GetFragmentOffset',
'uint32_t',
[])
## edca-txop-n.h (module 'wifi'): bool ns3::EdcaTxopN::IsLastFragment() const [member function]
cls.add_method('IsLastFragment',
'bool',
[],
is_const=True)
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::NextFragment() [member function]
cls.add_method('NextFragment',
'void',
[])
## edca-txop-n.h (module 'wifi'): ns3::Ptr<ns3::Packet> ns3::EdcaTxopN::GetFragmentPacket(ns3::WifiMacHeader * hdr) [member function]
cls.add_method('GetFragmentPacket',
'ns3::Ptr< ns3::Packet >',
[param('ns3::WifiMacHeader *', 'hdr')])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::SetAccessCategory(ns3::AcIndex ac) [member function]
cls.add_method('SetAccessCategory',
'void',
[param('ns3::AcIndex', 'ac')])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::Queue(ns3::Ptr<ns3::Packet const> packet, ns3::WifiMacHeader const & hdr) [member function]
cls.add_method('Queue',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet'), param('ns3::WifiMacHeader const &', 'hdr')])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::SetMsduAggregator(ns3::Ptr<ns3::MsduAggregator> aggr) [member function]
cls.add_method('SetMsduAggregator',
'void',
[param('ns3::Ptr< ns3::MsduAggregator >', 'aggr')])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::PushFront(ns3::Ptr<ns3::Packet const> packet, ns3::WifiMacHeader const & hdr) [member function]
cls.add_method('PushFront',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet'), param('ns3::WifiMacHeader const &', 'hdr')])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::CompleteConfig() [member function]
cls.add_method('CompleteConfig',
'void',
[])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::SetBlockAckThreshold(uint8_t threshold) [member function]
cls.add_method('SetBlockAckThreshold',
'void',
[param('uint8_t', 'threshold')])
## edca-txop-n.h (module 'wifi'): uint8_t ns3::EdcaTxopN::GetBlockAckThreshold() const [member function]
cls.add_method('GetBlockAckThreshold',
'uint8_t',
[],
is_const=True)
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::SetBlockAckInactivityTimeout(uint16_t timeout) [member function]
cls.add_method('SetBlockAckInactivityTimeout',
'void',
[param('uint16_t', 'timeout')])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::SendDelbaFrame(ns3::Mac48Address addr, uint8_t tid, bool byOriginator) [member function]
cls.add_method('SendDelbaFrame',
'void',
[param('ns3::Mac48Address', 'addr'), param('uint8_t', 'tid'), param('bool', 'byOriginator')])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::DoStart() [member function]
cls.add_method('DoStart',
'void',
[],
visibility='private', is_virtual=True)
return
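## EdcaTxopN is the per-access-category EDCA transmit queue used on the QoS data
## path: it buffers packets, contends for the medium through a DcfManager, and
## drives fragmentation and Block Ack exchanges via MacLow. Illustrative sketch of
## the bound setters, assuming the generated ns3 module is importable; a usable
## instance also needs SetLow/SetManager/SetTxMiddle wired up by the MAC, and
## AC_VO is the AcIndex enum member assumed from qos-utils.h:
##   edca = ns3.EdcaTxopN()
##   edca.SetAccessCategory(ns3.AC_VO)
##   edca.SetAifsn(2)
##   edca.SetMinCw(3)
##   edca.SetMaxCw(7)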
def register_Ns3EmptyAttributeValue_methods(root_module, cls):
## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue(ns3::EmptyAttributeValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::EmptyAttributeValue const &', 'arg0')])
## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::EmptyAttributeValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, visibility='private', is_virtual=True)
## attribute.h (module 'core'): bool ns3::EmptyAttributeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
visibility='private', is_virtual=True)
## attribute.h (module 'core'): std::string ns3::EmptyAttributeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, visibility='private', is_virtual=True)
return
def register_Ns3ErrorRateModel_methods(root_module, cls):
## error-rate-model.h (module 'wifi'): ns3::ErrorRateModel::ErrorRateModel() [constructor]
cls.add_constructor([])
## error-rate-model.h (module 'wifi'): ns3::ErrorRateModel::ErrorRateModel(ns3::ErrorRateModel const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ErrorRateModel const &', 'arg0')])
## error-rate-model.h (module 'wifi'): double ns3::ErrorRateModel::CalculateSnr(ns3::WifiMode txMode, double ber) const [member function]
cls.add_method('CalculateSnr',
'double',
[param('ns3::WifiMode', 'txMode'), param('double', 'ber')],
is_const=True)
## error-rate-model.h (module 'wifi'): double ns3::ErrorRateModel::GetChunkSuccessRate(ns3::WifiMode mode, double snr, uint32_t nbits) const [member function]
cls.add_method('GetChunkSuccessRate',
'double',
[param('ns3::WifiMode', 'mode'), param('double', 'snr'), param('uint32_t', 'nbits')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## error-rate-model.h (module 'wifi'): static ns3::TypeId ns3::ErrorRateModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
return
def register_Ns3EventImpl_methods(root_module, cls):
## event-impl.h (module 'core'): ns3::EventImpl::EventImpl(ns3::EventImpl const & arg0) [copy constructor]
cls.add_constructor([param('ns3::EventImpl const &', 'arg0')])
## event-impl.h (module 'core'): ns3::EventImpl::EventImpl() [constructor]
cls.add_constructor([])
## event-impl.h (module 'core'): void ns3::EventImpl::Cancel() [member function]
cls.add_method('Cancel',
'void',
[])
## event-impl.h (module 'core'): void ns3::EventImpl::Invoke() [member function]
cls.add_method('Invoke',
'void',
[])
## event-impl.h (module 'core'): bool ns3::EventImpl::IsCancelled() [member function]
cls.add_method('IsCancelled',
'bool',
[])
## event-impl.h (module 'core'): void ns3::EventImpl::Notify() [member function]
cls.add_method('Notify',
'void',
[],
is_pure_virtual=True, visibility='protected', is_virtual=True)
return
def register_Ns3ExtendedSupportedRatesIE_methods(root_module, cls):
## supported-rates.h (module 'wifi'): ns3::ExtendedSupportedRatesIE::ExtendedSupportedRatesIE(ns3::ExtendedSupportedRatesIE const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ExtendedSupportedRatesIE const &', 'arg0')])
## supported-rates.h (module 'wifi'): ns3::ExtendedSupportedRatesIE::ExtendedSupportedRatesIE() [constructor]
cls.add_constructor([])
## supported-rates.h (module 'wifi'): ns3::ExtendedSupportedRatesIE::ExtendedSupportedRatesIE(ns3::SupportedRates * rates) [constructor]
cls.add_constructor([param('ns3::SupportedRates *', 'rates')])
## supported-rates.h (module 'wifi'): uint8_t ns3::ExtendedSupportedRatesIE::DeserializeInformationField(ns3::Buffer::Iterator start, uint8_t length) [member function]
cls.add_method('DeserializeInformationField',
'uint8_t',
[param('ns3::Buffer::Iterator', 'start'), param('uint8_t', 'length')],
is_virtual=True)
## supported-rates.h (module 'wifi'): ns3::WifiInformationElementId ns3::ExtendedSupportedRatesIE::ElementId() const [member function]
cls.add_method('ElementId',
'ns3::WifiInformationElementId',
[],
is_const=True, is_virtual=True)
## supported-rates.h (module 'wifi'): uint8_t ns3::ExtendedSupportedRatesIE::GetInformationFieldSize() const [member function]
cls.add_method('GetInformationFieldSize',
'uint8_t',
[],
is_const=True, is_virtual=True)
## supported-rates.h (module 'wifi'): uint16_t ns3::ExtendedSupportedRatesIE::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint16_t',
[],
is_const=True)
## supported-rates.h (module 'wifi'): ns3::Buffer::Iterator ns3::ExtendedSupportedRatesIE::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'ns3::Buffer::Iterator',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True)
## supported-rates.h (module 'wifi'): void ns3::ExtendedSupportedRatesIE::SerializeInformationField(ns3::Buffer::Iterator start) const [member function]
cls.add_method('SerializeInformationField',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
return
def register_Ns3FixedRssLossModel_methods(root_module, cls):
## propagation-loss-model.h (module 'propagation'): static ns3::TypeId ns3::FixedRssLossModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## propagation-loss-model.h (module 'propagation'): ns3::FixedRssLossModel::FixedRssLossModel() [constructor]
cls.add_constructor([])
## propagation-loss-model.h (module 'propagation'): void ns3::FixedRssLossModel::SetRss(double rss) [member function]
cls.add_method('SetRss',
'void',
[param('double', 'rss')])
## propagation-loss-model.h (module 'propagation'): double ns3::FixedRssLossModel::DoCalcRxPower(double txPowerDbm, ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function]
cls.add_method('DoCalcRxPower',
'double',
[param('double', 'txPowerDbm'), param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')],
is_const=True, visibility='private', is_virtual=True)
return
def register_Ns3FriisPropagationLossModel_methods(root_module, cls):
## propagation-loss-model.h (module 'propagation'): static ns3::TypeId ns3::FriisPropagationLossModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## propagation-loss-model.h (module 'propagation'): ns3::FriisPropagationLossModel::FriisPropagationLossModel() [constructor]
cls.add_constructor([])
## propagation-loss-model.h (module 'propagation'): void ns3::FriisPropagationLossModel::SetLambda(double frequency, double speed) [member function]
cls.add_method('SetLambda',
'void',
[param('double', 'frequency'), param('double', 'speed')])
## propagation-loss-model.h (module 'propagation'): void ns3::FriisPropagationLossModel::SetLambda(double lambda) [member function]
cls.add_method('SetLambda',
'void',
[param('double', 'lambda')])
## propagation-loss-model.h (module 'propagation'): void ns3::FriisPropagationLossModel::SetSystemLoss(double systemLoss) [member function]
cls.add_method('SetSystemLoss',
'void',
[param('double', 'systemLoss')])
## propagation-loss-model.h (module 'propagation'): void ns3::FriisPropagationLossModel::SetMinDistance(double minDistance) [member function]
cls.add_method('SetMinDistance',
'void',
[param('double', 'minDistance')])
## propagation-loss-model.h (module 'propagation'): double ns3::FriisPropagationLossModel::GetMinDistance() const [member function]
cls.add_method('GetMinDistance',
'double',
[],
is_const=True)
## propagation-loss-model.h (module 'propagation'): double ns3::FriisPropagationLossModel::GetLambda() const [member function]
cls.add_method('GetLambda',
'double',
[],
is_const=True)
## propagation-loss-model.h (module 'propagation'): double ns3::FriisPropagationLossModel::GetSystemLoss() const [member function]
cls.add_method('GetSystemLoss',
'double',
[],
is_const=True)
## propagation-loss-model.h (module 'propagation'): double ns3::FriisPropagationLossModel::DoCalcRxPower(double txPowerDbm, ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function]
cls.add_method('DoCalcRxPower',
'double',
[param('double', 'txPowerDbm'), param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')],
is_const=True, visibility='private', is_virtual=True)
return
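## FriisPropagationLossModel implements free-space path loss,
##   Pr = Pt * Gt * Gr * lambda^2 / ((4 * pi * d)^2 * L),
## with a configurable minimum distance below which no loss is applied.
## Illustrative sketch, assuming the generated ns3 module is importable:
##   friis = ns3.FriisPropagationLossModel()
##   friis.SetLambda(5.15e9, 3.0e8)   # lambda = speed / frequency
##   friis.SetSystemLoss(1.0)
##   print(friis.GetLambda(), friis.GetSystemLoss())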
def register_Ns3IdealWifiManager_methods(root_module, cls):
## ideal-wifi-manager.h (module 'wifi'): ns3::IdealWifiManager::IdealWifiManager(ns3::IdealWifiManager const & arg0) [copy constructor]
cls.add_constructor([param('ns3::IdealWifiManager const &', 'arg0')])
## ideal-wifi-manager.h (module 'wifi'): ns3::IdealWifiManager::IdealWifiManager() [constructor]
cls.add_constructor([])
## ideal-wifi-manager.h (module 'wifi'): static ns3::TypeId ns3::IdealWifiManager::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## ideal-wifi-manager.h (module 'wifi'): void ns3::IdealWifiManager::SetupPhy(ns3::Ptr<ns3::WifiPhy> phy) [member function]
cls.add_method('SetupPhy',
'void',
[param('ns3::Ptr< ns3::WifiPhy >', 'phy')],
is_virtual=True)
## ideal-wifi-manager.h (module 'wifi'): ns3::WifiRemoteStation * ns3::IdealWifiManager::DoCreateStation() const [member function]
cls.add_method('DoCreateStation',
'ns3::WifiRemoteStation *',
[],
is_const=True, visibility='private', is_virtual=True)
## ideal-wifi-manager.h (module 'wifi'): ns3::WifiMode ns3::IdealWifiManager::DoGetDataMode(ns3::WifiRemoteStation * station, uint32_t size) [member function]
cls.add_method('DoGetDataMode',
'ns3::WifiMode',
[param('ns3::WifiRemoteStation *', 'station'), param('uint32_t', 'size')],
visibility='private', is_virtual=True)
## ideal-wifi-manager.h (module 'wifi'): ns3::WifiMode ns3::IdealWifiManager::DoGetRtsMode(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoGetRtsMode',
'ns3::WifiMode',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## ideal-wifi-manager.h (module 'wifi'): void ns3::IdealWifiManager::DoReportDataFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportDataFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## ideal-wifi-manager.h (module 'wifi'): void ns3::IdealWifiManager::DoReportDataOk(ns3::WifiRemoteStation * station, double ackSnr, ns3::WifiMode ackMode, double dataSnr) [member function]
cls.add_method('DoReportDataOk',
'void',
[param('ns3::WifiRemoteStation *', 'station'), param('double', 'ackSnr'), param('ns3::WifiMode', 'ackMode'), param('double', 'dataSnr')],
visibility='private', is_virtual=True)
## ideal-wifi-manager.h (module 'wifi'): void ns3::IdealWifiManager::DoReportFinalDataFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportFinalDataFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## ideal-wifi-manager.h (module 'wifi'): void ns3::IdealWifiManager::DoReportFinalRtsFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportFinalRtsFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## ideal-wifi-manager.h (module 'wifi'): void ns3::IdealWifiManager::DoReportRtsFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportRtsFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## ideal-wifi-manager.h (module 'wifi'): void ns3::IdealWifiManager::DoReportRtsOk(ns3::WifiRemoteStation * station, double ctsSnr, ns3::WifiMode ctsMode, double rtsSnr) [member function]
cls.add_method('DoReportRtsOk',
'void',
[param('ns3::WifiRemoteStation *', 'station'), param('double', 'ctsSnr'), param('ns3::WifiMode', 'ctsMode'), param('double', 'rtsSnr')],
visibility='private', is_virtual=True)
## ideal-wifi-manager.h (module 'wifi'): void ns3::IdealWifiManager::DoReportRxOk(ns3::WifiRemoteStation * station, double rxSnr, ns3::WifiMode txMode) [member function]
cls.add_method('DoReportRxOk',
'void',
[param('ns3::WifiRemoteStation *', 'station'), param('double', 'rxSnr'), param('ns3::WifiMode', 'txMode')],
visibility='private', is_virtual=True)
## ideal-wifi-manager.h (module 'wifi'): bool ns3::IdealWifiManager::IsLowLatency() const [member function]
cls.add_method('IsLowLatency',
'bool',
[],
is_const=True, visibility='private', is_virtual=True)
return
def register_Ns3Ipv4AddressChecker_methods(root_module, cls):
## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker::Ipv4AddressChecker() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker::Ipv4AddressChecker(ns3::Ipv4AddressChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4AddressChecker const &', 'arg0')])
return
def register_Ns3Ipv4AddressValue_methods(root_module, cls):
## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue(ns3::Ipv4AddressValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4AddressValue const &', 'arg0')])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue(ns3::Ipv4Address const & value) [constructor]
cls.add_constructor([param('ns3::Ipv4Address const &', 'value')])
## ipv4-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv4AddressValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4AddressValue::Get() const [member function]
cls.add_method('Get',
'ns3::Ipv4Address',
[],
is_const=True)
## ipv4-address.h (module 'network'): std::string ns3::Ipv4AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4AddressValue::Set(ns3::Ipv4Address const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Ipv4Address const &', 'value')])
return
def register_Ns3Ipv4MaskChecker_methods(root_module, cls):
## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker::Ipv4MaskChecker() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker::Ipv4MaskChecker(ns3::Ipv4MaskChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4MaskChecker const &', 'arg0')])
return
def register_Ns3Ipv4MaskValue_methods(root_module, cls):
## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue(ns3::Ipv4MaskValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4MaskValue const &', 'arg0')])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue(ns3::Ipv4Mask const & value) [constructor]
cls.add_constructor([param('ns3::Ipv4Mask const &', 'value')])
## ipv4-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv4MaskValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4MaskValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## ipv4-address.h (module 'network'): ns3::Ipv4Mask ns3::Ipv4MaskValue::Get() const [member function]
cls.add_method('Get',
'ns3::Ipv4Mask',
[],
is_const=True)
## ipv4-address.h (module 'network'): std::string ns3::Ipv4MaskValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4MaskValue::Set(ns3::Ipv4Mask const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Ipv4Mask const &', 'value')])
return
def register_Ns3Ipv6AddressChecker_methods(root_module, cls):
## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker::Ipv6AddressChecker() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker::Ipv6AddressChecker(ns3::Ipv6AddressChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv6AddressChecker const &', 'arg0')])
return
def register_Ns3Ipv6AddressValue_methods(root_module, cls):
## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue(ns3::Ipv6AddressValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv6AddressValue const &', 'arg0')])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue(ns3::Ipv6Address const & value) [constructor]
cls.add_constructor([param('ns3::Ipv6Address const &', 'value')])
## ipv6-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv6AddressValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## ipv6-address.h (module 'network'): ns3::Ipv6Address ns3::Ipv6AddressValue::Get() const [member function]
cls.add_method('Get',
'ns3::Ipv6Address',
[],
is_const=True)
## ipv6-address.h (module 'network'): std::string ns3::Ipv6AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6AddressValue::Set(ns3::Ipv6Address const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Ipv6Address const &', 'value')])
return
def register_Ns3Ipv6PrefixChecker_methods(root_module, cls):
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker::Ipv6PrefixChecker() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker::Ipv6PrefixChecker(ns3::Ipv6PrefixChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv6PrefixChecker const &', 'arg0')])
return
def register_Ns3Ipv6PrefixValue_methods(root_module, cls):
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue(ns3::Ipv6PrefixValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv6PrefixValue const &', 'arg0')])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue(ns3::Ipv6Prefix const & value) [constructor]
cls.add_constructor([param('ns3::Ipv6Prefix const &', 'value')])
## ipv6-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv6PrefixValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6PrefixValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix ns3::Ipv6PrefixValue::Get() const [member function]
cls.add_method('Get',
'ns3::Ipv6Prefix',
[],
is_const=True)
## ipv6-address.h (module 'network'): std::string ns3::Ipv6PrefixValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6PrefixValue::Set(ns3::Ipv6Prefix const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Ipv6Prefix const &', 'value')])
return
def register_Ns3JakesPropagationLossModel_methods(root_module, cls):
## jakes-propagation-loss-model.h (module 'propagation'): ns3::JakesPropagationLossModel::JakesPropagationLossModel() [constructor]
cls.add_constructor([])
## jakes-propagation-loss-model.h (module 'propagation'): uint8_t ns3::JakesPropagationLossModel::GetNOscillators() const [member function]
cls.add_method('GetNOscillators',
'uint8_t',
[],
is_const=True)
## jakes-propagation-loss-model.h (module 'propagation'): uint8_t ns3::JakesPropagationLossModel::GetNRays() const [member function]
cls.add_method('GetNRays',
'uint8_t',
[],
is_const=True)
## jakes-propagation-loss-model.h (module 'propagation'): static ns3::TypeId ns3::JakesPropagationLossModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## jakes-propagation-loss-model.h (module 'propagation'): void ns3::JakesPropagationLossModel::SetNOscillators(uint8_t nOscillators) [member function]
cls.add_method('SetNOscillators',
'void',
[param('uint8_t', 'nOscillators')])
## jakes-propagation-loss-model.h (module 'propagation'): void ns3::JakesPropagationLossModel::SetNRays(uint8_t nRays) [member function]
cls.add_method('SetNRays',
'void',
[param('uint8_t', 'nRays')])
## jakes-propagation-loss-model.h (module 'propagation'): double ns3::JakesPropagationLossModel::DoCalcRxPower(double txPowerDbm, ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function]
cls.add_method('DoCalcRxPower',
'double',
[param('double', 'txPowerDbm'), param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')],
is_const=True, visibility='private', is_virtual=True)
return
def register_Ns3LogDistancePropagationLossModel_methods(root_module, cls):
## propagation-loss-model.h (module 'propagation'): static ns3::TypeId ns3::LogDistancePropagationLossModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## propagation-loss-model.h (module 'propagation'): ns3::LogDistancePropagationLossModel::LogDistancePropagationLossModel() [constructor]
cls.add_constructor([])
## propagation-loss-model.h (module 'propagation'): void ns3::LogDistancePropagationLossModel::SetPathLossExponent(double n) [member function]
cls.add_method('SetPathLossExponent',
'void',
[param('double', 'n')])
## propagation-loss-model.h (module 'propagation'): double ns3::LogDistancePropagationLossModel::GetPathLossExponent() const [member function]
cls.add_method('GetPathLossExponent',
'double',
[],
is_const=True)
## propagation-loss-model.h (module 'propagation'): void ns3::LogDistancePropagationLossModel::SetReference(double referenceDistance, double referenceLoss) [member function]
cls.add_method('SetReference',
'void',
[param('double', 'referenceDistance'), param('double', 'referenceLoss')])
## propagation-loss-model.h (module 'propagation'): double ns3::LogDistancePropagationLossModel::DoCalcRxPower(double txPowerDbm, ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function]
cls.add_method('DoCalcRxPower',
'double',
[param('double', 'txPowerDbm'), param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')],
is_const=True, visibility='private', is_virtual=True)
return
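## LogDistancePropagationLossModel implements the log-distance path-loss model,
##   L(d) = L0 + 10 * n * log10(d / d0),
## where n is the path-loss exponent and (d0, L0) are the reference distance and
## reference loss. Illustrative sketch, assuming the generated ns3 module is
## importable (the numeric values below are illustrative, not taken from this file):
##   logd = ns3.LogDistancePropagationLossModel()
##   logd.SetPathLossExponent(3.0)
##   logd.SetReference(1.0, 46.6777)   # d0 = 1 m, L0 = 46.6777 dB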
def register_Ns3Mac48AddressChecker_methods(root_module, cls):
## mac48-address.h (module 'network'): ns3::Mac48AddressChecker::Mac48AddressChecker() [constructor]
cls.add_constructor([])
## mac48-address.h (module 'network'): ns3::Mac48AddressChecker::Mac48AddressChecker(ns3::Mac48AddressChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Mac48AddressChecker const &', 'arg0')])
return
def register_Ns3Mac48AddressValue_methods(root_module, cls):
## mac48-address.h (module 'network'): ns3::Mac48AddressValue::Mac48AddressValue() [constructor]
cls.add_constructor([])
## mac48-address.h (module 'network'): ns3::Mac48AddressValue::Mac48AddressValue(ns3::Mac48AddressValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Mac48AddressValue const &', 'arg0')])
## mac48-address.h (module 'network'): ns3::Mac48AddressValue::Mac48AddressValue(ns3::Mac48Address const & value) [constructor]
cls.add_constructor([param('ns3::Mac48Address const &', 'value')])
## mac48-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Mac48AddressValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## mac48-address.h (module 'network'): bool ns3::Mac48AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## mac48-address.h (module 'network'): ns3::Mac48Address ns3::Mac48AddressValue::Get() const [member function]
cls.add_method('Get',
'ns3::Mac48Address',
[],
is_const=True)
## mac48-address.h (module 'network'): std::string ns3::Mac48AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## mac48-address.h (module 'network'): void ns3::Mac48AddressValue::Set(ns3::Mac48Address const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Mac48Address const &', 'value')])
return
def register_Ns3MacLow_methods(root_module, cls):
## mac-low.h (module 'wifi'): ns3::MacLow::MacLow(ns3::MacLow const & arg0) [copy constructor]
cls.add_constructor([param('ns3::MacLow const &', 'arg0')])
## mac-low.h (module 'wifi'): ns3::MacLow::MacLow() [constructor]
cls.add_constructor([])
## mac-low.h (module 'wifi'): ns3::Time ns3::MacLow::CalculateTransmissionTime(ns3::Ptr<ns3::Packet const> packet, ns3::WifiMacHeader const * hdr, ns3::MacLowTransmissionParameters const & parameters) const [member function]
cls.add_method('CalculateTransmissionTime',
'ns3::Time',
[param('ns3::Ptr< ns3::Packet const >', 'packet'), param('ns3::WifiMacHeader const *', 'hdr'), param('ns3::MacLowTransmissionParameters const &', 'parameters')],
is_const=True)
## mac-low.h (module 'wifi'): void ns3::MacLow::CreateBlockAckAgreement(ns3::MgtAddBaResponseHeader const * respHdr, ns3::Mac48Address originator, uint16_t startingSeq) [member function]
cls.add_method('CreateBlockAckAgreement',
'void',
[param('ns3::MgtAddBaResponseHeader const *', 'respHdr'), param('ns3::Mac48Address', 'originator'), param('uint16_t', 'startingSeq')])
## mac-low.h (module 'wifi'): void ns3::MacLow::DestroyBlockAckAgreement(ns3::Mac48Address originator, uint8_t tid) [member function]
cls.add_method('DestroyBlockAckAgreement',
'void',
[param('ns3::Mac48Address', 'originator'), param('uint8_t', 'tid')])
## mac-low.h (module 'wifi'): ns3::Time ns3::MacLow::GetAckTimeout() const [member function]
cls.add_method('GetAckTimeout',
'ns3::Time',
[],
is_const=True)
## mac-low.h (module 'wifi'): ns3::Mac48Address ns3::MacLow::GetAddress() const [member function]
cls.add_method('GetAddress',
'ns3::Mac48Address',
[],
is_const=True)
## mac-low.h (module 'wifi'): ns3::Time ns3::MacLow::GetBasicBlockAckTimeout() const [member function]
cls.add_method('GetBasicBlockAckTimeout',
'ns3::Time',
[],
is_const=True)
## mac-low.h (module 'wifi'): ns3::Mac48Address ns3::MacLow::GetBssid() const [member function]
cls.add_method('GetBssid',
'ns3::Mac48Address',
[],
is_const=True)
## mac-low.h (module 'wifi'): ns3::Time ns3::MacLow::GetCompressedBlockAckTimeout() const [member function]
cls.add_method('GetCompressedBlockAckTimeout',
'ns3::Time',
[],
is_const=True)
## mac-low.h (module 'wifi'): ns3::Time ns3::MacLow::GetCtsTimeout() const [member function]
cls.add_method('GetCtsTimeout',
'ns3::Time',
[],
is_const=True)
## mac-low.h (module 'wifi'): ns3::Time ns3::MacLow::GetPifs() const [member function]
cls.add_method('GetPifs',
'ns3::Time',
[],
is_const=True)
## mac-low.h (module 'wifi'): ns3::Time ns3::MacLow::GetSifs() const [member function]
cls.add_method('GetSifs',
'ns3::Time',
[],
is_const=True)
## mac-low.h (module 'wifi'): ns3::Time ns3::MacLow::GetSlotTime() const [member function]
cls.add_method('GetSlotTime',
'ns3::Time',
[],
is_const=True)
## mac-low.h (module 'wifi'): void ns3::MacLow::NotifySwitchingStartNow(ns3::Time duration) [member function]
cls.add_method('NotifySwitchingStartNow',
'void',
[param('ns3::Time', 'duration')])
## mac-low.h (module 'wifi'): void ns3::MacLow::ReceiveError(ns3::Ptr<ns3::Packet const> packet, double rxSnr) [member function]
cls.add_method('ReceiveError',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet'), param('double', 'rxSnr')])
## mac-low.h (module 'wifi'): void ns3::MacLow::ReceiveOk(ns3::Ptr<ns3::Packet> packet, double rxSnr, ns3::WifiMode txMode, ns3::WifiPreamble preamble) [member function]
cls.add_method('ReceiveOk',
'void',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('double', 'rxSnr'), param('ns3::WifiMode', 'txMode'), param('ns3::WifiPreamble', 'preamble')])
## mac-low.h (module 'wifi'): void ns3::MacLow::RegisterBlockAckListenerForAc(ns3::AcIndex ac, ns3::MacLowBlockAckEventListener * listener) [member function]
cls.add_method('RegisterBlockAckListenerForAc',
'void',
[param('ns3::AcIndex', 'ac'), param('ns3::MacLowBlockAckEventListener *', 'listener')])
## mac-low.h (module 'wifi'): void ns3::MacLow::RegisterDcfListener(ns3::MacLowDcfListener * listener) [member function]
cls.add_method('RegisterDcfListener',
'void',
[param('ns3::MacLowDcfListener *', 'listener')])
## mac-low.h (module 'wifi'): void ns3::MacLow::SetAckTimeout(ns3::Time ackTimeout) [member function]
cls.add_method('SetAckTimeout',
'void',
[param('ns3::Time', 'ackTimeout')])
## mac-low.h (module 'wifi'): void ns3::MacLow::SetAddress(ns3::Mac48Address ad) [member function]
cls.add_method('SetAddress',
'void',
[param('ns3::Mac48Address', 'ad')])
## mac-low.h (module 'wifi'): void ns3::MacLow::SetBasicBlockAckTimeout(ns3::Time blockAckTimeout) [member function]
cls.add_method('SetBasicBlockAckTimeout',
'void',
[param('ns3::Time', 'blockAckTimeout')])
## mac-low.h (module 'wifi'): void ns3::MacLow::SetBssid(ns3::Mac48Address ad) [member function]
cls.add_method('SetBssid',
'void',
[param('ns3::Mac48Address', 'ad')])
## mac-low.h (module 'wifi'): void ns3::MacLow::SetCompressedBlockAckTimeout(ns3::Time blockAckTimeout) [member function]
cls.add_method('SetCompressedBlockAckTimeout',
'void',
[param('ns3::Time', 'blockAckTimeout')])
## mac-low.h (module 'wifi'): void ns3::MacLow::SetCtsTimeout(ns3::Time ctsTimeout) [member function]
cls.add_method('SetCtsTimeout',
'void',
[param('ns3::Time', 'ctsTimeout')])
## mac-low.h (module 'wifi'): void ns3::MacLow::SetPhy(ns3::Ptr<ns3::WifiPhy> phy) [member function]
cls.add_method('SetPhy',
'void',
[param('ns3::Ptr< ns3::WifiPhy >', 'phy')])
## mac-low.h (module 'wifi'): void ns3::MacLow::SetPifs(ns3::Time pifs) [member function]
cls.add_method('SetPifs',
'void',
[param('ns3::Time', 'pifs')])
## mac-low.h (module 'wifi'): void ns3::MacLow::SetPromisc() [member function]
cls.add_method('SetPromisc',
'void',
[])
## mac-low.h (module 'wifi'): void ns3::MacLow::SetRxCallback(ns3::Callback<void, ns3::Ptr<ns3::Packet>, ns3::WifiMacHeader const*, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> callback) [member function]
cls.add_method('SetRxCallback',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::WifiMacHeader const *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')])
## mac-low.h (module 'wifi'): void ns3::MacLow::SetSifs(ns3::Time sifs) [member function]
cls.add_method('SetSifs',
'void',
[param('ns3::Time', 'sifs')])
## mac-low.h (module 'wifi'): void ns3::MacLow::SetSlotTime(ns3::Time slotTime) [member function]
cls.add_method('SetSlotTime',
'void',
[param('ns3::Time', 'slotTime')])
## mac-low.h (module 'wifi'): void ns3::MacLow::SetWifiRemoteStationManager(ns3::Ptr<ns3::WifiRemoteStationManager> manager) [member function]
cls.add_method('SetWifiRemoteStationManager',
'void',
[param('ns3::Ptr< ns3::WifiRemoteStationManager >', 'manager')])
## mac-low.h (module 'wifi'): void ns3::MacLow::StartTransmission(ns3::Ptr<ns3::Packet const> packet, ns3::WifiMacHeader const * hdr, ns3::MacLowTransmissionParameters parameters, ns3::MacLowTransmissionListener * listener) [member function]
cls.add_method('StartTransmission',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet'), param('ns3::WifiMacHeader const *', 'hdr'), param('ns3::MacLowTransmissionParameters', 'parameters'), param('ns3::MacLowTransmissionListener *', 'listener')])
## mac-low.h (module 'wifi'): void ns3::MacLow::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='private', is_virtual=True)
return
def register_Ns3MatrixPropagationLossModel_methods(root_module, cls):
## propagation-loss-model.h (module 'propagation'): static ns3::TypeId ns3::MatrixPropagationLossModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## propagation-loss-model.h (module 'propagation'): ns3::MatrixPropagationLossModel::MatrixPropagationLossModel() [constructor]
cls.add_constructor([])
## propagation-loss-model.h (module 'propagation'): void ns3::MatrixPropagationLossModel::SetLoss(ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b, double loss, bool symmetric=true) [member function]
cls.add_method('SetLoss',
'void',
[param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b'), param('double', 'loss'), param('bool', 'symmetric', default_value='true')])
## propagation-loss-model.h (module 'propagation'): void ns3::MatrixPropagationLossModel::SetDefaultLoss(double arg0) [member function]
cls.add_method('SetDefaultLoss',
'void',
[param('double', 'arg0')])
## propagation-loss-model.h (module 'propagation'): double ns3::MatrixPropagationLossModel::DoCalcRxPower(double txPowerDbm, ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function]
cls.add_method('DoCalcRxPower',
'double',
[param('double', 'txPowerDbm'), param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')],
is_const=True, visibility='private', is_virtual=True)
return
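## Note: the visibility='private' flag above merely records the C++ access
## specifier of the DoCalcRxPower hook as scanned from propagation-loss-model.h;
## how PyBindGen exposes such private virtual methods is decided by the code
## generator, not by anything hand-written in this file.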
def register_Ns3MgtBeaconHeader_methods(root_module, cls):
## mgt-headers.h (module 'wifi'): ns3::MgtBeaconHeader::MgtBeaconHeader() [constructor]
cls.add_constructor([])
## mgt-headers.h (module 'wifi'): ns3::MgtBeaconHeader::MgtBeaconHeader(ns3::MgtBeaconHeader const & arg0) [copy constructor]
cls.add_constructor([param('ns3::MgtBeaconHeader const &', 'arg0')])
return
def register_Ns3MinstrelWifiManager_methods(root_module, cls):
## minstrel-wifi-manager.h (module 'wifi'): ns3::MinstrelWifiManager::MinstrelWifiManager(ns3::MinstrelWifiManager const & arg0) [copy constructor]
cls.add_constructor([param('ns3::MinstrelWifiManager const &', 'arg0')])
## minstrel-wifi-manager.h (module 'wifi'): ns3::MinstrelWifiManager::MinstrelWifiManager() [constructor]
cls.add_constructor([])
## minstrel-wifi-manager.h (module 'wifi'): static ns3::TypeId ns3::MinstrelWifiManager::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## minstrel-wifi-manager.h (module 'wifi'): void ns3::MinstrelWifiManager::SetupPhy(ns3::Ptr<ns3::WifiPhy> phy) [member function]
cls.add_method('SetupPhy',
'void',
[param('ns3::Ptr< ns3::WifiPhy >', 'phy')],
is_virtual=True)
## minstrel-wifi-manager.h (module 'wifi'): ns3::WifiRemoteStation * ns3::MinstrelWifiManager::DoCreateStation() const [member function]
cls.add_method('DoCreateStation',
'ns3::WifiRemoteStation *',
[],
is_const=True, visibility='private', is_virtual=True)
## minstrel-wifi-manager.h (module 'wifi'): ns3::WifiMode ns3::MinstrelWifiManager::DoGetDataMode(ns3::WifiRemoteStation * station, uint32_t size) [member function]
cls.add_method('DoGetDataMode',
'ns3::WifiMode',
[param('ns3::WifiRemoteStation *', 'station'), param('uint32_t', 'size')],
visibility='private', is_virtual=True)
## minstrel-wifi-manager.h (module 'wifi'): ns3::WifiMode ns3::MinstrelWifiManager::DoGetRtsMode(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoGetRtsMode',
'ns3::WifiMode',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## minstrel-wifi-manager.h (module 'wifi'): void ns3::MinstrelWifiManager::DoReportDataFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportDataFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## minstrel-wifi-manager.h (module 'wifi'): void ns3::MinstrelWifiManager::DoReportDataOk(ns3::WifiRemoteStation * station, double ackSnr, ns3::WifiMode ackMode, double dataSnr) [member function]
cls.add_method('DoReportDataOk',
'void',
[param('ns3::WifiRemoteStation *', 'station'), param('double', 'ackSnr'), param('ns3::WifiMode', 'ackMode'), param('double', 'dataSnr')],
visibility='private', is_virtual=True)
## minstrel-wifi-manager.h (module 'wifi'): void ns3::MinstrelWifiManager::DoReportFinalDataFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportFinalDataFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## minstrel-wifi-manager.h (module 'wifi'): void ns3::MinstrelWifiManager::DoReportFinalRtsFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportFinalRtsFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## minstrel-wifi-manager.h (module 'wifi'): void ns3::MinstrelWifiManager::DoReportRtsFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportRtsFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## minstrel-wifi-manager.h (module 'wifi'): void ns3::MinstrelWifiManager::DoReportRtsOk(ns3::WifiRemoteStation * station, double ctsSnr, ns3::WifiMode ctsMode, double rtsSnr) [member function]
cls.add_method('DoReportRtsOk',
'void',
[param('ns3::WifiRemoteStation *', 'station'), param('double', 'ctsSnr'), param('ns3::WifiMode', 'ctsMode'), param('double', 'rtsSnr')],
visibility='private', is_virtual=True)
## minstrel-wifi-manager.h (module 'wifi'): void ns3::MinstrelWifiManager::DoReportRxOk(ns3::WifiRemoteStation * station, double rxSnr, ns3::WifiMode txMode) [member function]
cls.add_method('DoReportRxOk',
'void',
[param('ns3::WifiRemoteStation *', 'station'), param('double', 'rxSnr'), param('ns3::WifiMode', 'txMode')],
visibility='private', is_virtual=True)
## minstrel-wifi-manager.h (module 'wifi'): bool ns3::MinstrelWifiManager::IsLowLatency() const [member function]
cls.add_method('IsLowLatency',
'bool',
[],
is_const=True, visibility='private', is_virtual=True)
return
def register_Ns3MsduAggregator_methods(root_module, cls):
## msdu-aggregator.h (module 'wifi'): ns3::MsduAggregator::MsduAggregator() [constructor]
cls.add_constructor([])
## msdu-aggregator.h (module 'wifi'): ns3::MsduAggregator::MsduAggregator(ns3::MsduAggregator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::MsduAggregator const &', 'arg0')])
## msdu-aggregator.h (module 'wifi'): bool ns3::MsduAggregator::Aggregate(ns3::Ptr<ns3::Packet const> packet, ns3::Ptr<ns3::Packet> aggregatedPacket, ns3::Mac48Address src, ns3::Mac48Address dest) [member function]
cls.add_method('Aggregate',
'bool',
[param('ns3::Ptr< ns3::Packet const >', 'packet'), param('ns3::Ptr< ns3::Packet >', 'aggregatedPacket'), param('ns3::Mac48Address', 'src'), param('ns3::Mac48Address', 'dest')],
is_pure_virtual=True, is_virtual=True)
## msdu-aggregator.h (module 'wifi'): static std::list<std::pair<ns3::Ptr<ns3::Packet>, ns3::AmsduSubframeHeader>, std::allocator<std::pair<ns3::Ptr<ns3::Packet>, ns3::AmsduSubframeHeader> > > ns3::MsduAggregator::Deaggregate(ns3::Ptr<ns3::Packet> aggregatedPacket) [member function]
cls.add_method('Deaggregate',
'std::list< std::pair< ns3::Ptr< ns3::Packet >, ns3::AmsduSubframeHeader > >',
[param('ns3::Ptr< ns3::Packet >', 'aggregatedPacket')],
is_static=True)
## msdu-aggregator.h (module 'wifi'): static ns3::TypeId ns3::MsduAggregator::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
return
def register_Ns3NakagamiPropagationLossModel_methods(root_module, cls):
## propagation-loss-model.h (module 'propagation'): static ns3::TypeId ns3::NakagamiPropagationLossModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## propagation-loss-model.h (module 'propagation'): ns3::NakagamiPropagationLossModel::NakagamiPropagationLossModel() [constructor]
cls.add_constructor([])
## propagation-loss-model.h (module 'propagation'): double ns3::NakagamiPropagationLossModel::DoCalcRxPower(double txPowerDbm, ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function]
cls.add_method('DoCalcRxPower',
'double',
[param('double', 'txPowerDbm'), param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')],
is_const=True, visibility='private', is_virtual=True)
return
def register_Ns3NetDevice_methods(root_module, cls):
## net-device.h (module 'network'): ns3::NetDevice::NetDevice() [constructor]
cls.add_constructor([])
## net-device.h (module 'network'): ns3::NetDevice::NetDevice(ns3::NetDevice const & arg0) [copy constructor]
cls.add_constructor([param('ns3::NetDevice const &', 'arg0')])
## net-device.h (module 'network'): void ns3::NetDevice::AddLinkChangeCallback(ns3::Callback<void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> callback) [member function]
cls.add_method('AddLinkChangeCallback',
'void',
[param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetAddress() const [member function]
cls.add_method('GetAddress',
'ns3::Address',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetBroadcast() const [member function]
cls.add_method('GetBroadcast',
'ns3::Address',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Ptr<ns3::Channel> ns3::NetDevice::GetChannel() const [member function]
cls.add_method('GetChannel',
'ns3::Ptr< ns3::Channel >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): uint32_t ns3::NetDevice::GetIfIndex() const [member function]
cls.add_method('GetIfIndex',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): uint16_t ns3::NetDevice::GetMtu() const [member function]
cls.add_method('GetMtu',
'uint16_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetMulticast(ns3::Ipv4Address multicastGroup) const [member function]
cls.add_method('GetMulticast',
'ns3::Address',
[param('ns3::Ipv4Address', 'multicastGroup')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetMulticast(ns3::Ipv6Address addr) const [member function]
cls.add_method('GetMulticast',
'ns3::Address',
[param('ns3::Ipv6Address', 'addr')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Ptr<ns3::Node> ns3::NetDevice::GetNode() const [member function]
cls.add_method('GetNode',
'ns3::Ptr< ns3::Node >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): static ns3::TypeId ns3::NetDevice::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsBridge() const [member function]
cls.add_method('IsBridge',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsBroadcast() const [member function]
cls.add_method('IsBroadcast',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsLinkUp() const [member function]
cls.add_method('IsLinkUp',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsMulticast() const [member function]
cls.add_method('IsMulticast',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsPointToPoint() const [member function]
cls.add_method('IsPointToPoint',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::NeedsArp() const [member function]
cls.add_method('NeedsArp',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::Send(ns3::Ptr<ns3::Packet> packet, ns3::Address const & dest, uint16_t protocolNumber) [member function]
cls.add_method('Send',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::SendFrom(ns3::Ptr<ns3::Packet> packet, ns3::Address const & source, ns3::Address const & dest, uint16_t protocolNumber) [member function]
cls.add_method('SendFrom',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'source'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetAddress(ns3::Address address) [member function]
cls.add_method('SetAddress',
'void',
[param('ns3::Address', 'address')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetIfIndex(uint32_t const index) [member function]
cls.add_method('SetIfIndex',
'void',
[param('uint32_t const', 'index')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::SetMtu(uint16_t const mtu) [member function]
cls.add_method('SetMtu',
'bool',
[param('uint16_t const', 'mtu')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetNode(ns3::Ptr<ns3::Node> node) [member function]
cls.add_method('SetNode',
'void',
[param('ns3::Ptr< ns3::Node >', 'node')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetPromiscReceiveCallback(ns3::Callback<bool, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::Address const&, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> cb) [member function]
cls.add_method('SetPromiscReceiveCallback',
'void',
[param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetReceiveCallback(ns3::Callback<bool, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> cb) [member function]
cls.add_method('SetReceiveCallback',
'void',
[param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::SupportsSendFrom() const [member function]
cls.add_method('SupportsSendFrom',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
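## Note: the is_pure_virtual=True flags above reflect that ns3::NetDevice is an
## abstract interface in C++; these registrations only restate the scanned
## qualifiers and add no behaviour of their own.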
def register_Ns3NistErrorRateModel_methods(root_module, cls):
## nist-error-rate-model.h (module 'wifi'): ns3::NistErrorRateModel::NistErrorRateModel(ns3::NistErrorRateModel const & arg0) [copy constructor]
cls.add_constructor([param('ns3::NistErrorRateModel const &', 'arg0')])
## nist-error-rate-model.h (module 'wifi'): ns3::NistErrorRateModel::NistErrorRateModel() [constructor]
cls.add_constructor([])
## nist-error-rate-model.h (module 'wifi'): double ns3::NistErrorRateModel::GetChunkSuccessRate(ns3::WifiMode mode, double snr, uint32_t nbits) const [member function]
cls.add_method('GetChunkSuccessRate',
'double',
[param('ns3::WifiMode', 'mode'), param('double', 'snr'), param('uint32_t', 'nbits')],
is_const=True, is_virtual=True)
## nist-error-rate-model.h (module 'wifi'): static ns3::TypeId ns3::NistErrorRateModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
return
def register_Ns3NixVector_methods(root_module, cls):
cls.add_output_stream_operator()
## nix-vector.h (module 'network'): ns3::NixVector::NixVector() [constructor]
cls.add_constructor([])
## nix-vector.h (module 'network'): ns3::NixVector::NixVector(ns3::NixVector const & o) [copy constructor]
cls.add_constructor([param('ns3::NixVector const &', 'o')])
## nix-vector.h (module 'network'): void ns3::NixVector::AddNeighborIndex(uint32_t newBits, uint32_t numberOfBits) [member function]
cls.add_method('AddNeighborIndex',
'void',
[param('uint32_t', 'newBits'), param('uint32_t', 'numberOfBits')])
## nix-vector.h (module 'network'): uint32_t ns3::NixVector::BitCount(uint32_t numberOfNeighbors) const [member function]
cls.add_method('BitCount',
'uint32_t',
[param('uint32_t', 'numberOfNeighbors')],
is_const=True)
## nix-vector.h (module 'network'): ns3::Ptr<ns3::NixVector> ns3::NixVector::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::NixVector >',
[],
is_const=True)
## nix-vector.h (module 'network'): uint32_t ns3::NixVector::Deserialize(uint32_t const * buffer, uint32_t size) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('uint32_t const *', 'buffer'), param('uint32_t', 'size')])
## nix-vector.h (module 'network'): uint32_t ns3::NixVector::ExtractNeighborIndex(uint32_t numberOfBits) [member function]
cls.add_method('ExtractNeighborIndex',
'uint32_t',
[param('uint32_t', 'numberOfBits')])
## nix-vector.h (module 'network'): uint32_t ns3::NixVector::GetRemainingBits() [member function]
cls.add_method('GetRemainingBits',
'uint32_t',
[])
## nix-vector.h (module 'network'): uint32_t ns3::NixVector::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## nix-vector.h (module 'network'): uint32_t ns3::NixVector::Serialize(uint32_t * buffer, uint32_t maxSize) const [member function]
cls.add_method('Serialize',
'uint32_t',
[param('uint32_t *', 'buffer'), param('uint32_t', 'maxSize')],
is_const=True)
return
def register_Ns3Node_methods(root_module, cls):
## node.h (module 'network'): ns3::Node::Node(ns3::Node const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Node const &', 'arg0')])
## node.h (module 'network'): ns3::Node::Node() [constructor]
cls.add_constructor([])
## node.h (module 'network'): ns3::Node::Node(uint32_t systemId) [constructor]
cls.add_constructor([param('uint32_t', 'systemId')])
## node.h (module 'network'): uint32_t ns3::Node::AddApplication(ns3::Ptr<ns3::Application> application) [member function]
cls.add_method('AddApplication',
'uint32_t',
[param('ns3::Ptr< ns3::Application >', 'application')])
## node.h (module 'network'): uint32_t ns3::Node::AddDevice(ns3::Ptr<ns3::NetDevice> device) [member function]
cls.add_method('AddDevice',
'uint32_t',
[param('ns3::Ptr< ns3::NetDevice >', 'device')])
## node.h (module 'network'): static bool ns3::Node::ChecksumEnabled() [member function]
cls.add_method('ChecksumEnabled',
'bool',
[],
is_static=True)
## node.h (module 'network'): ns3::Ptr<ns3::Application> ns3::Node::GetApplication(uint32_t index) const [member function]
cls.add_method('GetApplication',
'ns3::Ptr< ns3::Application >',
[param('uint32_t', 'index')],
is_const=True)
## node.h (module 'network'): ns3::Ptr<ns3::NetDevice> ns3::Node::GetDevice(uint32_t index) const [member function]
cls.add_method('GetDevice',
'ns3::Ptr< ns3::NetDevice >',
[param('uint32_t', 'index')],
is_const=True)
## node.h (module 'network'): uint32_t ns3::Node::GetId() const [member function]
cls.add_method('GetId',
'uint32_t',
[],
is_const=True)
## node.h (module 'network'): uint32_t ns3::Node::GetNApplications() const [member function]
cls.add_method('GetNApplications',
'uint32_t',
[],
is_const=True)
## node.h (module 'network'): uint32_t ns3::Node::GetNDevices() const [member function]
cls.add_method('GetNDevices',
'uint32_t',
[],
is_const=True)
## node.h (module 'network'): uint32_t ns3::Node::GetSystemId() const [member function]
cls.add_method('GetSystemId',
'uint32_t',
[],
is_const=True)
## node.h (module 'network'): static ns3::TypeId ns3::Node::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## node.h (module 'network'): void ns3::Node::RegisterDeviceAdditionListener(ns3::Callback<void,ns3::Ptr<ns3::NetDevice>,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> listener) [member function]
cls.add_method('RegisterDeviceAdditionListener',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'listener')])
## node.h (module 'network'): void ns3::Node::RegisterProtocolHandler(ns3::Callback<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::Address const&, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> handler, uint16_t protocolType, ns3::Ptr<ns3::NetDevice> device, bool promiscuous=false) [member function]
cls.add_method('RegisterProtocolHandler',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'handler'), param('uint16_t', 'protocolType'), param('ns3::Ptr< ns3::NetDevice >', 'device'), param('bool', 'promiscuous', default_value='false')])
## node.h (module 'network'): void ns3::Node::UnregisterDeviceAdditionListener(ns3::Callback<void,ns3::Ptr<ns3::NetDevice>,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> listener) [member function]
cls.add_method('UnregisterDeviceAdditionListener',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'listener')])
## node.h (module 'network'): void ns3::Node::UnregisterProtocolHandler(ns3::Callback<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::Address const&, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> handler) [member function]
cls.add_method('UnregisterProtocolHandler',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'handler')])
## node.h (module 'network'): void ns3::Node::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='protected', is_virtual=True)
## node.h (module 'network'): void ns3::Node::DoStart() [member function]
cls.add_method('DoStart',
'void',
[],
visibility='protected', is_virtual=True)
return
def register_Ns3ObjectFactoryChecker_methods(root_module, cls):
## object-factory.h (module 'core'): ns3::ObjectFactoryChecker::ObjectFactoryChecker() [constructor]
cls.add_constructor([])
## object-factory.h (module 'core'): ns3::ObjectFactoryChecker::ObjectFactoryChecker(ns3::ObjectFactoryChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectFactoryChecker const &', 'arg0')])
return
def register_Ns3ObjectFactoryValue_methods(root_module, cls):
## object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue() [constructor]
cls.add_constructor([])
## object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue(ns3::ObjectFactoryValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectFactoryValue const &', 'arg0')])
## object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue(ns3::ObjectFactory const & value) [constructor]
cls.add_constructor([param('ns3::ObjectFactory const &', 'value')])
## object-factory.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::ObjectFactoryValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## object-factory.h (module 'core'): bool ns3::ObjectFactoryValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## object-factory.h (module 'core'): ns3::ObjectFactory ns3::ObjectFactoryValue::Get() const [member function]
cls.add_method('Get',
'ns3::ObjectFactory',
[],
is_const=True)
## object-factory.h (module 'core'): std::string ns3::ObjectFactoryValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## object-factory.h (module 'core'): void ns3::ObjectFactoryValue::Set(ns3::ObjectFactory const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::ObjectFactory const &', 'value')])
return
def register_Ns3OnoeWifiManager_methods(root_module, cls):
## onoe-wifi-manager.h (module 'wifi'): ns3::OnoeWifiManager::OnoeWifiManager(ns3::OnoeWifiManager const & arg0) [copy constructor]
cls.add_constructor([param('ns3::OnoeWifiManager const &', 'arg0')])
## onoe-wifi-manager.h (module 'wifi'): ns3::OnoeWifiManager::OnoeWifiManager() [constructor]
cls.add_constructor([])
## onoe-wifi-manager.h (module 'wifi'): static ns3::TypeId ns3::OnoeWifiManager::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## onoe-wifi-manager.h (module 'wifi'): ns3::WifiRemoteStation * ns3::OnoeWifiManager::DoCreateStation() const [member function]
cls.add_method('DoCreateStation',
'ns3::WifiRemoteStation *',
[],
is_const=True, visibility='private', is_virtual=True)
## onoe-wifi-manager.h (module 'wifi'): ns3::WifiMode ns3::OnoeWifiManager::DoGetDataMode(ns3::WifiRemoteStation * station, uint32_t size) [member function]
cls.add_method('DoGetDataMode',
'ns3::WifiMode',
[param('ns3::WifiRemoteStation *', 'station'), param('uint32_t', 'size')],
visibility='private', is_virtual=True)
## onoe-wifi-manager.h (module 'wifi'): ns3::WifiMode ns3::OnoeWifiManager::DoGetRtsMode(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoGetRtsMode',
'ns3::WifiMode',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## onoe-wifi-manager.h (module 'wifi'): void ns3::OnoeWifiManager::DoReportDataFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportDataFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## onoe-wifi-manager.h (module 'wifi'): void ns3::OnoeWifiManager::DoReportDataOk(ns3::WifiRemoteStation * station, double ackSnr, ns3::WifiMode ackMode, double dataSnr) [member function]
cls.add_method('DoReportDataOk',
'void',
[param('ns3::WifiRemoteStation *', 'station'), param('double', 'ackSnr'), param('ns3::WifiMode', 'ackMode'), param('double', 'dataSnr')],
visibility='private', is_virtual=True)
## onoe-wifi-manager.h (module 'wifi'): void ns3::OnoeWifiManager::DoReportFinalDataFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportFinalDataFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## onoe-wifi-manager.h (module 'wifi'): void ns3::OnoeWifiManager::DoReportFinalRtsFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportFinalRtsFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## onoe-wifi-manager.h (module 'wifi'): void ns3::OnoeWifiManager::DoReportRtsFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportRtsFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## onoe-wifi-manager.h (module 'wifi'): void ns3::OnoeWifiManager::DoReportRtsOk(ns3::WifiRemoteStation * station, double ctsSnr, ns3::WifiMode ctsMode, double rtsSnr) [member function]
cls.add_method('DoReportRtsOk',
'void',
[param('ns3::WifiRemoteStation *', 'station'), param('double', 'ctsSnr'), param('ns3::WifiMode', 'ctsMode'), param('double', 'rtsSnr')],
visibility='private', is_virtual=True)
## onoe-wifi-manager.h (module 'wifi'): void ns3::OnoeWifiManager::DoReportRxOk(ns3::WifiRemoteStation * station, double rxSnr, ns3::WifiMode txMode) [member function]
cls.add_method('DoReportRxOk',
'void',
[param('ns3::WifiRemoteStation *', 'station'), param('double', 'rxSnr'), param('ns3::WifiMode', 'txMode')],
visibility='private', is_virtual=True)
## onoe-wifi-manager.h (module 'wifi'): bool ns3::OnoeWifiManager::IsLowLatency() const [member function]
cls.add_method('IsLowLatency',
'bool',
[],
is_const=True, visibility='private', is_virtual=True)
return
def register_Ns3OutputStreamWrapper_methods(root_module, cls):
## output-stream-wrapper.h (module 'network'): ns3::OutputStreamWrapper::OutputStreamWrapper(ns3::OutputStreamWrapper const & arg0) [copy constructor]
cls.add_constructor([param('ns3::OutputStreamWrapper const &', 'arg0')])
## output-stream-wrapper.h (module 'network'): ns3::OutputStreamWrapper::OutputStreamWrapper(std::string filename, std::_Ios_Openmode filemode) [constructor]
cls.add_constructor([param('std::string', 'filename'), param('std::_Ios_Openmode', 'filemode')])
## output-stream-wrapper.h (module 'network'): ns3::OutputStreamWrapper::OutputStreamWrapper(std::ostream * os) [constructor]
cls.add_constructor([param('std::ostream *', 'os')])
## output-stream-wrapper.h (module 'network'): std::ostream * ns3::OutputStreamWrapper::GetStream() [member function]
cls.add_method('GetStream',
'std::ostream *',
[])
return
def register_Ns3Packet_methods(root_module, cls):
cls.add_output_stream_operator()
## packet.h (module 'network'): ns3::Packet::Packet() [constructor]
cls.add_constructor([])
## packet.h (module 'network'): ns3::Packet::Packet(ns3::Packet const & o) [copy constructor]
cls.add_constructor([param('ns3::Packet const &', 'o')])
## packet.h (module 'network'): ns3::Packet::Packet(uint32_t size) [constructor]
cls.add_constructor([param('uint32_t', 'size')])
## packet.h (module 'network'): ns3::Packet::Packet(uint8_t const * buffer, uint32_t size, bool magic) [constructor]
cls.add_constructor([param('uint8_t const *', 'buffer'), param('uint32_t', 'size'), param('bool', 'magic')])
## packet.h (module 'network'): ns3::Packet::Packet(uint8_t const * buffer, uint32_t size) [constructor]
cls.add_constructor([param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
## packet.h (module 'network'): void ns3::Packet::AddAtEnd(ns3::Ptr<ns3::Packet const> packet) [member function]
cls.add_method('AddAtEnd',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet')])
## packet.h (module 'network'): void ns3::Packet::AddByteTag(ns3::Tag const & tag) const [member function]
cls.add_method('AddByteTag',
'void',
[param('ns3::Tag const &', 'tag')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::AddHeader(ns3::Header const & header) [member function]
cls.add_method('AddHeader',
'void',
[param('ns3::Header const &', 'header')])
## packet.h (module 'network'): void ns3::Packet::AddPacketTag(ns3::Tag const & tag) const [member function]
cls.add_method('AddPacketTag',
'void',
[param('ns3::Tag const &', 'tag')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::AddPaddingAtEnd(uint32_t size) [member function]
cls.add_method('AddPaddingAtEnd',
'void',
[param('uint32_t', 'size')])
## packet.h (module 'network'): void ns3::Packet::AddTrailer(ns3::Trailer const & trailer) [member function]
cls.add_method('AddTrailer',
'void',
[param('ns3::Trailer const &', 'trailer')])
## packet.h (module 'network'): ns3::PacketMetadata::ItemIterator ns3::Packet::BeginItem() const [member function]
cls.add_method('BeginItem',
'ns3::PacketMetadata::ItemIterator',
[],
is_const=True)
## packet.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Packet::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::Packet >',
[],
is_const=True)
## packet.h (module 'network'): uint32_t ns3::Packet::CopyData(uint8_t * buffer, uint32_t size) const [member function]
cls.add_method('CopyData',
'uint32_t',
[param('uint8_t *', 'buffer'), param('uint32_t', 'size')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::CopyData(std::ostream * os, uint32_t size) const [member function]
cls.add_method('CopyData',
'void',
[param('std::ostream *', 'os'), param('uint32_t', 'size')],
is_const=True)
## packet.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Packet::CreateFragment(uint32_t start, uint32_t length) const [member function]
cls.add_method('CreateFragment',
'ns3::Ptr< ns3::Packet >',
[param('uint32_t', 'start'), param('uint32_t', 'length')],
is_const=True)
## packet.h (module 'network'): static void ns3::Packet::EnableChecking() [member function]
cls.add_method('EnableChecking',
'void',
[],
is_static=True)
## packet.h (module 'network'): static void ns3::Packet::EnablePrinting() [member function]
cls.add_method('EnablePrinting',
'void',
[],
is_static=True)
## packet.h (module 'network'): bool ns3::Packet::FindFirstMatchingByteTag(ns3::Tag & tag) const [member function]
cls.add_method('FindFirstMatchingByteTag',
'bool',
[param('ns3::Tag &', 'tag')],
is_const=True)
## packet.h (module 'network'): ns3::ByteTagIterator ns3::Packet::GetByteTagIterator() const [member function]
cls.add_method('GetByteTagIterator',
'ns3::ByteTagIterator',
[],
is_const=True)
## packet.h (module 'network'): ns3::Ptr<ns3::NixVector> ns3::Packet::GetNixVector() const [member function]
cls.add_method('GetNixVector',
'ns3::Ptr< ns3::NixVector >',
[],
is_const=True)
## packet.h (module 'network'): ns3::PacketTagIterator ns3::Packet::GetPacketTagIterator() const [member function]
cls.add_method('GetPacketTagIterator',
'ns3::PacketTagIterator',
[],
is_const=True)
## packet.h (module 'network'): uint32_t ns3::Packet::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## packet.h (module 'network'): uint32_t ns3::Packet::GetSize() const [member function]
cls.add_method('GetSize',
'uint32_t',
[],
is_const=True)
## packet.h (module 'network'): uint64_t ns3::Packet::GetUid() const [member function]
cls.add_method('GetUid',
'uint64_t',
[],
is_const=True)
## packet.h (module 'network'): uint8_t const * ns3::Packet::PeekData() const [member function]
cls.add_method('PeekData',
'uint8_t const *',
[],
deprecated=True, is_const=True)
## packet.h (module 'network'): uint32_t ns3::Packet::PeekHeader(ns3::Header & header) const [member function]
cls.add_method('PeekHeader',
'uint32_t',
[param('ns3::Header &', 'header')],
is_const=True)
## packet.h (module 'network'): bool ns3::Packet::PeekPacketTag(ns3::Tag & tag) const [member function]
cls.add_method('PeekPacketTag',
'bool',
[param('ns3::Tag &', 'tag')],
is_const=True)
## packet.h (module 'network'): uint32_t ns3::Packet::PeekTrailer(ns3::Trailer & trailer) [member function]
cls.add_method('PeekTrailer',
'uint32_t',
[param('ns3::Trailer &', 'trailer')])
## packet.h (module 'network'): void ns3::Packet::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::PrintByteTags(std::ostream & os) const [member function]
cls.add_method('PrintByteTags',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::PrintPacketTags(std::ostream & os) const [member function]
cls.add_method('PrintPacketTags',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::RemoveAllByteTags() [member function]
cls.add_method('RemoveAllByteTags',
'void',
[])
## packet.h (module 'network'): void ns3::Packet::RemoveAllPacketTags() [member function]
cls.add_method('RemoveAllPacketTags',
'void',
[])
## packet.h (module 'network'): void ns3::Packet::RemoveAtEnd(uint32_t size) [member function]
cls.add_method('RemoveAtEnd',
'void',
[param('uint32_t', 'size')])
## packet.h (module 'network'): void ns3::Packet::RemoveAtStart(uint32_t size) [member function]
cls.add_method('RemoveAtStart',
'void',
[param('uint32_t', 'size')])
## packet.h (module 'network'): uint32_t ns3::Packet::RemoveHeader(ns3::Header & header) [member function]
cls.add_method('RemoveHeader',
'uint32_t',
[param('ns3::Header &', 'header')])
## packet.h (module 'network'): bool ns3::Packet::RemovePacketTag(ns3::Tag & tag) [member function]
cls.add_method('RemovePacketTag',
'bool',
[param('ns3::Tag &', 'tag')])
## packet.h (module 'network'): uint32_t ns3::Packet::RemoveTrailer(ns3::Trailer & trailer) [member function]
cls.add_method('RemoveTrailer',
'uint32_t',
[param('ns3::Trailer &', 'trailer')])
## packet.h (module 'network'): uint32_t ns3::Packet::Serialize(uint8_t * buffer, uint32_t maxSize) const [member function]
cls.add_method('Serialize',
'uint32_t',
[param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::SetNixVector(ns3::Ptr<ns3::NixVector> arg0) [member function]
cls.add_method('SetNixVector',
'void',
[param('ns3::Ptr< ns3::NixVector >', 'arg0')])
return
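## Note (assumption, based on the usual layout of ns-3 generated bindings):
## helpers such as register_Ns3Packet_methods() are not meant to be called by
## users directly; a top-level dispatcher elsewhere in this file -- typically
## named register_methods(root_module) -- looks each class up in root_module
## and forwards it, roughly like the hypothetical call below:
##
##     register_Ns3Packet_methods(root_module, root_module['ns3::Packet'])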
def register_Ns3RandomVariableChecker_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::RandomVariableChecker::RandomVariableChecker() [constructor]
cls.add_constructor([])
## random-variable.h (module 'core'): ns3::RandomVariableChecker::RandomVariableChecker(ns3::RandomVariableChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::RandomVariableChecker const &', 'arg0')])
return
def register_Ns3RandomVariableValue_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::RandomVariableValue::RandomVariableValue() [constructor]
cls.add_constructor([])
## random-variable.h (module 'core'): ns3::RandomVariableValue::RandomVariableValue(ns3::RandomVariableValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::RandomVariableValue const &', 'arg0')])
## random-variable.h (module 'core'): ns3::RandomVariableValue::RandomVariableValue(ns3::RandomVariable const & value) [constructor]
cls.add_constructor([param('ns3::RandomVariable const &', 'value')])
## random-variable.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::RandomVariableValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## random-variable.h (module 'core'): bool ns3::RandomVariableValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## random-variable.h (module 'core'): ns3::RandomVariable ns3::RandomVariableValue::Get() const [member function]
cls.add_method('Get',
'ns3::RandomVariable',
[],
is_const=True)
## random-variable.h (module 'core'): std::string ns3::RandomVariableValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## random-variable.h (module 'core'): void ns3::RandomVariableValue::Set(ns3::RandomVariable const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::RandomVariable const &', 'value')])
return
def register_Ns3RegularWifiMac_methods(root_module, cls):
## regular-wifi-mac.h (module 'wifi'): static ns3::TypeId ns3::RegularWifiMac::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## regular-wifi-mac.h (module 'wifi'): ns3::RegularWifiMac::RegularWifiMac() [constructor]
cls.add_constructor([])
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::SetSlot(ns3::Time slotTime) [member function]
cls.add_method('SetSlot',
'void',
[param('ns3::Time', 'slotTime')],
is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::SetSifs(ns3::Time sifs) [member function]
cls.add_method('SetSifs',
'void',
[param('ns3::Time', 'sifs')],
is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::SetEifsNoDifs(ns3::Time eifsNoDifs) [member function]
cls.add_method('SetEifsNoDifs',
'void',
[param('ns3::Time', 'eifsNoDifs')],
is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::SetPifs(ns3::Time pifs) [member function]
cls.add_method('SetPifs',
'void',
[param('ns3::Time', 'pifs')],
is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::SetCtsTimeout(ns3::Time ctsTimeout) [member function]
cls.add_method('SetCtsTimeout',
'void',
[param('ns3::Time', 'ctsTimeout')],
is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::SetAckTimeout(ns3::Time ackTimeout) [member function]
cls.add_method('SetAckTimeout',
'void',
[param('ns3::Time', 'ackTimeout')],
is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): ns3::Time ns3::RegularWifiMac::GetPifs() const [member function]
cls.add_method('GetPifs',
'ns3::Time',
[],
is_const=True, is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): ns3::Time ns3::RegularWifiMac::GetSifs() const [member function]
cls.add_method('GetSifs',
'ns3::Time',
[],
is_const=True, is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): ns3::Time ns3::RegularWifiMac::GetSlot() const [member function]
cls.add_method('GetSlot',
'ns3::Time',
[],
is_const=True, is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): ns3::Time ns3::RegularWifiMac::GetEifsNoDifs() const [member function]
cls.add_method('GetEifsNoDifs',
'ns3::Time',
[],
is_const=True, is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): ns3::Time ns3::RegularWifiMac::GetCtsTimeout() const [member function]
cls.add_method('GetCtsTimeout',
'ns3::Time',
[],
is_const=True, is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): ns3::Time ns3::RegularWifiMac::GetAckTimeout() const [member function]
cls.add_method('GetAckTimeout',
'ns3::Time',
[],
is_const=True, is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): ns3::Mac48Address ns3::RegularWifiMac::GetAddress() const [member function]
cls.add_method('GetAddress',
'ns3::Mac48Address',
[],
is_const=True, is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): ns3::Ssid ns3::RegularWifiMac::GetSsid() const [member function]
cls.add_method('GetSsid',
'ns3::Ssid',
[],
is_const=True, is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::SetAddress(ns3::Mac48Address address) [member function]
cls.add_method('SetAddress',
'void',
[param('ns3::Mac48Address', 'address')],
is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::SetSsid(ns3::Ssid ssid) [member function]
cls.add_method('SetSsid',
'void',
[param('ns3::Ssid', 'ssid')],
is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::SetBssid(ns3::Mac48Address bssid) [member function]
cls.add_method('SetBssid',
'void',
[param('ns3::Mac48Address', 'bssid')],
is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): ns3::Mac48Address ns3::RegularWifiMac::GetBssid() const [member function]
cls.add_method('GetBssid',
'ns3::Mac48Address',
[],
is_const=True, is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::SetPromisc() [member function]
cls.add_method('SetPromisc',
'void',
[],
is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::Enqueue(ns3::Ptr<ns3::Packet const> packet, ns3::Mac48Address to, ns3::Mac48Address from) [member function]
cls.add_method('Enqueue',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet'), param('ns3::Mac48Address', 'to'), param('ns3::Mac48Address', 'from')],
is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): bool ns3::RegularWifiMac::SupportsSendFrom() const [member function]
cls.add_method('SupportsSendFrom',
'bool',
[],
is_const=True, is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::Enqueue(ns3::Ptr<ns3::Packet const> packet, ns3::Mac48Address to) [member function]
cls.add_method('Enqueue',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet'), param('ns3::Mac48Address', 'to')],
is_pure_virtual=True, is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::SetWifiPhy(ns3::Ptr<ns3::WifiPhy> phy) [member function]
cls.add_method('SetWifiPhy',
'void',
[param('ns3::Ptr< ns3::WifiPhy >', 'phy')],
is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): ns3::Ptr<ns3::WifiPhy> ns3::RegularWifiMac::GetWifiPhy() const [member function]
cls.add_method('GetWifiPhy',
'ns3::Ptr< ns3::WifiPhy >',
[],
is_const=True, is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::SetWifiRemoteStationManager(ns3::Ptr<ns3::WifiRemoteStationManager> stationManager) [member function]
cls.add_method('SetWifiRemoteStationManager',
'void',
[param('ns3::Ptr< ns3::WifiRemoteStationManager >', 'stationManager')],
is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): ns3::Ptr<ns3::WifiRemoteStationManager> ns3::RegularWifiMac::GetWifiRemoteStationManager() const [member function]
cls.add_method('GetWifiRemoteStationManager',
'ns3::Ptr< ns3::WifiRemoteStationManager >',
[],
is_const=True, is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::SetForwardUpCallback(ns3::Callback<void, ns3::Ptr<ns3::Packet>, ns3::Mac48Address, ns3::Mac48Address, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> upCallback) [member function]
cls.add_method('SetForwardUpCallback',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::Mac48Address, ns3::Mac48Address, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'upCallback')],
is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::SetLinkUpCallback(ns3::Callback<void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> linkUp) [member function]
cls.add_method('SetLinkUpCallback',
'void',
[param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'linkUp')],
is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::SetLinkDownCallback(ns3::Callback<void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> linkDown) [member function]
cls.add_method('SetLinkDownCallback',
'void',
[param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'linkDown')],
is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::SetBasicBlockAckTimeout(ns3::Time blockAckTimeout) [member function]
cls.add_method('SetBasicBlockAckTimeout',
'void',
[param('ns3::Time', 'blockAckTimeout')],
is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): ns3::Time ns3::RegularWifiMac::GetBasicBlockAckTimeout() const [member function]
cls.add_method('GetBasicBlockAckTimeout',
'ns3::Time',
[],
is_const=True, is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::SetCompressedBlockAckTimeout(ns3::Time blockAckTimeout) [member function]
cls.add_method('SetCompressedBlockAckTimeout',
'void',
[param('ns3::Time', 'blockAckTimeout')],
is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): ns3::Time ns3::RegularWifiMac::GetCompressedBlockAckTimeout() const [member function]
cls.add_method('GetCompressedBlockAckTimeout',
'ns3::Time',
[],
is_const=True, is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::DoStart() [member function]
cls.add_method('DoStart',
'void',
[],
visibility='protected', is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='protected', is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::FinishConfigureStandard(ns3::WifiPhyStandard standard) [member function]
cls.add_method('FinishConfigureStandard',
'void',
[param('ns3::WifiPhyStandard', 'standard')],
visibility='protected', is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::SetTypeOfStation(ns3::TypeOfStation type) [member function]
cls.add_method('SetTypeOfStation',
'void',
[param('ns3::TypeOfStation', 'type')],
visibility='protected')
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::Receive(ns3::Ptr<ns3::Packet> packet, ns3::WifiMacHeader const * hdr) [member function]
cls.add_method('Receive',
'void',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::WifiMacHeader const *', 'hdr')],
visibility='protected', is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::TxOk(ns3::WifiMacHeader const & hdr) [member function]
cls.add_method('TxOk',
'void',
[param('ns3::WifiMacHeader const &', 'hdr')],
visibility='protected', is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::TxFailed(ns3::WifiMacHeader const & hdr) [member function]
cls.add_method('TxFailed',
'void',
[param('ns3::WifiMacHeader const &', 'hdr')],
visibility='protected', is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::ForwardUp(ns3::Ptr<ns3::Packet> packet, ns3::Mac48Address from, ns3::Mac48Address to) [member function]
cls.add_method('ForwardUp',
'void',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Mac48Address', 'from'), param('ns3::Mac48Address', 'to')],
visibility='protected')
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::DeaggregateAmsduAndForward(ns3::Ptr<ns3::Packet> aggregatedPacket, ns3::WifiMacHeader const * hdr) [member function]
cls.add_method('DeaggregateAmsduAndForward',
'void',
[param('ns3::Ptr< ns3::Packet >', 'aggregatedPacket'), param('ns3::WifiMacHeader const *', 'hdr')],
visibility='protected', is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::SendAddBaResponse(ns3::MgtAddBaRequestHeader const * reqHdr, ns3::Mac48Address originator) [member function]
cls.add_method('SendAddBaResponse',
'void',
[param('ns3::MgtAddBaRequestHeader const *', 'reqHdr'), param('ns3::Mac48Address', 'originator')],
visibility='protected', is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::SetQosSupported(bool enable) [member function]
cls.add_method('SetQosSupported',
'void',
[param('bool', 'enable')],
visibility='protected')
## regular-wifi-mac.h (module 'wifi'): bool ns3::RegularWifiMac::GetQosSupported() const [member function]
cls.add_method('GetQosSupported',
'bool',
[],
is_const=True, visibility='protected')
return
def register_Ns3RraaWifiManager_methods(root_module, cls):
## rraa-wifi-manager.h (module 'wifi'): ns3::RraaWifiManager::RraaWifiManager(ns3::RraaWifiManager const & arg0) [copy constructor]
cls.add_constructor([param('ns3::RraaWifiManager const &', 'arg0')])
## rraa-wifi-manager.h (module 'wifi'): ns3::RraaWifiManager::RraaWifiManager() [constructor]
cls.add_constructor([])
## rraa-wifi-manager.h (module 'wifi'): static ns3::TypeId ns3::RraaWifiManager::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## rraa-wifi-manager.h (module 'wifi'): ns3::WifiRemoteStation * ns3::RraaWifiManager::DoCreateStation() const [member function]
cls.add_method('DoCreateStation',
'ns3::WifiRemoteStation *',
[],
is_const=True, visibility='private', is_virtual=True)
## rraa-wifi-manager.h (module 'wifi'): ns3::WifiMode ns3::RraaWifiManager::DoGetDataMode(ns3::WifiRemoteStation * station, uint32_t size) [member function]
cls.add_method('DoGetDataMode',
'ns3::WifiMode',
[param('ns3::WifiRemoteStation *', 'station'), param('uint32_t', 'size')],
visibility='private', is_virtual=True)
## rraa-wifi-manager.h (module 'wifi'): ns3::WifiMode ns3::RraaWifiManager::DoGetRtsMode(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoGetRtsMode',
'ns3::WifiMode',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## rraa-wifi-manager.h (module 'wifi'): bool ns3::RraaWifiManager::DoNeedRts(ns3::WifiRemoteStation * st, ns3::Ptr<ns3::Packet const> packet, bool normally) [member function]
cls.add_method('DoNeedRts',
'bool',
[param('ns3::WifiRemoteStation *', 'st'), param('ns3::Ptr< ns3::Packet const >', 'packet'), param('bool', 'normally')],
visibility='private', is_virtual=True)
## rraa-wifi-manager.h (module 'wifi'): void ns3::RraaWifiManager::DoReportDataFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportDataFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## rraa-wifi-manager.h (module 'wifi'): void ns3::RraaWifiManager::DoReportDataOk(ns3::WifiRemoteStation * station, double ackSnr, ns3::WifiMode ackMode, double dataSnr) [member function]
cls.add_method('DoReportDataOk',
'void',
[param('ns3::WifiRemoteStation *', 'station'), param('double', 'ackSnr'), param('ns3::WifiMode', 'ackMode'), param('double', 'dataSnr')],
visibility='private', is_virtual=True)
## rraa-wifi-manager.h (module 'wifi'): void ns3::RraaWifiManager::DoReportFinalDataFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportFinalDataFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## rraa-wifi-manager.h (module 'wifi'): void ns3::RraaWifiManager::DoReportFinalRtsFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportFinalRtsFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## rraa-wifi-manager.h (module 'wifi'): void ns3::RraaWifiManager::DoReportRtsFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportRtsFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
visibility='private', is_virtual=True)
## rraa-wifi-manager.h (module 'wifi'): void ns3::RraaWifiManager::DoReportRtsOk(ns3::WifiRemoteStation * station, double ctsSnr, ns3::WifiMode ctsMode, double rtsSnr) [member function]
cls.add_method('DoReportRtsOk',
'void',
[param('ns3::WifiRemoteStation *', 'station'), param('double', 'ctsSnr'), param('ns3::WifiMode', 'ctsMode'), param('double', 'rtsSnr')],
visibility='private', is_virtual=True)
## rraa-wifi-manager.h (module 'wifi'): void ns3::RraaWifiManager::DoReportRxOk(ns3::WifiRemoteStation * station, double rxSnr, ns3::WifiMode txMode) [member function]
cls.add_method('DoReportRxOk',
'void',
[param('ns3::WifiRemoteStation *', 'station'), param('double', 'rxSnr'), param('ns3::WifiMode', 'txMode')],
visibility='private', is_virtual=True)
## rraa-wifi-manager.h (module 'wifi'): bool ns3::RraaWifiManager::IsLowLatency() const [member function]
cls.add_method('IsLowLatency',
'bool',
[],
is_const=True, visibility='private', is_virtual=True)
return
def register_Ns3Ssid_methods(root_module, cls):
cls.add_output_stream_operator()
## ssid.h (module 'wifi'): ns3::Ssid::Ssid(ns3::Ssid const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ssid const &', 'arg0')])
## ssid.h (module 'wifi'): ns3::Ssid::Ssid() [constructor]
cls.add_constructor([])
## ssid.h (module 'wifi'): ns3::Ssid::Ssid(std::string s) [constructor]
cls.add_constructor([param('std::string', 's')])
## ssid.h (module 'wifi'): ns3::Ssid::Ssid(char const * ssid, uint8_t length) [constructor]
cls.add_constructor([param('char const *', 'ssid'), param('uint8_t', 'length')])
## ssid.h (module 'wifi'): uint8_t ns3::Ssid::DeserializeInformationField(ns3::Buffer::Iterator start, uint8_t length) [member function]
cls.add_method('DeserializeInformationField',
'uint8_t',
[param('ns3::Buffer::Iterator', 'start'), param('uint8_t', 'length')],
is_virtual=True)
## ssid.h (module 'wifi'): ns3::WifiInformationElementId ns3::Ssid::ElementId() const [member function]
cls.add_method('ElementId',
'ns3::WifiInformationElementId',
[],
is_const=True, is_virtual=True)
## ssid.h (module 'wifi'): uint8_t ns3::Ssid::GetInformationFieldSize() const [member function]
cls.add_method('GetInformationFieldSize',
'uint8_t',
[],
is_const=True, is_virtual=True)
## ssid.h (module 'wifi'): bool ns3::Ssid::IsBroadcast() const [member function]
cls.add_method('IsBroadcast',
'bool',
[],
is_const=True)
## ssid.h (module 'wifi'): bool ns3::Ssid::IsEqual(ns3::Ssid const & o) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ssid const &', 'o')],
is_const=True)
## ssid.h (module 'wifi'): char * ns3::Ssid::PeekString() const [member function]
cls.add_method('PeekString',
'char *',
[],
is_const=True)
## ssid.h (module 'wifi'): void ns3::Ssid::SerializeInformationField(ns3::Buffer::Iterator start) const [member function]
cls.add_method('SerializeInformationField',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
return
def register_Ns3SsidChecker_methods(root_module, cls):
## ssid.h (module 'wifi'): ns3::SsidChecker::SsidChecker() [constructor]
cls.add_constructor([])
## ssid.h (module 'wifi'): ns3::SsidChecker::SsidChecker(ns3::SsidChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SsidChecker const &', 'arg0')])
return
def register_Ns3SsidValue_methods(root_module, cls):
## ssid.h (module 'wifi'): ns3::SsidValue::SsidValue() [constructor]
cls.add_constructor([])
## ssid.h (module 'wifi'): ns3::SsidValue::SsidValue(ns3::SsidValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SsidValue const &', 'arg0')])
## ssid.h (module 'wifi'): ns3::SsidValue::SsidValue(ns3::Ssid const & value) [constructor]
cls.add_constructor([param('ns3::Ssid const &', 'value')])
## ssid.h (module 'wifi'): ns3::Ptr<ns3::AttributeValue> ns3::SsidValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## ssid.h (module 'wifi'): bool ns3::SsidValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## ssid.h (module 'wifi'): ns3::Ssid ns3::SsidValue::Get() const [member function]
cls.add_method('Get',
'ns3::Ssid',
[],
is_const=True)
## ssid.h (module 'wifi'): std::string ns3::SsidValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## ssid.h (module 'wifi'): void ns3::SsidValue::Set(ns3::Ssid const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Ssid const &', 'value')])
return
def register_Ns3StaWifiMac_methods(root_module, cls):
## sta-wifi-mac.h (module 'wifi'): static ns3::TypeId ns3::StaWifiMac::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## sta-wifi-mac.h (module 'wifi'): ns3::StaWifiMac::StaWifiMac() [constructor]
cls.add_constructor([])
## sta-wifi-mac.h (module 'wifi'): void ns3::StaWifiMac::Enqueue(ns3::Ptr<ns3::Packet const> packet, ns3::Mac48Address to) [member function]
cls.add_method('Enqueue',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet'), param('ns3::Mac48Address', 'to')],
is_virtual=True)
## sta-wifi-mac.h (module 'wifi'): void ns3::StaWifiMac::SetMaxMissedBeacons(uint32_t missed) [member function]
cls.add_method('SetMaxMissedBeacons',
'void',
[param('uint32_t', 'missed')])
## sta-wifi-mac.h (module 'wifi'): void ns3::StaWifiMac::SetProbeRequestTimeout(ns3::Time timeout) [member function]
cls.add_method('SetProbeRequestTimeout',
'void',
[param('ns3::Time', 'timeout')])
## sta-wifi-mac.h (module 'wifi'): void ns3::StaWifiMac::SetAssocRequestTimeout(ns3::Time timeout) [member function]
cls.add_method('SetAssocRequestTimeout',
'void',
[param('ns3::Time', 'timeout')])
## sta-wifi-mac.h (module 'wifi'): void ns3::StaWifiMac::StartActiveAssociation() [member function]
cls.add_method('StartActiveAssociation',
'void',
[])
## sta-wifi-mac.h (module 'wifi'): void ns3::StaWifiMac::Receive(ns3::Ptr<ns3::Packet> packet, ns3::WifiMacHeader const * hdr) [member function]
cls.add_method('Receive',
'void',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::WifiMacHeader const *', 'hdr')],
visibility='private', is_virtual=True)
return
def register_Ns3SupportedRates_methods(root_module, cls):
cls.add_output_stream_operator()
## supported-rates.h (module 'wifi'): ns3::SupportedRates::SupportedRates(ns3::SupportedRates const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SupportedRates const &', 'arg0')])
## supported-rates.h (module 'wifi'): ns3::SupportedRates::SupportedRates() [constructor]
cls.add_constructor([])
## supported-rates.h (module 'wifi'): void ns3::SupportedRates::AddSupportedRate(uint32_t bs) [member function]
cls.add_method('AddSupportedRate',
'void',
[param('uint32_t', 'bs')])
## supported-rates.h (module 'wifi'): uint8_t ns3::SupportedRates::DeserializeInformationField(ns3::Buffer::Iterator start, uint8_t length) [member function]
cls.add_method('DeserializeInformationField',
'uint8_t',
[param('ns3::Buffer::Iterator', 'start'), param('uint8_t', 'length')],
is_virtual=True)
## supported-rates.h (module 'wifi'): ns3::WifiInformationElementId ns3::SupportedRates::ElementId() const [member function]
cls.add_method('ElementId',
'ns3::WifiInformationElementId',
[],
is_const=True, is_virtual=True)
## supported-rates.h (module 'wifi'): uint8_t ns3::SupportedRates::GetInformationFieldSize() const [member function]
cls.add_method('GetInformationFieldSize',
'uint8_t',
[],
is_const=True, is_virtual=True)
## supported-rates.h (module 'wifi'): uint8_t ns3::SupportedRates::GetNRates() const [member function]
cls.add_method('GetNRates',
'uint8_t',
[],
is_const=True)
## supported-rates.h (module 'wifi'): uint32_t ns3::SupportedRates::GetRate(uint8_t i) const [member function]
cls.add_method('GetRate',
'uint32_t',
[param('uint8_t', 'i')],
is_const=True)
## supported-rates.h (module 'wifi'): bool ns3::SupportedRates::IsBasicRate(uint32_t bs) const [member function]
cls.add_method('IsBasicRate',
'bool',
[param('uint32_t', 'bs')],
is_const=True)
## supported-rates.h (module 'wifi'): bool ns3::SupportedRates::IsSupportedRate(uint32_t bs) const [member function]
cls.add_method('IsSupportedRate',
'bool',
[param('uint32_t', 'bs')],
is_const=True)
## supported-rates.h (module 'wifi'): void ns3::SupportedRates::SerializeInformationField(ns3::Buffer::Iterator start) const [member function]
cls.add_method('SerializeInformationField',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## supported-rates.h (module 'wifi'): void ns3::SupportedRates::SetBasicRate(uint32_t bs) [member function]
cls.add_method('SetBasicRate',
'void',
[param('uint32_t', 'bs')])
## supported-rates.h (module 'wifi'): ns3::SupportedRates::extended [variable]
cls.add_instance_attribute('extended', 'ns3::ExtendedSupportedRatesIE', is_const=False)
return
def register_Ns3TimeChecker_methods(root_module, cls):
## nstime.h (module 'core'): ns3::TimeChecker::TimeChecker() [constructor]
cls.add_constructor([])
## nstime.h (module 'core'): ns3::TimeChecker::TimeChecker(ns3::TimeChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TimeChecker const &', 'arg0')])
return
def register_Ns3TimeValue_methods(root_module, cls):
## nstime.h (module 'core'): ns3::TimeValue::TimeValue() [constructor]
cls.add_constructor([])
## nstime.h (module 'core'): ns3::TimeValue::TimeValue(ns3::TimeValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TimeValue const &', 'arg0')])
## nstime.h (module 'core'): ns3::TimeValue::TimeValue(ns3::Time const & value) [constructor]
cls.add_constructor([param('ns3::Time const &', 'value')])
## nstime.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::TimeValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## nstime.h (module 'core'): bool ns3::TimeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## nstime.h (module 'core'): ns3::Time ns3::TimeValue::Get() const [member function]
cls.add_method('Get',
'ns3::Time',
[],
is_const=True)
## nstime.h (module 'core'): std::string ns3::TimeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## nstime.h (module 'core'): void ns3::TimeValue::Set(ns3::Time const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Time const &', 'value')])
return
def register_Ns3TypeIdChecker_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker(ns3::TypeIdChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeIdChecker const &', 'arg0')])
return
def register_Ns3TypeIdValue_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeIdValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeIdValue const &', 'arg0')])
## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeId const & value) [constructor]
cls.add_constructor([param('ns3::TypeId const &', 'value')])
## type-id.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::TypeIdValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## type-id.h (module 'core'): bool ns3::TypeIdValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## type-id.h (module 'core'): ns3::TypeId ns3::TypeIdValue::Get() const [member function]
cls.add_method('Get',
'ns3::TypeId',
[],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeIdValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## type-id.h (module 'core'): void ns3::TypeIdValue::Set(ns3::TypeId const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::TypeId const &', 'value')])
return
def register_Ns3UintegerValue_methods(root_module, cls):
## uinteger.h (module 'core'): ns3::UintegerValue::UintegerValue() [constructor]
cls.add_constructor([])
## uinteger.h (module 'core'): ns3::UintegerValue::UintegerValue(ns3::UintegerValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::UintegerValue const &', 'arg0')])
## uinteger.h (module 'core'): ns3::UintegerValue::UintegerValue(uint64_t const & value) [constructor]
cls.add_constructor([param('uint64_t const &', 'value')])
## uinteger.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::UintegerValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## uinteger.h (module 'core'): bool ns3::UintegerValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## uinteger.h (module 'core'): uint64_t ns3::UintegerValue::Get() const [member function]
cls.add_method('Get',
'uint64_t',
[],
is_const=True)
## uinteger.h (module 'core'): std::string ns3::UintegerValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## uinteger.h (module 'core'): void ns3::UintegerValue::Set(uint64_t const & value) [member function]
cls.add_method('Set',
'void',
[param('uint64_t const &', 'value')])
return
def register_Ns3WifiChannel_methods(root_module, cls):
## wifi-channel.h (module 'wifi'): ns3::WifiChannel::WifiChannel() [constructor]
cls.add_constructor([])
## wifi-channel.h (module 'wifi'): ns3::WifiChannel::WifiChannel(ns3::WifiChannel const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WifiChannel const &', 'arg0')])
## wifi-channel.h (module 'wifi'): static ns3::TypeId ns3::WifiChannel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
return
def register_Ns3WifiModeChecker_methods(root_module, cls):
## wifi-mode.h (module 'wifi'): ns3::WifiModeChecker::WifiModeChecker() [constructor]
cls.add_constructor([])
## wifi-mode.h (module 'wifi'): ns3::WifiModeChecker::WifiModeChecker(ns3::WifiModeChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WifiModeChecker const &', 'arg0')])
return
def register_Ns3WifiModeValue_methods(root_module, cls):
## wifi-mode.h (module 'wifi'): ns3::WifiModeValue::WifiModeValue() [constructor]
cls.add_constructor([])
## wifi-mode.h (module 'wifi'): ns3::WifiModeValue::WifiModeValue(ns3::WifiModeValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WifiModeValue const &', 'arg0')])
## wifi-mode.h (module 'wifi'): ns3::WifiModeValue::WifiModeValue(ns3::WifiMode const & value) [constructor]
cls.add_constructor([param('ns3::WifiMode const &', 'value')])
## wifi-mode.h (module 'wifi'): ns3::Ptr<ns3::AttributeValue> ns3::WifiModeValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## wifi-mode.h (module 'wifi'): bool ns3::WifiModeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## wifi-mode.h (module 'wifi'): ns3::WifiMode ns3::WifiModeValue::Get() const [member function]
cls.add_method('Get',
'ns3::WifiMode',
[],
is_const=True)
## wifi-mode.h (module 'wifi'): std::string ns3::WifiModeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## wifi-mode.h (module 'wifi'): void ns3::WifiModeValue::Set(ns3::WifiMode const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::WifiMode const &', 'value')])
return
def register_Ns3WifiNetDevice_methods(root_module, cls):
## wifi-net-device.h (module 'wifi'): ns3::WifiNetDevice::WifiNetDevice(ns3::WifiNetDevice const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WifiNetDevice const &', 'arg0')])
## wifi-net-device.h (module 'wifi'): ns3::WifiNetDevice::WifiNetDevice() [constructor]
cls.add_constructor([])
## wifi-net-device.h (module 'wifi'): void ns3::WifiNetDevice::AddLinkChangeCallback(ns3::Callback<void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> callback) [member function]
cls.add_method('AddLinkChangeCallback',
'void',
[param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')],
is_virtual=True)
## wifi-net-device.h (module 'wifi'): ns3::Address ns3::WifiNetDevice::GetAddress() const [member function]
cls.add_method('GetAddress',
'ns3::Address',
[],
is_const=True, is_virtual=True)
## wifi-net-device.h (module 'wifi'): ns3::Address ns3::WifiNetDevice::GetBroadcast() const [member function]
cls.add_method('GetBroadcast',
'ns3::Address',
[],
is_const=True, is_virtual=True)
## wifi-net-device.h (module 'wifi'): ns3::Ptr<ns3::Channel> ns3::WifiNetDevice::GetChannel() const [member function]
cls.add_method('GetChannel',
'ns3::Ptr< ns3::Channel >',
[],
is_const=True, is_virtual=True)
## wifi-net-device.h (module 'wifi'): uint32_t ns3::WifiNetDevice::GetIfIndex() const [member function]
cls.add_method('GetIfIndex',
'uint32_t',
[],
is_const=True, is_virtual=True)
## wifi-net-device.h (module 'wifi'): ns3::Ptr<ns3::WifiMac> ns3::WifiNetDevice::GetMac() const [member function]
cls.add_method('GetMac',
'ns3::Ptr< ns3::WifiMac >',
[],
is_const=True)
## wifi-net-device.h (module 'wifi'): uint16_t ns3::WifiNetDevice::GetMtu() const [member function]
cls.add_method('GetMtu',
'uint16_t',
[],
is_const=True, is_virtual=True)
## wifi-net-device.h (module 'wifi'): ns3::Address ns3::WifiNetDevice::GetMulticast(ns3::Ipv4Address multicastGroup) const [member function]
cls.add_method('GetMulticast',
'ns3::Address',
[param('ns3::Ipv4Address', 'multicastGroup')],
is_const=True, is_virtual=True)
## wifi-net-device.h (module 'wifi'): ns3::Address ns3::WifiNetDevice::GetMulticast(ns3::Ipv6Address addr) const [member function]
cls.add_method('GetMulticast',
'ns3::Address',
[param('ns3::Ipv6Address', 'addr')],
is_const=True, is_virtual=True)
## wifi-net-device.h (module 'wifi'): ns3::Ptr<ns3::Node> ns3::WifiNetDevice::GetNode() const [member function]
cls.add_method('GetNode',
'ns3::Ptr< ns3::Node >',
[],
is_const=True, is_virtual=True)
## wifi-net-device.h (module 'wifi'): ns3::Ptr<ns3::WifiPhy> ns3::WifiNetDevice::GetPhy() const [member function]
cls.add_method('GetPhy',
'ns3::Ptr< ns3::WifiPhy >',
[],
is_const=True)
## wifi-net-device.h (module 'wifi'): ns3::Ptr<ns3::WifiRemoteStationManager> ns3::WifiNetDevice::GetRemoteStationManager() const [member function]
cls.add_method('GetRemoteStationManager',
'ns3::Ptr< ns3::WifiRemoteStationManager >',
[],
is_const=True)
## wifi-net-device.h (module 'wifi'): static ns3::TypeId ns3::WifiNetDevice::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## wifi-net-device.h (module 'wifi'): bool ns3::WifiNetDevice::IsBridge() const [member function]
cls.add_method('IsBridge',
'bool',
[],
is_const=True, is_virtual=True)
## wifi-net-device.h (module 'wifi'): bool ns3::WifiNetDevice::IsBroadcast() const [member function]
cls.add_method('IsBroadcast',
'bool',
[],
is_const=True, is_virtual=True)
## wifi-net-device.h (module 'wifi'): bool ns3::WifiNetDevice::IsLinkUp() const [member function]
cls.add_method('IsLinkUp',
'bool',
[],
is_const=True, is_virtual=True)
## wifi-net-device.h (module 'wifi'): bool ns3::WifiNetDevice::IsMulticast() const [member function]
cls.add_method('IsMulticast',
'bool',
[],
is_const=True, is_virtual=True)
## wifi-net-device.h (module 'wifi'): bool ns3::WifiNetDevice::IsPointToPoint() const [member function]
cls.add_method('IsPointToPoint',
'bool',
[],
is_const=True, is_virtual=True)
## wifi-net-device.h (module 'wifi'): bool ns3::WifiNetDevice::NeedsArp() const [member function]
cls.add_method('NeedsArp',
'bool',
[],
is_const=True, is_virtual=True)
## wifi-net-device.h (module 'wifi'): bool ns3::WifiNetDevice::Send(ns3::Ptr<ns3::Packet> packet, ns3::Address const & dest, uint16_t protocolNumber) [member function]
cls.add_method('Send',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
is_virtual=True)
## wifi-net-device.h (module 'wifi'): bool ns3::WifiNetDevice::SendFrom(ns3::Ptr<ns3::Packet> packet, ns3::Address const & source, ns3::Address const & dest, uint16_t protocolNumber) [member function]
cls.add_method('SendFrom',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'source'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
is_virtual=True)
## wifi-net-device.h (module 'wifi'): void ns3::WifiNetDevice::SetAddress(ns3::Address address) [member function]
cls.add_method('SetAddress',
'void',
[param('ns3::Address', 'address')],
is_virtual=True)
## wifi-net-device.h (module 'wifi'): void ns3::WifiNetDevice::SetIfIndex(uint32_t const index) [member function]
cls.add_method('SetIfIndex',
'void',
[param('uint32_t const', 'index')],
is_virtual=True)
## wifi-net-device.h (module 'wifi'): void ns3::WifiNetDevice::SetMac(ns3::Ptr<ns3::WifiMac> mac) [member function]
cls.add_method('SetMac',
'void',
[param('ns3::Ptr< ns3::WifiMac >', 'mac')])
## wifi-net-device.h (module 'wifi'): bool ns3::WifiNetDevice::SetMtu(uint16_t const mtu) [member function]
cls.add_method('SetMtu',
'bool',
[param('uint16_t const', 'mtu')],
is_virtual=True)
## wifi-net-device.h (module 'wifi'): void ns3::WifiNetDevice::SetNode(ns3::Ptr<ns3::Node> node) [member function]
cls.add_method('SetNode',
'void',
[param('ns3::Ptr< ns3::Node >', 'node')],
is_virtual=True)
## wifi-net-device.h (module 'wifi'): void ns3::WifiNetDevice::SetPhy(ns3::Ptr<ns3::WifiPhy> phy) [member function]
cls.add_method('SetPhy',
'void',
[param('ns3::Ptr< ns3::WifiPhy >', 'phy')])
## wifi-net-device.h (module 'wifi'): void ns3::WifiNetDevice::SetPromiscReceiveCallback(ns3::Callback<bool, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::Address const&, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> cb) [member function]
cls.add_method('SetPromiscReceiveCallback',
'void',
[param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
is_virtual=True)
## wifi-net-device.h (module 'wifi'): void ns3::WifiNetDevice::SetReceiveCallback(ns3::Callback<bool, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> cb) [member function]
cls.add_method('SetReceiveCallback',
'void',
[param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
is_virtual=True)
## wifi-net-device.h (module 'wifi'): void ns3::WifiNetDevice::SetRemoteStationManager(ns3::Ptr<ns3::WifiRemoteStationManager> manager) [member function]
cls.add_method('SetRemoteStationManager',
'void',
[param('ns3::Ptr< ns3::WifiRemoteStationManager >', 'manager')])
## wifi-net-device.h (module 'wifi'): bool ns3::WifiNetDevice::SupportsSendFrom() const [member function]
cls.add_method('SupportsSendFrom',
'bool',
[],
is_const=True, is_virtual=True)
## wifi-net-device.h (module 'wifi'): void ns3::WifiNetDevice::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='private', is_virtual=True)
## wifi-net-device.h (module 'wifi'): void ns3::WifiNetDevice::DoStart() [member function]
cls.add_method('DoStart',
'void',
[],
visibility='private', is_virtual=True)
return
def register_Ns3YansErrorRateModel_methods(root_module, cls):
## yans-error-rate-model.h (module 'wifi'): ns3::YansErrorRateModel::YansErrorRateModel(ns3::YansErrorRateModel const & arg0) [copy constructor]
cls.add_constructor([param('ns3::YansErrorRateModel const &', 'arg0')])
## yans-error-rate-model.h (module 'wifi'): ns3::YansErrorRateModel::YansErrorRateModel() [constructor]
cls.add_constructor([])
## yans-error-rate-model.h (module 'wifi'): double ns3::YansErrorRateModel::GetChunkSuccessRate(ns3::WifiMode mode, double snr, uint32_t nbits) const [member function]
cls.add_method('GetChunkSuccessRate',
'double',
[param('ns3::WifiMode', 'mode'), param('double', 'snr'), param('uint32_t', 'nbits')],
is_const=True, is_virtual=True)
## yans-error-rate-model.h (module 'wifi'): static ns3::TypeId ns3::YansErrorRateModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
return
def register_Ns3YansWifiChannel_methods(root_module, cls):
## yans-wifi-channel.h (module 'wifi'): static ns3::TypeId ns3::YansWifiChannel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## yans-wifi-channel.h (module 'wifi'): ns3::YansWifiChannel::YansWifiChannel() [constructor]
cls.add_constructor([])
## yans-wifi-channel.h (module 'wifi'): uint32_t ns3::YansWifiChannel::GetNDevices() const [member function]
cls.add_method('GetNDevices',
'uint32_t',
[],
is_const=True, is_virtual=True)
## yans-wifi-channel.h (module 'wifi'): ns3::Ptr<ns3::NetDevice> ns3::YansWifiChannel::GetDevice(uint32_t i) const [member function]
cls.add_method('GetDevice',
'ns3::Ptr< ns3::NetDevice >',
[param('uint32_t', 'i')],
is_const=True, is_virtual=True)
## yans-wifi-channel.h (module 'wifi'): void ns3::YansWifiChannel::Add(ns3::Ptr<ns3::YansWifiPhy> phy) [member function]
cls.add_method('Add',
'void',
[param('ns3::Ptr< ns3::YansWifiPhy >', 'phy')])
## yans-wifi-channel.h (module 'wifi'): void ns3::YansWifiChannel::SetPropagationLossModel(ns3::Ptr<ns3::PropagationLossModel> loss) [member function]
cls.add_method('SetPropagationLossModel',
'void',
[param('ns3::Ptr< ns3::PropagationLossModel >', 'loss')])
## yans-wifi-channel.h (module 'wifi'): void ns3::YansWifiChannel::SetPropagationDelayModel(ns3::Ptr<ns3::PropagationDelayModel> delay) [member function]
cls.add_method('SetPropagationDelayModel',
'void',
[param('ns3::Ptr< ns3::PropagationDelayModel >', 'delay')])
## yans-wifi-channel.h (module 'wifi'): void ns3::YansWifiChannel::Send(ns3::Ptr<ns3::YansWifiPhy> sender, ns3::Ptr<ns3::Packet const> packet, double txPowerDbm, ns3::WifiMode wifiMode, ns3::WifiPreamble preamble) const [member function]
cls.add_method('Send',
'void',
[param('ns3::Ptr< ns3::YansWifiPhy >', 'sender'), param('ns3::Ptr< ns3::Packet const >', 'packet'), param('double', 'txPowerDbm'), param('ns3::WifiMode', 'wifiMode'), param('ns3::WifiPreamble', 'preamble')],
is_const=True)
return
def register_Ns3AddressChecker_methods(root_module, cls):
## address.h (module 'network'): ns3::AddressChecker::AddressChecker() [constructor]
cls.add_constructor([])
## address.h (module 'network'): ns3::AddressChecker::AddressChecker(ns3::AddressChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AddressChecker const &', 'arg0')])
return
def register_Ns3AddressValue_methods(root_module, cls):
## address.h (module 'network'): ns3::AddressValue::AddressValue() [constructor]
cls.add_constructor([])
## address.h (module 'network'): ns3::AddressValue::AddressValue(ns3::AddressValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AddressValue const &', 'arg0')])
## address.h (module 'network'): ns3::AddressValue::AddressValue(ns3::Address const & value) [constructor]
cls.add_constructor([param('ns3::Address const &', 'value')])
## address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::AddressValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## address.h (module 'network'): bool ns3::AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## address.h (module 'network'): ns3::Address ns3::AddressValue::Get() const [member function]
cls.add_method('Get',
'ns3::Address',
[],
is_const=True)
## address.h (module 'network'): std::string ns3::AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## address.h (module 'network'): void ns3::AddressValue::Set(ns3::Address const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Address const &', 'value')])
return
def register_Ns3AdhocWifiMac_methods(root_module, cls):
## adhoc-wifi-mac.h (module 'wifi'): static ns3::TypeId ns3::AdhocWifiMac::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## adhoc-wifi-mac.h (module 'wifi'): ns3::AdhocWifiMac::AdhocWifiMac() [constructor]
cls.add_constructor([])
## adhoc-wifi-mac.h (module 'wifi'): void ns3::AdhocWifiMac::SetAddress(ns3::Mac48Address address) [member function]
cls.add_method('SetAddress',
'void',
[param('ns3::Mac48Address', 'address')],
is_virtual=True)
## adhoc-wifi-mac.h (module 'wifi'): void ns3::AdhocWifiMac::SetLinkUpCallback(ns3::Callback<void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> linkUp) [member function]
cls.add_method('SetLinkUpCallback',
'void',
[param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'linkUp')],
is_virtual=True)
## adhoc-wifi-mac.h (module 'wifi'): void ns3::AdhocWifiMac::Enqueue(ns3::Ptr<ns3::Packet const> packet, ns3::Mac48Address to) [member function]
cls.add_method('Enqueue',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet'), param('ns3::Mac48Address', 'to')],
is_virtual=True)
## adhoc-wifi-mac.h (module 'wifi'): void ns3::AdhocWifiMac::Receive(ns3::Ptr<ns3::Packet> packet, ns3::WifiMacHeader const * hdr) [member function]
cls.add_method('Receive',
'void',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::WifiMacHeader const *', 'hdr')],
visibility='private', is_virtual=True)
return
def register_Ns3ApWifiMac_methods(root_module, cls):
## ap-wifi-mac.h (module 'wifi'): static ns3::TypeId ns3::ApWifiMac::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## ap-wifi-mac.h (module 'wifi'): ns3::ApWifiMac::ApWifiMac() [constructor]
cls.add_constructor([])
## ap-wifi-mac.h (module 'wifi'): void ns3::ApWifiMac::SetWifiRemoteStationManager(ns3::Ptr<ns3::WifiRemoteStationManager> stationManager) [member function]
cls.add_method('SetWifiRemoteStationManager',
'void',
[param('ns3::Ptr< ns3::WifiRemoteStationManager >', 'stationManager')],
is_virtual=True)
## ap-wifi-mac.h (module 'wifi'): void ns3::ApWifiMac::SetLinkUpCallback(ns3::Callback<void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> linkUp) [member function]
cls.add_method('SetLinkUpCallback',
'void',
[param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'linkUp')],
is_virtual=True)
## ap-wifi-mac.h (module 'wifi'): void ns3::ApWifiMac::Enqueue(ns3::Ptr<ns3::Packet const> packet, ns3::Mac48Address to) [member function]
cls.add_method('Enqueue',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet'), param('ns3::Mac48Address', 'to')],
is_virtual=True)
## ap-wifi-mac.h (module 'wifi'): void ns3::ApWifiMac::Enqueue(ns3::Ptr<ns3::Packet const> packet, ns3::Mac48Address to, ns3::Mac48Address from) [member function]
cls.add_method('Enqueue',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet'), param('ns3::Mac48Address', 'to'), param('ns3::Mac48Address', 'from')],
is_virtual=True)
## ap-wifi-mac.h (module 'wifi'): bool ns3::ApWifiMac::SupportsSendFrom() const [member function]
cls.add_method('SupportsSendFrom',
'bool',
[],
is_const=True, is_virtual=True)
## ap-wifi-mac.h (module 'wifi'): void ns3::ApWifiMac::SetAddress(ns3::Mac48Address address) [member function]
cls.add_method('SetAddress',
'void',
[param('ns3::Mac48Address', 'address')],
is_virtual=True)
## ap-wifi-mac.h (module 'wifi'): void ns3::ApWifiMac::SetBeaconInterval(ns3::Time interval) [member function]
cls.add_method('SetBeaconInterval',
'void',
[param('ns3::Time', 'interval')])
## ap-wifi-mac.h (module 'wifi'): ns3::Time ns3::ApWifiMac::GetBeaconInterval() const [member function]
cls.add_method('GetBeaconInterval',
'ns3::Time',
[],
is_const=True)
## ap-wifi-mac.h (module 'wifi'): void ns3::ApWifiMac::StartBeaconing() [member function]
cls.add_method('StartBeaconing',
'void',
[])
## ap-wifi-mac.h (module 'wifi'): void ns3::ApWifiMac::Receive(ns3::Ptr<ns3::Packet> packet, ns3::WifiMacHeader const * hdr) [member function]
cls.add_method('Receive',
'void',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::WifiMacHeader const *', 'hdr')],
visibility='private', is_virtual=True)
## ap-wifi-mac.h (module 'wifi'): void ns3::ApWifiMac::TxOk(ns3::WifiMacHeader const & hdr) [member function]
cls.add_method('TxOk',
'void',
[param('ns3::WifiMacHeader const &', 'hdr')],
visibility='private', is_virtual=True)
## ap-wifi-mac.h (module 'wifi'): void ns3::ApWifiMac::TxFailed(ns3::WifiMacHeader const & hdr) [member function]
cls.add_method('TxFailed',
'void',
[param('ns3::WifiMacHeader const &', 'hdr')],
visibility='private', is_virtual=True)
## ap-wifi-mac.h (module 'wifi'): void ns3::ApWifiMac::DeaggregateAmsduAndForward(ns3::Ptr<ns3::Packet> aggregatedPacket, ns3::WifiMacHeader const * hdr) [member function]
cls.add_method('DeaggregateAmsduAndForward',
'void',
[param('ns3::Ptr< ns3::Packet >', 'aggregatedPacket'), param('ns3::WifiMacHeader const *', 'hdr')],
visibility='private', is_virtual=True)
## ap-wifi-mac.h (module 'wifi'): void ns3::ApWifiMac::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='private', is_virtual=True)
## ap-wifi-mac.h (module 'wifi'): void ns3::ApWifiMac::DoStart() [member function]
cls.add_method('DoStart',
'void',
[],
visibility='private', is_virtual=True)
return
def register_Ns3DcaTxop_methods(root_module, cls):
## dca-txop.h (module 'wifi'): static ns3::TypeId ns3::DcaTxop::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## dca-txop.h (module 'wifi'): ns3::DcaTxop::DcaTxop() [constructor]
cls.add_constructor([])
## dca-txop.h (module 'wifi'): void ns3::DcaTxop::SetLow(ns3::Ptr<ns3::MacLow> low) [member function]
cls.add_method('SetLow',
'void',
[param('ns3::Ptr< ns3::MacLow >', 'low')])
## dca-txop.h (module 'wifi'): void ns3::DcaTxop::SetManager(ns3::DcfManager * manager) [member function]
cls.add_method('SetManager',
'void',
[param('ns3::DcfManager *', 'manager')])
## dca-txop.h (module 'wifi'): void ns3::DcaTxop::SetWifiRemoteStationManager(ns3::Ptr<ns3::WifiRemoteStationManager> remoteManager) [member function]
cls.add_method('SetWifiRemoteStationManager',
'void',
[param('ns3::Ptr< ns3::WifiRemoteStationManager >', 'remoteManager')])
## dca-txop.h (module 'wifi'): void ns3::DcaTxop::SetTxOkCallback(ns3::Callback<void, ns3::WifiMacHeader const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> callback) [member function]
cls.add_method('SetTxOkCallback',
'void',
[param('ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')])
## dca-txop.h (module 'wifi'): void ns3::DcaTxop::SetTxFailedCallback(ns3::Callback<void, ns3::WifiMacHeader const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> callback) [member function]
cls.add_method('SetTxFailedCallback',
'void',
[param('ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')])
## dca-txop.h (module 'wifi'): ns3::Ptr<ns3::WifiMacQueue> ns3::DcaTxop::GetQueue() const [member function]
cls.add_method('GetQueue',
'ns3::Ptr< ns3::WifiMacQueue >',
[],
is_const=True)
## dca-txop.h (module 'wifi'): void ns3::DcaTxop::SetMinCw(uint32_t minCw) [member function]
cls.add_method('SetMinCw',
'void',
[param('uint32_t', 'minCw')],
is_virtual=True)
## dca-txop.h (module 'wifi'): void ns3::DcaTxop::SetMaxCw(uint32_t maxCw) [member function]
cls.add_method('SetMaxCw',
'void',
[param('uint32_t', 'maxCw')],
is_virtual=True)
## dca-txop.h (module 'wifi'): void ns3::DcaTxop::SetAifsn(uint32_t aifsn) [member function]
cls.add_method('SetAifsn',
'void',
[param('uint32_t', 'aifsn')],
is_virtual=True)
## dca-txop.h (module 'wifi'): uint32_t ns3::DcaTxop::GetMinCw() const [member function]
cls.add_method('GetMinCw',
'uint32_t',
[],
is_const=True, is_virtual=True)
## dca-txop.h (module 'wifi'): uint32_t ns3::DcaTxop::GetMaxCw() const [member function]
cls.add_method('GetMaxCw',
'uint32_t',
[],
is_const=True, is_virtual=True)
## dca-txop.h (module 'wifi'): uint32_t ns3::DcaTxop::GetAifsn() const [member function]
cls.add_method('GetAifsn',
'uint32_t',
[],
is_const=True, is_virtual=True)
## dca-txop.h (module 'wifi'): void ns3::DcaTxop::Queue(ns3::Ptr<ns3::Packet const> packet, ns3::WifiMacHeader const & hdr) [member function]
cls.add_method('Queue',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet'), param('ns3::WifiMacHeader const &', 'hdr')])
## dca-txop.h (module 'wifi'): void ns3::DcaTxop::DoStart() [member function]
cls.add_method('DoStart',
'void',
[],
visibility='private', is_virtual=True)
## dca-txop.h (module 'wifi'): void ns3::DcaTxop::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='private', is_virtual=True)
return
def register_functions(root_module):
module = root_module
## ssid.h (module 'wifi'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::MakeSsidChecker() [free function]
module.add_function('MakeSsidChecker',
'ns3::Ptr< ns3::AttributeChecker const >',
[])
## wifi-mode.h (module 'wifi'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::MakeWifiModeChecker() [free function]
module.add_function('MakeWifiModeChecker',
'ns3::Ptr< ns3::AttributeChecker const >',
[])
## qos-utils.h (module 'wifi'): extern uint8_t ns3::QosUtilsGetTidForPacket(ns3::Ptr<ns3::Packet const> packet) [free function]
module.add_function('QosUtilsGetTidForPacket',
'uint8_t',
[param('ns3::Ptr< ns3::Packet const >', 'packet')])
## qos-utils.h (module 'wifi'): extern bool ns3::QosUtilsIsOldPacket(uint16_t startingSeq, uint16_t seqNumber) [free function]
module.add_function('QosUtilsIsOldPacket',
'bool',
[param('uint16_t', 'startingSeq'), param('uint16_t', 'seqNumber')])
## qos-utils.h (module 'wifi'): extern uint32_t ns3::QosUtilsMapSeqControlToUniqueInteger(uint16_t seqControl, uint16_t endSequence) [free function]
module.add_function('QosUtilsMapSeqControlToUniqueInteger',
'uint32_t',
[param('uint16_t', 'seqControl'), param('uint16_t', 'endSequence')])
## qos-utils.h (module 'wifi'): extern ns3::AcIndex ns3::QosUtilsMapTidToAc(uint8_t tid) [free function]
module.add_function('QosUtilsMapTidToAc',
'ns3::AcIndex',
[param('uint8_t', 'tid')])
register_functions_ns3_FatalImpl(module.get_submodule('FatalImpl'), root_module)
register_functions_ns3_internal(module.get_submodule('internal'), root_module)
return
def register_functions_ns3_FatalImpl(module, root_module):
return
def register_functions_ns3_internal(module, root_module):
return
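# Driver for the registrations above.  module_init(), register_types() and
# register_methods() are defined earlier in this generated module; running the
# script prints the C++ Python-binding code for the ns-3 wifi module to stdout
# through pybindgen's FileCodeSink.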
def main():
out = FileCodeSink(sys.stdout)
root_module = module_init()
register_types(root_module)
register_methods(root_module)
register_functions(root_module)
root_module.generate(out)
if __name__ == '__main__':
main()
| gpl-2.0 |
stephen144/odoo | addons/gamification/tests/test_challenge.py | 39 | 4253 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from openerp.tests import common
class test_challenge(common.TransactionCase):
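    """ Functional tests for gamification challenges: enrolling users in a
    running challenge, generating one goal per user and per challenge line,
    and handing out badge rewards when the challenge is closed.
    """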
def setUp(self):
super(test_challenge, self).setUp()
cr, uid = self.cr, self.uid
self.data_obj = self.registry('ir.model.data')
self.user_obj = self.registry('res.users')
self.challenge_obj = self.registry('gamification.challenge')
self.line_obj = self.registry('gamification.challenge.line')
self.goal_obj = self.registry('gamification.goal')
self.badge_obj = self.registry('gamification.badge')
self.badge_user_obj = self.registry('gamification.badge.user')
self.demo_user_id = self.data_obj.get_object_reference(cr, uid, 'base', 'user_demo')[1]
self.group_user_id = self.data_obj.get_object_reference(cr, uid, 'base', 'group_user')[1]
self.challenge_base_id = self.data_obj.get_object_reference(cr, uid, 'gamification', 'challenge_base_discover')[1]
self.definition_timezone_id = self.data_obj.get_object_reference(cr, uid, 'gamification', 'definition_base_timezone')[1]
self.badge_id = self.data_obj.get_object_reference(cr, uid, 'gamification', 'badge_good_job')[1]
def test_00_join_challenge(self):
cr, uid, context = self.cr, self.uid, {}
user_ids = self.user_obj.search(cr, uid, [('groups_id', '=', self.group_user_id)])
challenge = self.challenge_obj.browse(cr, uid, self.challenge_base_id, context=context)
self.assertGreaterEqual(len(challenge.user_ids), len(user_ids), "Not enough users in base challenge")
self.user_obj.create(cr, uid, {
'name': 'R2D2',
'login': '[email protected]',
'email': '[email protected]',
'groups_id': [(6, 0, [self.group_user_id])]
}, {'no_reset_password': True})
self.challenge_obj._update_all(cr, uid, [self.challenge_base_id], context=context)
challenge = self.challenge_obj.browse(cr, uid, self.challenge_base_id, context=context)
        self.assertGreaterEqual(len(challenge.user_ids), len(user_ids)+1, "These are not the droids you are looking for")
def test_10_reach_challenge(self):
cr, uid, context = self.cr, self.uid, {}
self.challenge_obj.write(cr, uid, [self.challenge_base_id], {'state': 'inprogress'}, context=context)
challenge = self.challenge_obj.browse(cr, uid, self.challenge_base_id, context=context)
challenge_user_ids = [user.id for user in challenge.user_ids]
self.assertEqual(challenge.state, 'inprogress', "Challenge failed the change of state")
line_ids = self.line_obj.search(cr, uid, [('challenge_id', '=', self.challenge_base_id)], context=context)
goal_ids = self.goal_obj.search(cr, uid, [('challenge_id', '=', self.challenge_base_id), ('state', '!=', 'draft')], context=context)
self.assertEqual(len(goal_ids), len(line_ids)*len(challenge_user_ids), "Incorrect number of goals generated, should be 1 goal per user, per challenge line")
# demo user will set a timezone
self.user_obj.write(cr, uid, self.demo_user_id, {'tz': "Europe/Brussels"}, context=context)
goal_ids = self.goal_obj.search(cr, uid, [('user_id', '=', self.demo_user_id), ('definition_id', '=', self.definition_timezone_id)], context=context)
self.goal_obj.update(cr, uid, goal_ids, context=context)
reached_goal_ids = self.goal_obj.search(cr, uid, [('id', 'in', goal_ids), ('state', '=', 'reached')], context=context)
self.assertEqual(set(goal_ids), set(reached_goal_ids), "Not every goal was reached after changing timezone")
        # reward both first and second place, since the admin user may also have a timezone set
self.challenge_obj.write(cr, uid, self.challenge_base_id, {'reward_first_id': self.badge_id, 'reward_second_id': self.badge_id}, context=context)
self.challenge_obj.write(cr, uid, self.challenge_base_id, {'state': 'done'}, context=context)
badge_ids = self.badge_user_obj.search(cr, uid, [('badge_id', '=', self.badge_id), ('user_id', '=', self.demo_user_id)])
self.assertGreater(len(badge_ids), 0, "Demo user has not received the badge") | agpl-3.0 |
ericmok/eri53 | learn/libsvm-3.12/tools/checkdata.py | 144 | 2479 | #!/usr/bin/env python
#
# A format checker for LIBSVM
#
#
# Copyright (c) 2007, Rong-En Fan
#
# All rights reserved.
#
# This program is distributed under the same license of the LIBSVM package.
#
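# The checks below assume the usual LIBSVM sparse text format, one instance per
# line; the sample lines here are illustrative only:
#
#   <label> <index1>:<value1> <index2>:<value2> ...
#   +1 1:0.43 3:0.12 9284:0.2
#   1,2 1:0.8 5:1.0        (comma-separated labels for the multi-label form)
#
# Labels must be numbers (nan/inf rejected), feature values real numbers, and
# feature indices integers in strictly ascending order.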
from sys import argv, exit
import os.path
def err(line_no, msg):
print("line {0}: {1}".format(line_no, msg))
# works like float() but does not accept nan and inf
def my_float(x):
if x.lower().find("nan") != -1 or x.lower().find("inf") != -1:
raise ValueError
return float(x)
def main():
if len(argv) != 2:
print("Usage: {0} dataset".format(argv[0]))
exit(1)
dataset = argv[1]
if not os.path.exists(dataset):
print("dataset {0} not found".format(dataset))
exit(1)
line_no = 1
error_line_count = 0
for line in open(dataset, 'r'):
line_error = False
# each line must end with a newline character
if line[-1] != '\n':
err(line_no, "missing a newline character in the end")
line_error = True
nodes = line.split()
# check label
try:
label = nodes.pop(0)
if label.find(',') != -1:
# multi-label format
try:
for l in label.split(','):
l = my_float(l)
except:
err(line_no, "label {0} is not a valid multi-label form".format(label))
line_error = True
else:
try:
label = my_float(label)
except:
err(line_no, "label {0} is not a number".format(label))
line_error = True
except:
err(line_no, "missing label, perhaps an empty line?")
line_error = True
# check features
prev_index = -1
for i in range(len(nodes)):
try:
(index, value) = nodes[i].split(':')
index = int(index)
value = my_float(value)
# precomputed kernel's index starts from 0 and LIBSVM
# checks it. Hence, don't treat index 0 as an error.
if index < 0:
err(line_no, "feature index must be positive; wrong feature {0}".format(nodes[i]))
line_error = True
elif index <= prev_index:
err(line_no, "feature indices must be in an ascending order, previous/current features {0} {1}".format(nodes[i-1], nodes[i]))
line_error = True
prev_index = index
except:
err(line_no, "feature '{0}' not an <index>:<value> pair, <index> integer, <value> real number ".format(nodes[i]))
line_error = True
line_no += 1
if line_error:
error_line_count += 1
if error_line_count > 0:
print("Found {0} lines with error.".format(error_line_count))
return 1
else:
print("No error.")
return 0
if __name__ == "__main__":
exit(main())
| unlicense |
ekeih/libtado | docs/source/conf.py | 1 | 4718 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# libtado documentation build configuration file, created by
# sphinx-quickstart on Thu Feb 16 17:11:41 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.napoleon']
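# autodoc pulls the API reference out of the libtado docstrings; napoleon lets
# autodoc parse Google- and NumPy-style docstring sections.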
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'libtado'
copyright = '2017, Max Rosin'
author = 'Max Rosin'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '2.0.0'
# The full version, including alpha/beta/rc tags.
release = '2.0.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'libtadodoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'libtado.tex', 'libtado Documentation',
'Max Rosin', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'libtado', 'libtado Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'libtado', 'libtado Documentation',
author, 'libtado', 'One line description of project.',
'Miscellaneous'),
]
| gpl-3.0 |
jswope00/GAI | common/djangoapps/user_api/tests/test_models.py | 52 | 1757 | from django.db import IntegrityError
from django.test import TestCase
from student.tests.factories import UserFactory
from user_api.tests.factories import UserPreferenceFactory
from user_api.models import UserPreference
class UserPreferenceModelTest(TestCase):
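    """
    Model-level tests for UserPreference: (user, key) pairs must be unique,
    values can hold arbitrary text, and set_preference/get_preference
    round-trip correctly (returning None for unset keys).
    """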
def test_duplicate_user_key(self):
user = UserFactory.create()
UserPreferenceFactory.create(user=user, key="testkey", value="first")
self.assertRaises(
IntegrityError,
UserPreferenceFactory.create,
user=user,
key="testkey",
value="second"
)
def test_arbitrary_values(self):
user = UserFactory.create()
UserPreferenceFactory.create(user=user, key="testkey0", value="")
UserPreferenceFactory.create(user=user, key="testkey1", value="This is some English text!")
UserPreferenceFactory.create(user=user, key="testkey2", value="{'some': 'json'}")
UserPreferenceFactory.create(
user=user,
key="testkey3",
value="\xe8\xbf\x99\xe6\x98\xaf\xe4\xb8\xad\xe5\x9b\xbd\xe6\x96\x87\xe5\xad\x97'"
)
def test_get_set_preference(self):
# Checks that you can set a preference and get that preference later
# Also, tests that no preference is returned for keys that are not set
user = UserFactory.create()
key = 'testkey'
value = 'testvalue'
# does a round trip
UserPreference.set_preference(user, key, value)
pref = UserPreference.get_preference(user, key)
self.assertEqual(pref, value)
# get preference for key that doesn't exist for user
pref = UserPreference.get_preference(user, 'testkey_none')
self.assertIsNone(pref)
| agpl-3.0 |
er432/TASSELpy | TASSELpy/net/maizegenetics/trait/CombinePhenotype.py | 1 | 4297 | from TASSELpy.utils.helper import make_sig
from TASSELpy.utils.Overloading import javaOverload, javaConstructorOverload, javaStaticOverload
from TASSELpy.net.maizegenetics.trait.AbstractPhenotype import AbstractPhenotype
from TASSELpy.net.maizegenetics.trait.Phenotype import Phenotype
from TASSELpy.net.maizegenetics.trait.SimplePhenotype import SimplePhenotype
from TASSELpy.net.maizegenetics.trait import Trait
from TASSELpy.net.maizegenetics.taxa.Taxon import Taxon
from TASSELpy.java.lang.Boolean import metaBoolean
from TASSELpy.java.lang.Integer import metaInteger
from TASSELpy.java.lang.Double import metaDouble
from TASSELpy.javaObj import javaArray
java_imports = {'CombinePhenotype': 'net/maizegenetics/trait/CombinePhenotype',
'Phenotype':'net/maizegenetics/trait/Phenotype',
'SimplePhenotype':'net/maizegenetics/trait/SimplePhenotype',
'Taxon':'net/maizegenetics/taxa/Taxon',
'Trait':'net/maizegenetics/trait/Trait'}
class CombinePhenotype(AbstractPhenotype):
""" Class used for combining multiple phenotypes that may have different taxa
among them
"""
_java_name = java_imports['CombinePhenotype']
@javaConstructorOverload(java_imports['CombinePhenotype'])
def __init__(self, *args, **kwargs):
pass
@javaStaticOverload(java_imports['CombinePhenotype'],"getInstance",
(make_sig([java_imports['Phenotype'],java_imports['Phenotype'],'boolean'],
java_imports['CombinePhenotype']),
(Phenotype,Phenotype,metaBoolean),
lambda x: CombinePhenotype(obj=x)),
(make_sig([java_imports['Phenotype']+'[]','boolean'],
java_imports['CombinePhenotype']),
(javaArray.get_array_type(Phenotype),metaBoolean),
lambda x: CombinePhenotype(obj=x)))
def getInstance(*args):
""" Gets an instance of CombinePhenotype, allowing combining of more than
one phenotype
Signatures:
static CombinePhenotype getInstance(Phenotype phenotype1, Phenotype phenotype2,
boolean isUnion)
static CombinePhenotype getInstance(Phenotype[] phenotypes, boolean isUnion)
Arguments:
static CombinePhenotype getInstance(Phenotype phenotype1, Phenotype phenotype2,
boolean isUnion)
phenotype1 -- The first phenotype
phenotype2 -- The second phenotype
isUnion -- Whether to take the union of the taxa. If false, takes the intersection
static CombinePhenotype getInstance(Phenotype[] phenotypes, boolean isUnion)
phenotypes -- List of Phenotype objects to combine
isUnion -- Whether to take the union of the taxa in the Phenotypes. If false,
takes the intersection
"""
pass
@javaOverload("setData",
(make_sig([java_imports['Taxon'],java_imports['Trait'],'double'],'void'),
(Taxon,Trait,metaDouble),None),
(make_sig(['int','int','double'],'void'),
(metaInteger,metaInteger,metaDouble),None))
def setData(self, *args):
""" Sets a particular data value
Signatures:
void setData(Taxon taxon, Trait trait, double value)
void setData(int taxon, int trait, double value)
Arguments:
void setData(Taxon taxon, Trait trait, double value)
taxon -- A Taxon instance
trait -- A Trait instance
value -- The value to set
void setData(int taxon, int trait, double value)
taxon -- index of a taxon
trait -- index of a trait
value -- The value to set
"""
pass
@javaOverload("simpleCopy",
(make_sig([],java_imports['SimplePhenotype']),(),
lambda x: SimplePhenotype(obj=x)))
def simpleCopy(self, *args):
""" Makes a copy of the CombinePhenotype as a SimplePhenotype
Signatures:
SimplePhenotype simpleCopy()
Returns:
A SimplePhenotype copy of the object
"""
pass
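# ---------------------------------------------------------------------------
# Hedged usage sketch (added for illustration; not part of the upstream
# module). The getInstance/simpleCopy docstrings above spell out the call
# signatures, so the helper below shows how they fit together. It assumes a
# configured TASSELpy/JVM session and two already-loaded Phenotype objects;
# the helper name _example_combine is hypothetical.
def _example_combine(pheno1, pheno2, union=True):
    """Combine two Phenotype objects and return a flattened SimplePhenotype."""
    # union=True keeps taxa present in either phenotype; False keeps only the
    # taxa present in both (intersection), per the getInstance docstring.
    combined = CombinePhenotype.getInstance(pheno1, pheno2, union)
    # simpleCopy() materializes the combined view as a SimplePhenotype.
    return combined.simpleCopy()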
| bsd-3-clause |
dario61081/koalixcrm | koalixcrm/accounting/rest/restinterface.py | 2 | 2538 | # -*- coding: utf-8 -*-
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from rest_framework import viewsets
from koalixcrm.accounting.models import Account, AccountingPeriod, Booking, ProductCategory
from koalixcrm.accounting.rest.account_rest import AccountJSONSerializer
from koalixcrm.accounting.rest.accounting_period_rest import AccountingPeriodJSONSerializer
from koalixcrm.accounting.rest.booking_rest import BookingJSONSerializer
from koalixcrm.accounting.rest.product_categorie_rest import ProductCategoryJSONSerializer
from koalixcrm.global_support_functions import ConditionalMethodDecorator
class AccountAsJSON(viewsets.ModelViewSet):
"""
API endpoint that allows accounts to be created, viewed and modified.
"""
queryset = Account.objects.all()
serializer_class = AccountJSONSerializer
filter_fields = ('account_type',)
@ConditionalMethodDecorator(method_decorator(login_required), settings.KOALIXCRM_REST_API_AUTH)
def dispatch(self, *args, **kwargs):
return super(AccountAsJSON, self).dispatch(*args, **kwargs)
class AccountingPeriodAsJSON(viewsets.ModelViewSet):
"""
API endpoint that allows accounting periods to be created, viewed and modified.
"""
queryset = AccountingPeriod.objects.all()
serializer_class = AccountingPeriodJSONSerializer
@ConditionalMethodDecorator(method_decorator(login_required), settings.KOALIXCRM_REST_API_AUTH)
def dispatch(self, *args, **kwargs):
return super(AccountingPeriodAsJSON, self).dispatch(*args, **kwargs)
class BookingAsJSON(viewsets.ModelViewSet):
"""
API endpoint that allows bookings to be created, viewed and modified.
"""
queryset = Booking.objects.all()
serializer_class = BookingJSONSerializer
@ConditionalMethodDecorator(method_decorator(login_required), settings.KOALIXCRM_REST_API_AUTH)
def dispatch(self, *args, **kwargs):
return super(BookingAsJSON, self).dispatch(*args, **kwargs)
class ProductCategoryAsJSON(viewsets.ModelViewSet):
"""
API endpoint that allows product categories to be created, viewed and modified.
"""
queryset = ProductCategory.objects.all()
serializer_class = ProductCategoryJSONSerializer
@ConditionalMethodDecorator(method_decorator(login_required), settings.KOALIXCRM_REST_API_AUTH)
def dispatch(self, *args, **kwargs):
return super(ProductCategoryAsJSON, self).dispatch(*args, **kwargs)
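# ---------------------------------------------------------------------------
# Hedged wiring sketch (illustrative, not part of the upstream file). Each
# class above is a plain DRF ModelViewSet, so it is normally exposed by
# registering it on a router in the project's URL configuration. The URL
# prefixes below are assumptions, not koalixcrm's actual route names.
def build_accounting_router():
    from rest_framework import routers
    router = routers.DefaultRouter()
    router.register(r'accounting_account', AccountAsJSON)
    router.register(r'accounting_period', AccountingPeriodAsJSON)
    router.register(r'accounting_booking', BookingAsJSON)
    router.register(r'accounting_product_category', ProductCategoryAsJSON)
    # In urls.py one would then add: urlpatterns += router.urls
    return router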
| bsd-3-clause |
jblackburne/scikit-learn | sklearn/gaussian_process/gpc.py | 42 | 31571 | """Gaussian processes classification."""
# Authors: Jan Hendrik Metzen <[email protected]>
#
# License: BSD 3 clause
import warnings
from operator import itemgetter
import numpy as np
from scipy.linalg import cholesky, cho_solve, solve
from scipy.optimize import fmin_l_bfgs_b
from scipy.special import erf
from sklearn.base import BaseEstimator, ClassifierMixin, clone
from sklearn.gaussian_process.kernels \
import RBF, CompoundKernel, ConstantKernel as C
from sklearn.utils.validation import check_X_y, check_is_fitted, check_array
from sklearn.utils import check_random_state
from sklearn.preprocessing import LabelEncoder
from sklearn.multiclass import OneVsRestClassifier, OneVsOneClassifier
# Values required for approximating the logistic sigmoid by
# error functions. coefs are obtained via:
# x = np.array([0, 0.6, 2, 3.5, 4.5, np.inf])
# b = logistic(x)
# A = (erf(np.dot(x, self.lambdas)) + 1) / 2
# coefs = lstsq(A, b)[0]
LAMBDAS = np.array([0.41, 0.4, 0.37, 0.44, 0.39])[:, np.newaxis]
COEFS = np.array([-1854.8214151, 3516.89893646, 221.29346712,
128.12323805, -2010.49422654])[:, np.newaxis]
class _BinaryGaussianProcessClassifierLaplace(BaseEstimator):
"""Binary Gaussian process classification based on Laplace approximation.
The implementation is based on Algorithm 3.1, 3.2, and 5.1 of
``Gaussian Processes for Machine Learning'' (GPML) by Rasmussen and
Williams.
Internally, the Laplace approximation is used for approximating the
non-Gaussian posterior by a Gaussian.
Currently, the implementation is restricted to using the logistic link
function.
Parameters
----------
kernel : kernel object
The kernel specifying the covariance function of the GP. If None is
passed, the kernel "1.0 * RBF(1.0)" is used as default. Note that
the kernel's hyperparameters are optimized during fitting.
optimizer : string or callable, optional (default: "fmin_l_bfgs_b")
Can either be one of the internally supported optimizers for optimizing
the kernel's parameters, specified by a string, or an externally
defined optimizer passed as a callable. If a callable is passed, it
must have the signature::
def optimizer(obj_func, initial_theta, bounds):
# * 'obj_func' is the objective function to be maximized, which
# takes the hyperparameters theta as parameter and an
# optional flag eval_gradient, which determines if the
# gradient is returned additionally to the function value
# * 'initial_theta': the initial value for theta, which can be
# used by local optimizers
# * 'bounds': the bounds on the values of theta
....
# Returned are the best found hyperparameters theta and
# the corresponding value of the target function.
return theta_opt, func_min
Per default, the 'fmin_l_bfgs_b' algorithm from scipy.optimize
is used. If None is passed, the kernel's parameters are kept fixed.
Available internal optimizers are::
'fmin_l_bfgs_b'
n_restarts_optimizer: int, optional (default: 0)
The number of restarts of the optimizer for finding the kernel's
parameters which maximize the log-marginal likelihood. The first run
of the optimizer is performed from the kernel's initial parameters,
the remaining ones (if any) from thetas sampled log-uniform randomly
from the space of allowed theta-values. If greater than 0, all bounds
must be finite. Note that n_restarts_optimizer=0 implies that one
run is performed.
max_iter_predict: int, optional (default: 100)
The maximum number of iterations in Newton's method for approximating
the posterior during predict. Smaller values will reduce computation
time at the cost of worse results.
warm_start : bool, optional (default: False)
If warm-starts are enabled, the solution of the last Newton iteration
on the Laplace approximation of the posterior mode is used as
initialization for the next call of _posterior_mode(). This can speed
up convergence when _posterior_mode is called several times on similar
problems as in hyperparameter optimization.
copy_X_train : bool, optional (default: True)
If True, a persistent copy of the training data is stored in the
object. Otherwise, just a reference to the training data is stored,
which might cause predictions to change if the data is modified
externally.
random_state : integer or numpy.RandomState, optional
The generator used to initialize the centers. If an integer is
given, it fixes the seed. Defaults to the global numpy random
number generator.
Attributes
----------
X_train_ : array-like, shape = (n_samples, n_features)
Feature values in training data (also required for prediction)
y_train_: array-like, shape = (n_samples,)
Target values in training data (also required for prediction)
classes_ : array-like, shape = (n_classes,)
Unique class labels.
kernel_: kernel object
The kernel used for prediction. The structure of the kernel is the
same as the one passed as parameter but with optimized hyperparameters
L_: array-like, shape = (n_samples, n_samples)
Lower-triangular Cholesky decomposition of the kernel in X_train_
pi_: array-like, shape = (n_samples,)
The probabilities of the positive class for the training points
X_train_
W_sr_: array-like, shape = (n_samples,)
Square root of W, the Hessian of log-likelihood of the latent function
values for the observed labels. Since W is diagonal, only the diagonal
of sqrt(W) is stored.
log_marginal_likelihood_value_: float
The log-marginal-likelihood of ``self.kernel_.theta``
"""
def __init__(self, kernel=None, optimizer="fmin_l_bfgs_b",
n_restarts_optimizer=0, max_iter_predict=100,
warm_start=False, copy_X_train=True, random_state=None):
self.kernel = kernel
self.optimizer = optimizer
self.n_restarts_optimizer = n_restarts_optimizer
self.max_iter_predict = max_iter_predict
self.warm_start = warm_start
self.copy_X_train = copy_X_train
self.random_state = random_state
def fit(self, X, y):
"""Fit Gaussian process classification model
Parameters
----------
X : array-like, shape = (n_samples, n_features)
Training data
y : array-like, shape = (n_samples,)
Target values, must be binary
Returns
-------
self : returns an instance of self.
"""
if self.kernel is None: # Use an RBF kernel as default
self.kernel_ = C(1.0, constant_value_bounds="fixed") \
* RBF(1.0, length_scale_bounds="fixed")
else:
self.kernel_ = clone(self.kernel)
self.rng = check_random_state(self.random_state)
self.X_train_ = np.copy(X) if self.copy_X_train else X
# Encode class labels and check that it is a binary classification
# problem
label_encoder = LabelEncoder()
self.y_train_ = label_encoder.fit_transform(y)
self.classes_ = label_encoder.classes_
if self.classes_.size > 2:
raise ValueError("%s supports only binary classification. "
"y contains classes %s"
% (self.__class__.__name__, self.classes_))
elif self.classes_.size == 1:
raise ValueError("{0:s} requires 2 classes.".format(
self.__class__.__name__))
if self.optimizer is not None and self.kernel_.n_dims > 0:
# Choose hyperparameters based on maximizing the log-marginal
# likelihood (potentially starting from several initial values)
def obj_func(theta, eval_gradient=True):
if eval_gradient:
lml, grad = self.log_marginal_likelihood(
theta, eval_gradient=True)
return -lml, -grad
else:
return -self.log_marginal_likelihood(theta)
# First optimize starting from theta specified in kernel
optima = [self._constrained_optimization(obj_func,
self.kernel_.theta,
self.kernel_.bounds)]
# Additional runs are performed from log-uniform chosen initial
# theta
if self.n_restarts_optimizer > 0:
if not np.isfinite(self.kernel_.bounds).all():
raise ValueError(
"Multiple optimizer restarts (n_restarts_optimizer>0) "
"requires that all bounds are finite.")
bounds = self.kernel_.bounds
for iteration in range(self.n_restarts_optimizer):
theta_initial = np.exp(self.rng.uniform(bounds[:, 0],
bounds[:, 1]))
optima.append(
self._constrained_optimization(obj_func, theta_initial,
bounds))
# Select result from run with minimal (negative) log-marginal
# likelihood
lml_values = list(map(itemgetter(1), optima))
self.kernel_.theta = optima[np.argmin(lml_values)][0]
self.log_marginal_likelihood_value_ = -np.min(lml_values)
else:
self.log_marginal_likelihood_value_ = \
self.log_marginal_likelihood(self.kernel_.theta)
# Precompute quantities required for predictions which are independent
# of actual query points
K = self.kernel_(self.X_train_)
_, (self.pi_, self.W_sr_, self.L_, _, _) = \
self._posterior_mode(K, return_temporaries=True)
return self
def predict(self, X):
"""Perform classification on an array of test vectors X.
Parameters
----------
X : array-like, shape = (n_samples, n_features)
Returns
-------
C : array, shape = (n_samples,)
Predicted target values for X, values are from ``classes_``
"""
check_is_fitted(self, ["X_train_", "y_train_", "pi_", "W_sr_", "L_"])
# As discussed on Section 3.4.2 of GPML, for making hard binary
# decisions, it is enough to compute the MAP of the posterior and
# pass it through the link function
K_star = self.kernel_(self.X_train_, X) # K_star =k(x_star)
f_star = K_star.T.dot(self.y_train_ - self.pi_) # Algorithm 3.2,Line 4
return np.where(f_star > 0, self.classes_[1], self.classes_[0])
def predict_proba(self, X):
"""Return probability estimates for the test vector X.
Parameters
----------
X : array-like, shape = (n_samples, n_features)
Returns
-------
C : array-like, shape = (n_samples, n_classes)
Returns the probability of the samples for each class in
the model. The columns correspond to the classes in sorted
order, as they appear in the attribute ``classes_``.
"""
check_is_fitted(self, ["X_train_", "y_train_", "pi_", "W_sr_", "L_"])
# Based on Algorithm 3.2 of GPML
K_star = self.kernel_(self.X_train_, X) # K_star =k(x_star)
f_star = K_star.T.dot(self.y_train_ - self.pi_) # Line 4
v = solve(self.L_, self.W_sr_[:, np.newaxis] * K_star) # Line 5
# Line 6 (compute np.diag(v.T.dot(v)) via einsum)
var_f_star = self.kernel_.diag(X) - np.einsum("ij,ij->j", v, v)
# Line 7:
# Approximate \int log(z) * N(z | f_star, var_f_star)
# Approximation is due to Williams & Barber, "Bayesian Classification
# with Gaussian Processes", Appendix A: Approximate the logistic
# sigmoid by a linear combination of 5 error functions.
# For information on how this integral can be computed see
# blitiri.blogspot.de/2012/11/gaussian-integral-of-error-function.html
alpha = 1 / (2 * var_f_star)
gamma = LAMBDAS * f_star
integrals = np.sqrt(np.pi / alpha) \
* erf(gamma * np.sqrt(alpha / (alpha + LAMBDAS**2))) \
/ (2 * np.sqrt(var_f_star * 2 * np.pi))
pi_star = (COEFS * integrals).sum(axis=0) + .5 * COEFS.sum()
return np.vstack((1 - pi_star, pi_star)).T
def log_marginal_likelihood(self, theta=None, eval_gradient=False):
"""Returns log-marginal likelihood of theta for training data.
Parameters
----------
theta : array-like, shape = (n_kernel_params,) or None
Kernel hyperparameters for which the log-marginal likelihood is
evaluated. If None, the precomputed log_marginal_likelihood
of ``self.kernel_.theta`` is returned.
eval_gradient : bool, default: False
If True, the gradient of the log-marginal likelihood with respect
to the kernel hyperparameters at position theta is returned
additionally. If True, theta must not be None.
Returns
-------
log_likelihood : float
Log-marginal likelihood of theta for training data.
log_likelihood_gradient : array, shape = (n_kernel_params,), optional
Gradient of the log-marginal likelihood with respect to the kernel
hyperparameters at position theta.
Only returned when eval_gradient is True.
"""
if theta is None:
if eval_gradient:
raise ValueError(
"Gradient can only be evaluated for theta!=None")
return self.log_marginal_likelihood_value_
kernel = self.kernel_.clone_with_theta(theta)
if eval_gradient:
K, K_gradient = kernel(self.X_train_, eval_gradient=True)
else:
K = kernel(self.X_train_)
# Compute log-marginal-likelihood Z and also store some temporaries
# which can be reused for computing Z's gradient
Z, (pi, W_sr, L, b, a) = \
self._posterior_mode(K, return_temporaries=True)
if not eval_gradient:
return Z
# Compute gradient based on Algorithm 5.1 of GPML
d_Z = np.empty(theta.shape[0])
# XXX: Get rid of the np.diag() in the next line
R = W_sr[:, np.newaxis] * cho_solve((L, True), np.diag(W_sr)) # Line 7
C = solve(L, W_sr[:, np.newaxis] * K) # Line 8
# Line 9: (use einsum to compute np.diag(C.T.dot(C))))
s_2 = -0.5 * (np.diag(K) - np.einsum('ij, ij -> j', C, C)) \
* (pi * (1 - pi) * (1 - 2 * pi)) # third derivative
for j in range(d_Z.shape[0]):
C = K_gradient[:, :, j] # Line 11
# Line 12: (R.T.ravel().dot(C.ravel()) = np.trace(R.dot(C)))
s_1 = .5 * a.T.dot(C).dot(a) - .5 * R.T.ravel().dot(C.ravel())
b = C.dot(self.y_train_ - pi) # Line 13
s_3 = b - K.dot(R.dot(b)) # Line 14
d_Z[j] = s_1 + s_2.T.dot(s_3) # Line 15
return Z, d_Z
def _posterior_mode(self, K, return_temporaries=False):
"""Mode-finding for binary Laplace GPC and fixed kernel.
This approximates the posterior of the latent function values for given
inputs and target observations with a Gaussian approximation and uses
Newton's iteration to find the mode of this approximation.
"""
# Based on Algorithm 3.1 of GPML
# If warm_start are enabled, we reuse the last solution for the
# posterior mode as initialization; otherwise, we initialize with 0
if self.warm_start and hasattr(self, "f_cached") \
and self.f_cached.shape == self.y_train_.shape:
f = self.f_cached
else:
f = np.zeros_like(self.y_train_, dtype=np.float64)
# Use Newton's iteration method to find mode of Laplace approximation
log_marginal_likelihood = -np.inf
for _ in range(self.max_iter_predict):
# Line 4
pi = 1 / (1 + np.exp(-f))
W = pi * (1 - pi)
# Line 5
W_sr = np.sqrt(W)
W_sr_K = W_sr[:, np.newaxis] * K
B = np.eye(W.shape[0]) + W_sr_K * W_sr
L = cholesky(B, lower=True)
# Line 6
b = W * f + (self.y_train_ - pi)
# Line 7
a = b - W_sr * cho_solve((L, True), W_sr_K.dot(b))
# Line 8
f = K.dot(a)
# Line 10: Compute log marginal likelihood in loop and use as
# convergence criterion
lml = -0.5 * a.T.dot(f) \
- np.log(1 + np.exp(-(self.y_train_ * 2 - 1) * f)).sum() \
- np.log(np.diag(L)).sum()
# Check if we have converged (log marginal likelihood does
# not decrease)
# XXX: more complex convergence criterion
if lml - log_marginal_likelihood < 1e-10:
break
log_marginal_likelihood = lml
self.f_cached = f # Remember solution for later warm-starts
if return_temporaries:
return log_marginal_likelihood, (pi, W_sr, L, b, a)
else:
return log_marginal_likelihood
def _constrained_optimization(self, obj_func, initial_theta, bounds):
if self.optimizer == "fmin_l_bfgs_b":
theta_opt, func_min, convergence_dict = \
fmin_l_bfgs_b(obj_func, initial_theta, bounds=bounds)
if convergence_dict["warnflag"] != 0:
warnings.warn("fmin_l_bfgs_b terminated abnormally with the "
" state: %s" % convergence_dict)
elif callable(self.optimizer):
theta_opt, func_min = \
self.optimizer(obj_func, initial_theta, bounds=bounds)
else:
raise ValueError("Unknown optimizer %s." % self.optimizer)
return theta_opt, func_min
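# ---------------------------------------------------------------------------
# Hedged example (illustrative, not part of upstream scikit-learn). The
# docstrings above describe the signature an externally supplied ``optimizer``
# callable must have; a minimal implementation on top of
# scipy.optimize.minimize could look like the function below (its name is an
# assumption). It would be passed as
# GaussianProcessClassifier(optimizer=_example_custom_optimizer).
def _example_custom_optimizer(obj_func, initial_theta, bounds):
    from scipy.optimize import minimize
    # With eval_gradient=True, obj_func returns the negative log-marginal
    # likelihood and its gradient, which matches jac=True below.
    result = minimize(lambda theta: obj_func(theta, eval_gradient=True),
                      initial_theta, method="L-BFGS-B", jac=True, bounds=bounds)
    # The caller expects the optimum and the objective value at the optimum.
    return result.x, result.fun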
class GaussianProcessClassifier(BaseEstimator, ClassifierMixin):
"""Gaussian process classification (GPC) based on Laplace approximation.
The implementation is based on Algorithm 3.1, 3.2, and 5.1 of
Gaussian Processes for Machine Learning (GPML) by Rasmussen and
Williams.
Internally, the Laplace approximation is used for approximating the
non-Gaussian posterior by a Gaussian.
Currently, the implementation is restricted to using the logistic link
function. For multi-class classification, several binary one-versus rest
classifiers are fitted. Note that this class thus does not implement
a true multi-class Laplace approximation.
Parameters
----------
kernel : kernel object
The kernel specifying the covariance function of the GP. If None is
passed, the kernel "1.0 * RBF(1.0)" is used as default. Note that
the kernel's hyperparameters are optimized during fitting.
optimizer : string or callable, optional (default: "fmin_l_bfgs_b")
Can either be one of the internally supported optimizers for optimizing
the kernel's parameters, specified by a string, or an externally
defined optimizer passed as a callable. If a callable is passed, it
must have the signature::
def optimizer(obj_func, initial_theta, bounds):
# * 'obj_func' is the objective function to be maximized, which
# takes the hyperparameters theta as parameter and an
# optional flag eval_gradient, which determines if the
# gradient is returned additionally to the function value
# * 'initial_theta': the initial value for theta, which can be
# used by local optimizers
# * 'bounds': the bounds on the values of theta
....
# Returned are the best found hyperparameters theta and
# the corresponding value of the target function.
return theta_opt, func_min
Per default, the 'fmin_l_bfgs_b' algorithm from scipy.optimize
is used. If None is passed, the kernel's parameters are kept fixed.
Available internal optimizers are::
'fmin_l_bfgs_b'
n_restarts_optimizer: int, optional (default: 0)
The number of restarts of the optimizer for finding the kernel's
parameters which maximize the log-marginal likelihood. The first run
of the optimizer is performed from the kernel's initial parameters,
the remaining ones (if any) from thetas sampled log-uniform randomly
from the space of allowed theta-values. If greater than 0, all bounds
must be finite. Note that n_restarts_optimizer=0 implies that one
run is performed.
max_iter_predict: int, optional (default: 100)
The maximum number of iterations in Newton's method for approximating
the posterior during predict. Smaller values will reduce computation
time at the cost of worse results.
warm_start : bool, optional (default: False)
If warm-starts are enabled, the solution of the last Newton iteration
on the Laplace approximation of the posterior mode is used as
initialization for the next call of _posterior_mode(). This can speed
up convergence when _posterior_mode is called several times on similar
problems as in hyperparameter optimization.
copy_X_train : bool, optional (default: True)
If True, a persistent copy of the training data is stored in the
object. Otherwise, just a reference to the training data is stored,
which might cause predictions to change if the data is modified
externally.
random_state : integer or numpy.RandomState, optional
The generator used to initialize the centers. If an integer is
given, it fixes the seed. Defaults to the global numpy random
number generator.
multi_class: string, default: "one_vs_rest"
Specifies how multi-class classification problems are handled.
Supported are "one_vs_rest" and "one_vs_one". In "one_vs_rest",
one binary Gaussian process classifier is fitted for each class, which
is trained to separate this class from the rest. In "one_vs_one", one
binary Gaussian process classifier is fitted for each pair of classes,
which is trained to separate these two classes. The predictions of
these binary predictors are combined into multi-class predictions.
Note that "one_vs_one" does not support predicting probability
estimates.
n_jobs : int, optional, default: 1
The number of jobs to use for the computation. If -1 all CPUs are used.
If 1 is given, no parallel computing code is used at all, which is
useful for debugging. For n_jobs below -1, (n_cpus + 1 + n_jobs) are
used. Thus for n_jobs = -2, all CPUs but one are used.
Attributes
----------
kernel_ : kernel object
The kernel used for prediction. In case of binary classification,
the structure of the kernel is the same as the one passed as parameter
but with optimized hyperparameters. In case of multi-class
classification, a CompoundKernel is returned which consists of the
different kernels used in the one-versus-rest classifiers.
log_marginal_likelihood_value_: float
The log-marginal-likelihood of ``self.kernel_.theta``
classes_ : array-like, shape = (n_classes,)
Unique class labels.
n_classes_ : int
The number of classes in the training data
"""
def __init__(self, kernel=None, optimizer="fmin_l_bfgs_b",
n_restarts_optimizer=0, max_iter_predict=100,
warm_start=False, copy_X_train=True, random_state=None,
multi_class="one_vs_rest", n_jobs=1):
self.kernel = kernel
self.optimizer = optimizer
self.n_restarts_optimizer = n_restarts_optimizer
self.max_iter_predict = max_iter_predict
self.warm_start = warm_start
self.copy_X_train = copy_X_train
self.random_state = random_state
self.multi_class = multi_class
self.n_jobs = n_jobs
def fit(self, X, y):
"""Fit Gaussian process classification model
Parameters
----------
X : array-like, shape = (n_samples, n_features)
Training data
y : array-like, shape = (n_samples,)
            Target values (class labels); binary or multi-class.
Returns
-------
self : returns an instance of self.
"""
X, y = check_X_y(X, y, multi_output=False)
self.base_estimator_ = _BinaryGaussianProcessClassifierLaplace(
self.kernel, self.optimizer, self.n_restarts_optimizer,
self.max_iter_predict, self.warm_start, self.copy_X_train,
self.random_state)
self.classes_ = np.unique(y)
self.n_classes_ = self.classes_.size
if self.n_classes_ == 1:
raise ValueError("GaussianProcessClassifier requires 2 or more "
"distinct classes. Only class %s present."
% self.classes_[0])
if self.n_classes_ > 2:
if self.multi_class == "one_vs_rest":
self.base_estimator_ = \
OneVsRestClassifier(self.base_estimator_,
n_jobs=self.n_jobs)
elif self.multi_class == "one_vs_one":
self.base_estimator_ = \
OneVsOneClassifier(self.base_estimator_,
n_jobs=self.n_jobs)
else:
raise ValueError("Unknown multi-class mode %s"
% self.multi_class)
self.base_estimator_.fit(X, y)
if self.n_classes_ > 2:
self.log_marginal_likelihood_value_ = np.mean(
[estimator.log_marginal_likelihood()
for estimator in self.base_estimator_.estimators_])
else:
self.log_marginal_likelihood_value_ = \
self.base_estimator_.log_marginal_likelihood()
return self
def predict(self, X):
"""Perform classification on an array of test vectors X.
Parameters
----------
X : array-like, shape = (n_samples, n_features)
Returns
-------
C : array, shape = (n_samples,)
Predicted target values for X, values are from ``classes_``
"""
check_is_fitted(self, ["classes_", "n_classes_"])
X = check_array(X)
return self.base_estimator_.predict(X)
def predict_proba(self, X):
"""Return probability estimates for the test vector X.
Parameters
----------
X : array-like, shape = (n_samples, n_features)
Returns
-------
C : array-like, shape = (n_samples, n_classes)
Returns the probability of the samples for each class in
the model. The columns correspond to the classes in sorted
order, as they appear in the attribute `classes_`.
"""
check_is_fitted(self, ["classes_", "n_classes_"])
if self.n_classes_ > 2 and self.multi_class == "one_vs_one":
raise ValueError("one_vs_one multi-class mode does not support "
"predicting probability estimates. Use "
"one_vs_rest mode instead.")
X = check_array(X)
return self.base_estimator_.predict_proba(X)
@property
def kernel_(self):
if self.n_classes_ == 2:
return self.base_estimator_.kernel_
else:
return CompoundKernel(
[estimator.kernel_
for estimator in self.base_estimator_.estimators_])
def log_marginal_likelihood(self, theta=None, eval_gradient=False):
"""Returns log-marginal likelihood of theta for training data.
In the case of multi-class classification, the mean log-marginal
likelihood of the one-versus-rest classifiers are returned.
Parameters
----------
        theta : array-like, shape = (n_kernel_params,) or None
Kernel hyperparameters for which the log-marginal likelihood is
evaluated. In the case of multi-class classification, theta may
be the hyperparameters of the compound kernel or of an individual
            kernel. In the latter case, all individual kernels get assigned the
same theta values. If None, the precomputed log_marginal_likelihood
of ``self.kernel_.theta`` is returned.
eval_gradient : bool, default: False
If True, the gradient of the log-marginal likelihood with respect
to the kernel hyperparameters at position theta is returned
additionally. Note that gradient computation is not supported
for non-binary classification. If True, theta must not be None.
Returns
-------
log_likelihood : float
Log-marginal likelihood of theta for training data.
log_likelihood_gradient : array, shape = (n_kernel_params,), optional
Gradient of the log-marginal likelihood with respect to the kernel
hyperparameters at position theta.
Only returned when eval_gradient is True.
"""
check_is_fitted(self, ["classes_", "n_classes_"])
if theta is None:
if eval_gradient:
raise ValueError(
"Gradient can only be evaluated for theta!=None")
return self.log_marginal_likelihood_value_
theta = np.asarray(theta)
if self.n_classes_ == 2:
return self.base_estimator_.log_marginal_likelihood(
theta, eval_gradient)
else:
if eval_gradient:
raise NotImplementedError(
"Gradient of log-marginal-likelihood not implemented for "
"multi-class GPC.")
estimators = self.base_estimator_.estimators_
n_dims = estimators[0].kernel_.n_dims
if theta.shape[0] == n_dims: # use same theta for all sub-kernels
return np.mean(
[estimator.log_marginal_likelihood(theta)
for i, estimator in enumerate(estimators)])
elif theta.shape[0] == n_dims * self.classes_.shape[0]:
# theta for compound kernel
return np.mean(
[estimator.log_marginal_likelihood(
theta[n_dims * i:n_dims * (i + 1)])
for i, estimator in enumerate(estimators)])
else:
raise ValueError("Shape of theta must be either %d or %d. "
"Obtained theta with shape %d."
% (n_dims, n_dims * self.classes_.shape[0],
theta.shape[0]))
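# ---------------------------------------------------------------------------
# Hedged self-test (illustrative, not part of upstream scikit-learn): a tiny
# binary problem exercising the public fit / predict / predict_proba flow
# documented above. It only runs when the module is executed directly.
if __name__ == "__main__":
    X_demo = np.array([[0.0], [0.5], [1.0], [4.0], [4.5], [5.0]])
    y_demo = np.array([0, 0, 0, 1, 1, 1])
    clf = GaussianProcessClassifier(kernel=1.0 * RBF(length_scale=1.0),
                                    random_state=0)
    clf.fit(X_demo, y_demo)
    # Class predictions and per-class probabilities at two query points.
    print(clf.predict([[0.2], [4.8]]))
    print(clf.predict_proba([[0.2], [4.8]]))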
| bsd-3-clause |
lidiamcfreitas/FenixScheduleMaker | ScheduleMaker/brython/www/src/Lib/encodings/cp1254.py | 37 | 13809 | """ Python Character Mapping Codec cp1254 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1254.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='cp1254',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Table
decoding_table = (
'\x00' # 0x00 -> NULL
'\x01' # 0x01 -> START OF HEADING
'\x02' # 0x02 -> START OF TEXT
'\x03' # 0x03 -> END OF TEXT
'\x04' # 0x04 -> END OF TRANSMISSION
'\x05' # 0x05 -> ENQUIRY
'\x06' # 0x06 -> ACKNOWLEDGE
'\x07' # 0x07 -> BELL
'\x08' # 0x08 -> BACKSPACE
'\t' # 0x09 -> HORIZONTAL TABULATION
'\n' # 0x0A -> LINE FEED
'\x0b' # 0x0B -> VERTICAL TABULATION
'\x0c' # 0x0C -> FORM FEED
'\r' # 0x0D -> CARRIAGE RETURN
'\x0e' # 0x0E -> SHIFT OUT
'\x0f' # 0x0F -> SHIFT IN
'\x10' # 0x10 -> DATA LINK ESCAPE
'\x11' # 0x11 -> DEVICE CONTROL ONE
'\x12' # 0x12 -> DEVICE CONTROL TWO
'\x13' # 0x13 -> DEVICE CONTROL THREE
'\x14' # 0x14 -> DEVICE CONTROL FOUR
'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
'\x16' # 0x16 -> SYNCHRONOUS IDLE
'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
'\x18' # 0x18 -> CANCEL
'\x19' # 0x19 -> END OF MEDIUM
'\x1a' # 0x1A -> SUBSTITUTE
'\x1b' # 0x1B -> ESCAPE
'\x1c' # 0x1C -> FILE SEPARATOR
'\x1d' # 0x1D -> GROUP SEPARATOR
'\x1e' # 0x1E -> RECORD SEPARATOR
'\x1f' # 0x1F -> UNIT SEPARATOR
' ' # 0x20 -> SPACE
'!' # 0x21 -> EXCLAMATION MARK
'"' # 0x22 -> QUOTATION MARK
'#' # 0x23 -> NUMBER SIGN
'$' # 0x24 -> DOLLAR SIGN
'%' # 0x25 -> PERCENT SIGN
'&' # 0x26 -> AMPERSAND
"'" # 0x27 -> APOSTROPHE
'(' # 0x28 -> LEFT PARENTHESIS
')' # 0x29 -> RIGHT PARENTHESIS
'*' # 0x2A -> ASTERISK
'+' # 0x2B -> PLUS SIGN
',' # 0x2C -> COMMA
'-' # 0x2D -> HYPHEN-MINUS
'.' # 0x2E -> FULL STOP
'/' # 0x2F -> SOLIDUS
'0' # 0x30 -> DIGIT ZERO
'1' # 0x31 -> DIGIT ONE
'2' # 0x32 -> DIGIT TWO
'3' # 0x33 -> DIGIT THREE
'4' # 0x34 -> DIGIT FOUR
'5' # 0x35 -> DIGIT FIVE
'6' # 0x36 -> DIGIT SIX
'7' # 0x37 -> DIGIT SEVEN
'8' # 0x38 -> DIGIT EIGHT
'9' # 0x39 -> DIGIT NINE
':' # 0x3A -> COLON
';' # 0x3B -> SEMICOLON
'<' # 0x3C -> LESS-THAN SIGN
'=' # 0x3D -> EQUALS SIGN
'>' # 0x3E -> GREATER-THAN SIGN
'?' # 0x3F -> QUESTION MARK
'@' # 0x40 -> COMMERCIAL AT
'A' # 0x41 -> LATIN CAPITAL LETTER A
'B' # 0x42 -> LATIN CAPITAL LETTER B
'C' # 0x43 -> LATIN CAPITAL LETTER C
'D' # 0x44 -> LATIN CAPITAL LETTER D
'E' # 0x45 -> LATIN CAPITAL LETTER E
'F' # 0x46 -> LATIN CAPITAL LETTER F
'G' # 0x47 -> LATIN CAPITAL LETTER G
'H' # 0x48 -> LATIN CAPITAL LETTER H
'I' # 0x49 -> LATIN CAPITAL LETTER I
'J' # 0x4A -> LATIN CAPITAL LETTER J
'K' # 0x4B -> LATIN CAPITAL LETTER K
'L' # 0x4C -> LATIN CAPITAL LETTER L
'M' # 0x4D -> LATIN CAPITAL LETTER M
'N' # 0x4E -> LATIN CAPITAL LETTER N
'O' # 0x4F -> LATIN CAPITAL LETTER O
'P' # 0x50 -> LATIN CAPITAL LETTER P
'Q' # 0x51 -> LATIN CAPITAL LETTER Q
'R' # 0x52 -> LATIN CAPITAL LETTER R
'S' # 0x53 -> LATIN CAPITAL LETTER S
'T' # 0x54 -> LATIN CAPITAL LETTER T
'U' # 0x55 -> LATIN CAPITAL LETTER U
'V' # 0x56 -> LATIN CAPITAL LETTER V
'W' # 0x57 -> LATIN CAPITAL LETTER W
'X' # 0x58 -> LATIN CAPITAL LETTER X
'Y' # 0x59 -> LATIN CAPITAL LETTER Y
'Z' # 0x5A -> LATIN CAPITAL LETTER Z
'[' # 0x5B -> LEFT SQUARE BRACKET
'\\' # 0x5C -> REVERSE SOLIDUS
']' # 0x5D -> RIGHT SQUARE BRACKET
'^' # 0x5E -> CIRCUMFLEX ACCENT
'_' # 0x5F -> LOW LINE
'`' # 0x60 -> GRAVE ACCENT
'a' # 0x61 -> LATIN SMALL LETTER A
'b' # 0x62 -> LATIN SMALL LETTER B
'c' # 0x63 -> LATIN SMALL LETTER C
'd' # 0x64 -> LATIN SMALL LETTER D
'e' # 0x65 -> LATIN SMALL LETTER E
'f' # 0x66 -> LATIN SMALL LETTER F
'g' # 0x67 -> LATIN SMALL LETTER G
'h' # 0x68 -> LATIN SMALL LETTER H
'i' # 0x69 -> LATIN SMALL LETTER I
'j' # 0x6A -> LATIN SMALL LETTER J
'k' # 0x6B -> LATIN SMALL LETTER K
'l' # 0x6C -> LATIN SMALL LETTER L
'm' # 0x6D -> LATIN SMALL LETTER M
'n' # 0x6E -> LATIN SMALL LETTER N
'o' # 0x6F -> LATIN SMALL LETTER O
'p' # 0x70 -> LATIN SMALL LETTER P
'q' # 0x71 -> LATIN SMALL LETTER Q
'r' # 0x72 -> LATIN SMALL LETTER R
's' # 0x73 -> LATIN SMALL LETTER S
't' # 0x74 -> LATIN SMALL LETTER T
'u' # 0x75 -> LATIN SMALL LETTER U
'v' # 0x76 -> LATIN SMALL LETTER V
'w' # 0x77 -> LATIN SMALL LETTER W
'x' # 0x78 -> LATIN SMALL LETTER X
'y' # 0x79 -> LATIN SMALL LETTER Y
'z' # 0x7A -> LATIN SMALL LETTER Z
'{' # 0x7B -> LEFT CURLY BRACKET
'|' # 0x7C -> VERTICAL LINE
'}' # 0x7D -> RIGHT CURLY BRACKET
'~' # 0x7E -> TILDE
'\x7f' # 0x7F -> DELETE
'\u20ac' # 0x80 -> EURO SIGN
'\ufffe' # 0x81 -> UNDEFINED
'\u201a' # 0x82 -> SINGLE LOW-9 QUOTATION MARK
'\u0192' # 0x83 -> LATIN SMALL LETTER F WITH HOOK
'\u201e' # 0x84 -> DOUBLE LOW-9 QUOTATION MARK
'\u2026' # 0x85 -> HORIZONTAL ELLIPSIS
'\u2020' # 0x86 -> DAGGER
'\u2021' # 0x87 -> DOUBLE DAGGER
'\u02c6' # 0x88 -> MODIFIER LETTER CIRCUMFLEX ACCENT
'\u2030' # 0x89 -> PER MILLE SIGN
'\u0160' # 0x8A -> LATIN CAPITAL LETTER S WITH CARON
'\u2039' # 0x8B -> SINGLE LEFT-POINTING ANGLE QUOTATION MARK
'\u0152' # 0x8C -> LATIN CAPITAL LIGATURE OE
'\ufffe' # 0x8D -> UNDEFINED
'\ufffe' # 0x8E -> UNDEFINED
'\ufffe' # 0x8F -> UNDEFINED
'\ufffe' # 0x90 -> UNDEFINED
'\u2018' # 0x91 -> LEFT SINGLE QUOTATION MARK
'\u2019' # 0x92 -> RIGHT SINGLE QUOTATION MARK
'\u201c' # 0x93 -> LEFT DOUBLE QUOTATION MARK
'\u201d' # 0x94 -> RIGHT DOUBLE QUOTATION MARK
'\u2022' # 0x95 -> BULLET
'\u2013' # 0x96 -> EN DASH
'\u2014' # 0x97 -> EM DASH
'\u02dc' # 0x98 -> SMALL TILDE
'\u2122' # 0x99 -> TRADE MARK SIGN
'\u0161' # 0x9A -> LATIN SMALL LETTER S WITH CARON
'\u203a' # 0x9B -> SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
'\u0153' # 0x9C -> LATIN SMALL LIGATURE OE
'\ufffe' # 0x9D -> UNDEFINED
'\ufffe' # 0x9E -> UNDEFINED
'\u0178' # 0x9F -> LATIN CAPITAL LETTER Y WITH DIAERESIS
'\xa0' # 0xA0 -> NO-BREAK SPACE
'\xa1' # 0xA1 -> INVERTED EXCLAMATION MARK
'\xa2' # 0xA2 -> CENT SIGN
'\xa3' # 0xA3 -> POUND SIGN
'\xa4' # 0xA4 -> CURRENCY SIGN
'\xa5' # 0xA5 -> YEN SIGN
'\xa6' # 0xA6 -> BROKEN BAR
'\xa7' # 0xA7 -> SECTION SIGN
'\xa8' # 0xA8 -> DIAERESIS
'\xa9' # 0xA9 -> COPYRIGHT SIGN
'\xaa' # 0xAA -> FEMININE ORDINAL INDICATOR
'\xab' # 0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
'\xac' # 0xAC -> NOT SIGN
'\xad' # 0xAD -> SOFT HYPHEN
'\xae' # 0xAE -> REGISTERED SIGN
'\xaf' # 0xAF -> MACRON
'\xb0' # 0xB0 -> DEGREE SIGN
'\xb1' # 0xB1 -> PLUS-MINUS SIGN
'\xb2' # 0xB2 -> SUPERSCRIPT TWO
'\xb3' # 0xB3 -> SUPERSCRIPT THREE
'\xb4' # 0xB4 -> ACUTE ACCENT
'\xb5' # 0xB5 -> MICRO SIGN
'\xb6' # 0xB6 -> PILCROW SIGN
'\xb7' # 0xB7 -> MIDDLE DOT
'\xb8' # 0xB8 -> CEDILLA
'\xb9' # 0xB9 -> SUPERSCRIPT ONE
'\xba' # 0xBA -> MASCULINE ORDINAL INDICATOR
'\xbb' # 0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
'\xbc' # 0xBC -> VULGAR FRACTION ONE QUARTER
'\xbd' # 0xBD -> VULGAR FRACTION ONE HALF
'\xbe' # 0xBE -> VULGAR FRACTION THREE QUARTERS
'\xbf' # 0xBF -> INVERTED QUESTION MARK
'\xc0' # 0xC0 -> LATIN CAPITAL LETTER A WITH GRAVE
'\xc1' # 0xC1 -> LATIN CAPITAL LETTER A WITH ACUTE
'\xc2' # 0xC2 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
'\xc3' # 0xC3 -> LATIN CAPITAL LETTER A WITH TILDE
'\xc4' # 0xC4 -> LATIN CAPITAL LETTER A WITH DIAERESIS
'\xc5' # 0xC5 -> LATIN CAPITAL LETTER A WITH RING ABOVE
'\xc6' # 0xC6 -> LATIN CAPITAL LETTER AE
'\xc7' # 0xC7 -> LATIN CAPITAL LETTER C WITH CEDILLA
'\xc8' # 0xC8 -> LATIN CAPITAL LETTER E WITH GRAVE
'\xc9' # 0xC9 -> LATIN CAPITAL LETTER E WITH ACUTE
'\xca' # 0xCA -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
'\xcb' # 0xCB -> LATIN CAPITAL LETTER E WITH DIAERESIS
'\xcc' # 0xCC -> LATIN CAPITAL LETTER I WITH GRAVE
'\xcd' # 0xCD -> LATIN CAPITAL LETTER I WITH ACUTE
'\xce' # 0xCE -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
'\xcf' # 0xCF -> LATIN CAPITAL LETTER I WITH DIAERESIS
'\u011e' # 0xD0 -> LATIN CAPITAL LETTER G WITH BREVE
'\xd1' # 0xD1 -> LATIN CAPITAL LETTER N WITH TILDE
'\xd2' # 0xD2 -> LATIN CAPITAL LETTER O WITH GRAVE
'\xd3' # 0xD3 -> LATIN CAPITAL LETTER O WITH ACUTE
'\xd4' # 0xD4 -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
'\xd5' # 0xD5 -> LATIN CAPITAL LETTER O WITH TILDE
'\xd6' # 0xD6 -> LATIN CAPITAL LETTER O WITH DIAERESIS
'\xd7' # 0xD7 -> MULTIPLICATION SIGN
'\xd8' # 0xD8 -> LATIN CAPITAL LETTER O WITH STROKE
'\xd9' # 0xD9 -> LATIN CAPITAL LETTER U WITH GRAVE
'\xda' # 0xDA -> LATIN CAPITAL LETTER U WITH ACUTE
'\xdb' # 0xDB -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
'\xdc' # 0xDC -> LATIN CAPITAL LETTER U WITH DIAERESIS
'\u0130' # 0xDD -> LATIN CAPITAL LETTER I WITH DOT ABOVE
'\u015e' # 0xDE -> LATIN CAPITAL LETTER S WITH CEDILLA
'\xdf' # 0xDF -> LATIN SMALL LETTER SHARP S
'\xe0' # 0xE0 -> LATIN SMALL LETTER A WITH GRAVE
'\xe1' # 0xE1 -> LATIN SMALL LETTER A WITH ACUTE
'\xe2' # 0xE2 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
'\xe3' # 0xE3 -> LATIN SMALL LETTER A WITH TILDE
'\xe4' # 0xE4 -> LATIN SMALL LETTER A WITH DIAERESIS
'\xe5' # 0xE5 -> LATIN SMALL LETTER A WITH RING ABOVE
'\xe6' # 0xE6 -> LATIN SMALL LETTER AE
'\xe7' # 0xE7 -> LATIN SMALL LETTER C WITH CEDILLA
'\xe8' # 0xE8 -> LATIN SMALL LETTER E WITH GRAVE
'\xe9' # 0xE9 -> LATIN SMALL LETTER E WITH ACUTE
'\xea' # 0xEA -> LATIN SMALL LETTER E WITH CIRCUMFLEX
'\xeb' # 0xEB -> LATIN SMALL LETTER E WITH DIAERESIS
'\xec' # 0xEC -> LATIN SMALL LETTER I WITH GRAVE
'\xed' # 0xED -> LATIN SMALL LETTER I WITH ACUTE
'\xee' # 0xEE -> LATIN SMALL LETTER I WITH CIRCUMFLEX
'\xef' # 0xEF -> LATIN SMALL LETTER I WITH DIAERESIS
'\u011f' # 0xF0 -> LATIN SMALL LETTER G WITH BREVE
'\xf1' # 0xF1 -> LATIN SMALL LETTER N WITH TILDE
'\xf2' # 0xF2 -> LATIN SMALL LETTER O WITH GRAVE
'\xf3' # 0xF3 -> LATIN SMALL LETTER O WITH ACUTE
'\xf4' # 0xF4 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
'\xf5' # 0xF5 -> LATIN SMALL LETTER O WITH TILDE
'\xf6' # 0xF6 -> LATIN SMALL LETTER O WITH DIAERESIS
'\xf7' # 0xF7 -> DIVISION SIGN
'\xf8' # 0xF8 -> LATIN SMALL LETTER O WITH STROKE
'\xf9' # 0xF9 -> LATIN SMALL LETTER U WITH GRAVE
'\xfa' # 0xFA -> LATIN SMALL LETTER U WITH ACUTE
'\xfb' # 0xFB -> LATIN SMALL LETTER U WITH CIRCUMFLEX
'\xfc' # 0xFC -> LATIN SMALL LETTER U WITH DIAERESIS
'\u0131' # 0xFD -> LATIN SMALL LETTER DOTLESS I
'\u015f' # 0xFE -> LATIN SMALL LETTER S WITH CEDILLA
'\xff' # 0xFF -> LATIN SMALL LETTER Y WITH DIAERESIS
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
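# ---------------------------------------------------------------------------
# Hedged usage sketch (illustrative, not part of the generated codec module):
# the charmap tables above drive both directions of the codec, so a quick
# round trip of Turkish text exercises them. It only runs when the module is
# executed directly.
if __name__ == "__main__":
    codec = Codec()
    sample = 'İstanbul ğüşıöç ĞÜŞİÖÇ'
    encoded, consumed = codec.encode(sample)
    decoded, _ = codec.decode(encoded)
    # Every character above has a CP1254 code point, so the round trip must
    # reproduce the original string.
    assert decoded == sample and consumed == len(sample)
    print(encoded)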
| bsd-2-clause |
chainer/chainer | chainer/utils/sparse.py | 3 | 7680 | import numpy
import chainer
from chainer import backend
from chainer import cuda
def _add_at(add_at, x, row, col, data):
assert data.size > 0
last_nz = data.size - (data != 0)[::-1].argmax()
add_at(x, (row[:last_nz], col[:last_nz]), data[:last_nz])
class CooMatrix(object):
"""A sparse matrix in COO format.
Args:
data (:ref:`ndarray`): The entries of the matrix.
The entries are usually non-zero-elements in the matrix.
row (:ref:`ndarray`): The row indices of the matrix
entries.
col (:ref:`ndarray`): The column indices of the matrix
entries.
shape (tuple of int): The shape of the matrix in dense format.
        order ('C', 'F', 'other' or None): If ``'C'``, the matrix is assumed
that its row indices are sorted. If ``'F'``, the matrix is assumed
that its column indices are sorted. If ``'other'``, the matrix is
assumed as neither 'C' order nor 'F' order. If ``None`` (this is
the default), the matrix is automatically checked if it is 'C'
order, 'F' order or another. This information will be used by some
functions like :func:`~chainer.functions.sparse_matmul` as a hint
to improve performance.
requires_grad (bool): If ``True``, gradient of this sparse matrix will
be computed in back-propagation.
.. seealso::
See :func:`~chainer.utils.to_coo` for how to construct a COO matrix
from an array.
"""
def __init__(self, data, row, col, shape, order=None,
requires_grad=False):
if not (1 <= data.ndim <= 2):
raise ValueError('ndim of data must be 1 or 2.')
if not (data.ndim == row.ndim == col.ndim):
raise ValueError('ndim of data, row and col must be the same.')
if len(shape) != 2:
raise ValueError('length of shape must be 2.')
if not (shape[0] > 0 and shape[1] > 0):
raise ValueError('numbers in shape must be greater than 0.')
if order not in ('C', 'F', 'other', None):
raise ValueError('order must be \'C\', \'F\', \'other\' or None.')
self.data = chainer.Variable(data, requires_grad=requires_grad)
self.row = row
self.col = col
self.shape = shape # (row, col)
self.order = order
if order is None:
self.order = get_order(row, col)
def to_dense(self):
"""Returns a dense matrix format of this sparse matrix."""
data = self.data
if data.ndim == 1:
shape = self.shape
elif data.ndim == 2:
shape = (data.shape[0], *self.shape)
else:
assert False
xp = data.xp
x = xp.zeros(shape, dtype=data.dtype)
if data.size > 0:
row = self.row
col = self.col
if xp is numpy:
add_at = numpy.add.at
elif xp is cuda.cupy:
add_at = cuda.cupyx.scatter_add
data = data.array
if data.ndim == 1:
_add_at(add_at, x, row, col, data)
elif data.ndim == 2:
for i in range(data.shape[0]):
_add_at(add_at, x[i], row[i], col[i], data[i])
else:
assert False
return x
def to_coo(x, ldnz=None, requires_grad=False):
"""Returns a single or a batch of matrices in COO format.
Args:
x (:ref:`ndarray`): Input dense matrix. The ndim of
``x`` must be two or three. If ndim is two, it is treated as
a single matrix. If three, it is treated as batched matrices.
ldnz (int): Size of arrays for data, row index and column index to be
            created. The actual size becomes max(nnz, ldnz), where nnz is the
            number of non-zero elements in an input dense matrix.
requires_grad (bool): If ``True``, gradient of sparse matrix will be
computed in back-propagation.
Returns:
~chainer.utils.CooMatrix: A sparse matrix or batched sparse matrices
in COO format of a given dense matrix or batched dense matrices.
.. admonition:: Example
Create a :class:`~chainer.utils.CooMatrix` from an array with 2
non-zero elements and 4 zeros and access its attributes. No batch
dimension is involved.
.. doctest::
>>> data = np.array([[0, 2, 0], [-1, 0, 0]], np.float32)
>>> x = chainer.utils.to_coo(data)
>>> x.data
variable([ 2., -1.])
>>> x.row
array([0, 1], dtype=int32)
>>> x.col
array([1, 0], dtype=int32)
>>> x.shape
(2, 3)
"""
xp = backend.get_array_module(x)
if x.ndim == 2:
_row, _col = xp.where(x != 0)
nnz = len(_row)
if ldnz is None or ldnz < nnz:
ldnz = nnz
data = xp.zeros((ldnz), dtype=x.dtype)
row = xp.full((ldnz), -1, dtype=xp.int32)
col = xp.full((ldnz), -1, dtype=xp.int32)
data[:nnz] = x[_row, _col]
row[:nnz] = xp.array(_row).astype(xp.int32)
col[:nnz] = xp.array(_col).astype(xp.int32)
shape = x.shape
return CooMatrix(data, row, col, shape,
requires_grad=requires_grad)
elif x.ndim == 3:
# first axis is batch axis
nb = x.shape[0]
if ldnz is None:
ldnz = 0
for i in range(nb):
ldnz = max(ldnz, len(xp.where(x[i] != 0)[0]))
data = xp.empty((nb, ldnz), dtype=x.dtype)
row = xp.empty((nb, ldnz), dtype=xp.int32)
col = xp.empty((nb, ldnz), dtype=xp.int32)
for i in range(nb):
coo = to_coo(x[i], ldnz)
data[i] = coo.data.data
row[i] = coo.row
col[i] = coo.col
shape = x.shape[1:]
return CooMatrix(data, row, col, shape,
requires_grad=requires_grad)
else:
raise ValueError('ndim of x must be 2 or 3.')
def get_order(row, col):
"""Check if a coo matrix with given row and col is C or F order.
Args:
row (:ref:`ndarray`): The row indices of the matrix
entries.
col (:ref:`ndarray`): The column indices of the matrix
entries.
Returns:
Returns ``'C'`` when a coo matrix with given row and column indices is
C order, in other words, the row indices are sorted. Returns ``'F'``
when it is F order, in other words, the column indices are sorted.
Returns ``'other'`` otherwise.
"""
if _is_c_order(row, col):
return 'C'
if _is_c_order(col, row):
return 'F'
return 'other'
def _is_c_order(row, col):
"""Check if a coo matrix with given row and col is c_order"""
if row.shape != col.shape:
raise ValueError('shape of row and col must be the same.')
if row.ndim != 1:
for i in range(row.shape[0]):
if not _is_c_order(row[i], col[i]):
return False
return True
xp = backend.get_array_module(row)
_row = row[col >= 0]
_col = col[row >= 0]
    if _row[_row < 0].size > 0 or _col[_col < 0].size > 0:
raise ValueError('invalid index combination of row and col.')
if _row.shape[0] <= 1:
return True
row_diff = xp.zeros(_row.shape, dtype=_row.dtype)
row_diff[1:] = _row[1:] - _row[:-1]
if xp.amin(row_diff) < 0:
return False
col_diff = xp.zeros(_col.shape, dtype=_col.dtype)
col_diff[1:] = _col[1:] - _col[:-1]
col_diff[(row_diff > 0)] = 0
return xp.amin(col_diff) >= 0
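# ---------------------------------------------------------------------------
# Hedged usage sketch (illustrative, not part of upstream Chainer): the
# docstrings above explain COO conversion and the C/F order hint, so a small
# round trip makes them concrete. It only runs when the module is executed
# directly.
if __name__ == '__main__':
    dense = numpy.array([[0, 2, 0],
                         [-1, 0, 3]], dtype=numpy.float32)
    coo = to_coo(dense, requires_grad=True)
    # numpy.where returns row-sorted indices, so the order hint comes out 'C'.
    print(coo.order, coo.row, coo.col)
    # to_dense() rebuilds the dense matrix from (data, row, col, shape).
    print(coo.to_dense())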
| mit |
sudheesh001/oh-mainline | vendor/packages/scrapy/scrapy/contrib/httpcache.py | 16 | 2156 | from __future__ import with_statement
import os
from time import time
import cPickle as pickle
from scrapy.http import Headers
from scrapy.responsetypes import responsetypes
from scrapy.utils.request import request_fingerprint
from scrapy.utils.project import data_path
from scrapy import conf
class DbmCacheStorage(object):
def __init__(self, settings=conf.settings):
self.cachedir = data_path(settings['HTTPCACHE_DIR'])
self.expiration_secs = settings.getint('HTTPCACHE_EXPIRATION_SECS')
self.dbmodule = __import__(settings['HTTPCACHE_DBM_MODULE'])
self.dbs = {}
def open_spider(self, spider):
dbpath = os.path.join(self.cachedir, '%s.db' % spider.name)
self.dbs[spider] = self.dbmodule.open(dbpath, 'c')
def close_spider(self, spider):
self.dbs[spider].close()
def retrieve_response(self, spider, request):
data = self._read_data(spider, request)
if data is None:
return # not cached
url = data['url']
status = data['status']
headers = Headers(data['headers'])
body = data['body']
respcls = responsetypes.from_args(headers=headers, url=url)
response = respcls(url=url, headers=headers, status=status, body=body)
return response
def store_response(self, spider, request, response):
key = self._request_key(request)
data = {
'status': response.status,
'url': response.url,
'headers': dict(response.headers),
'body': response.body,
}
self.dbs[spider]['%s_data' % key] = pickle.dumps(data, protocol=2)
self.dbs[spider]['%s_time' % key] = str(time())
def _read_data(self, spider, request):
key = self._request_key(request)
db = self.dbs[spider]
tkey = '%s_time' % key
if not db.has_key(tkey):
return # not found
ts = db[tkey]
if 0 < self.expiration_secs < time() - float(ts):
return # expired
return pickle.loads(db['%s_data' % key])
def _request_key(self, request):
return request_fingerprint(request)
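# ---------------------------------------------------------------------------
# Hedged configuration sketch (illustrative, not part of upstream Scrapy).
# DbmCacheStorage itself only reads HTTPCACHE_DIR, HTTPCACHE_EXPIRATION_SECS
# and HTTPCACHE_DBM_MODULE (see __init__ above); the enable/selector switches
# included below are the usual Scrapy cache settings and are assumptions
# about the wider configuration. The values are examples, not defaults.
def example_httpcache_settings():
    return {
        'HTTPCACHE_ENABLED': True,
        'HTTPCACHE_STORAGE': 'scrapy.contrib.httpcache.DbmCacheStorage',
        'HTTPCACHE_DIR': 'httpcache',        # resolved through data_path()
        'HTTPCACHE_EXPIRATION_SECS': 3600,   # 0 disables expiration (see _read_data)
        'HTTPCACHE_DBM_MODULE': 'anydbm',    # any dbm-style module name
    }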
| agpl-3.0 |
proudlygeek/proudlygeek-blog | pygments/lexers/other.py | 14 | 129636 | # -*- coding: utf-8 -*-
"""
pygments.lexers.other
~~~~~~~~~~~~~~~~~~~~~
Lexers for other languages.
:copyright: Copyright 2006-2010 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, \
this, do_insertions
from pygments.token import Error, Punctuation, Literal, Token, \
Text, Comment, Operator, Keyword, Name, String, Number, Generic
from pygments.util import shebang_matches
from pygments.lexers.web import HtmlLexer
__all__ = ['SqlLexer', 'MySqlLexer', 'SqliteConsoleLexer', 'BrainfuckLexer',
'BashLexer', 'BatchLexer', 'BefungeLexer', 'RedcodeLexer',
'MOOCodeLexer', 'SmalltalkLexer', 'TcshLexer', 'LogtalkLexer',
'GnuplotLexer', 'PovrayLexer', 'AppleScriptLexer',
'BashSessionLexer', 'ModelicaLexer', 'RebolLexer', 'ABAPLexer',
'NewspeakLexer', 'GherkinLexer', 'AsymptoteLexer',
'PostScriptLexer', 'AutohotkeyLexer', 'GoodDataCLLexer',
'MaqlLexer', 'ProtoBufLexer', 'HybrisLexer']
line_re = re.compile('.*?\n')
class SqlLexer(RegexLexer):
"""
Lexer for Structured Query Language. Currently, this lexer does
not recognize any special syntax except ANSI SQL.
"""
name = 'SQL'
aliases = ['sql']
filenames = ['*.sql']
mimetypes = ['text/x-sql']
flags = re.IGNORECASE
tokens = {
'root': [
(r'\s+', Text),
(r'--.*?\n', Comment.Single),
(r'/\*', Comment.Multiline, 'multiline-comments'),
(r'(ABORT|ABS|ABSOLUTE|ACCESS|ADA|ADD|ADMIN|AFTER|AGGREGATE|'
r'ALIAS|ALL|ALLOCATE|ALTER|ANALYSE|ANALYZE|AND|ANY|ARE|AS|'
r'ASC|ASENSITIVE|ASSERTION|ASSIGNMENT|ASYMMETRIC|AT|ATOMIC|'
r'AUTHORIZATION|AVG|BACKWARD|BEFORE|BEGIN|BETWEEN|BITVAR|'
r'BIT_LENGTH|BOTH|BREADTH|BY|C|CACHE|CALL|CALLED|CARDINALITY|'
r'CASCADE|CASCADED|CASE|CAST|CATALOG|CATALOG_NAME|CHAIN|'
r'CHARACTERISTICS|CHARACTER_LENGTH|CHARACTER_SET_CATALOG|'
r'CHARACTER_SET_NAME|CHARACTER_SET_SCHEMA|CHAR_LENGTH|CHECK|'
r'CHECKED|CHECKPOINT|CLASS|CLASS_ORIGIN|CLOB|CLOSE|CLUSTER|'
r'COALSECE|COBOL|COLLATE|COLLATION|COLLATION_CATALOG|'
r'COLLATION_NAME|COLLATION_SCHEMA|COLUMN|COLUMN_NAME|'
r'COMMAND_FUNCTION|COMMAND_FUNCTION_CODE|COMMENT|COMMIT|'
r'COMMITTED|COMPLETION|CONDITION_NUMBER|CONNECT|CONNECTION|'
r'CONNECTION_NAME|CONSTRAINT|CONSTRAINTS|CONSTRAINT_CATALOG|'
r'CONSTRAINT_NAME|CONSTRAINT_SCHEMA|CONSTRUCTOR|CONTAINS|'
r'CONTINUE|CONVERSION|CONVERT|COPY|CORRESPONTING|COUNT|'
r'CREATE|CREATEDB|CREATEUSER|CROSS|CUBE|CURRENT|CURRENT_DATE|'
r'CURRENT_PATH|CURRENT_ROLE|CURRENT_TIME|CURRENT_TIMESTAMP|'
r'CURRENT_USER|CURSOR|CURSOR_NAME|CYCLE|DATA|DATABASE|'
r'DATETIME_INTERVAL_CODE|DATETIME_INTERVAL_PRECISION|DAY|'
r'DEALLOCATE|DECLARE|DEFAULT|DEFAULTS|DEFERRABLE|DEFERRED|'
r'DEFINED|DEFINER|DELETE|DELIMITER|DELIMITERS|DEREF|DESC|'
r'DESCRIBE|DESCRIPTOR|DESTROY|DESTRUCTOR|DETERMINISTIC|'
r'DIAGNOSTICS|DICTIONARY|DISCONNECT|DISPATCH|DISTINCT|DO|'
r'DOMAIN|DROP|DYNAMIC|DYNAMIC_FUNCTION|DYNAMIC_FUNCTION_CODE|'
r'EACH|ELSE|ENCODING|ENCRYPTED|END|END-EXEC|EQUALS|ESCAPE|EVERY|'
r'EXCEPT|ESCEPTION|EXCLUDING|EXCLUSIVE|EXEC|EXECUTE|EXISTING|'
r'EXISTS|EXPLAIN|EXTERNAL|EXTRACT|FALSE|FETCH|FINAL|FIRST|FOR|'
r'FORCE|FOREIGN|FORTRAN|FORWARD|FOUND|FREE|FREEZE|FROM|FULL|'
r'FUNCTION|G|GENERAL|GENERATED|GET|GLOBAL|GO|GOTO|GRANT|GRANTED|'
r'GROUP|GROUPING|HANDLER|HAVING|HIERARCHY|HOLD|HOST|IDENTITY|'
r'IGNORE|ILIKE|IMMEDIATE|IMMUTABLE|IMPLEMENTATION|IMPLICIT|IN|'
r'INCLUDING|INCREMENT|INDEX|INDITCATOR|INFIX|INHERITS|INITIALIZE|'
r'INITIALLY|INNER|INOUT|INPUT|INSENSITIVE|INSERT|INSTANTIABLE|'
r'INSTEAD|INTERSECT|INTO|INVOKER|IS|ISNULL|ISOLATION|ITERATE|JOIN|'
r'KEY|KEY_MEMBER|KEY_TYPE|LANCOMPILER|LANGUAGE|LARGE|LAST|'
r'LATERAL|LEADING|LEFT|LENGTH|LESS|LEVEL|LIKE|LIMIT|LISTEN|LOAD|'
r'LOCAL|LOCALTIME|LOCALTIMESTAMP|LOCATION|LOCATOR|LOCK|LOWER|'
r'MAP|MATCH|MAX|MAXVALUE|MESSAGE_LENGTH|MESSAGE_OCTET_LENGTH|'
r'MESSAGE_TEXT|METHOD|MIN|MINUTE|MINVALUE|MOD|MODE|MODIFIES|'
r'MODIFY|MONTH|MORE|MOVE|MUMPS|NAMES|NATIONAL|NATURAL|NCHAR|'
r'NCLOB|NEW|NEXT|NO|NOCREATEDB|NOCREATEUSER|NONE|NOT|NOTHING|'
r'NOTIFY|NOTNULL|NULL|NULLABLE|NULLIF|OBJECT|OCTET_LENGTH|OF|OFF|'
r'OFFSET|OIDS|OLD|ON|ONLY|OPEN|OPERATION|OPERATOR|OPTION|OPTIONS|'
r'OR|ORDER|ORDINALITY|OUT|OUTER|OUTPUT|OVERLAPS|OVERLAY|OVERRIDING|'
r'OWNER|PAD|PARAMETER|PARAMETERS|PARAMETER_MODE|PARAMATER_NAME|'
r'PARAMATER_ORDINAL_POSITION|PARAMETER_SPECIFIC_CATALOG|'
r'PARAMETER_SPECIFIC_NAME|PARAMATER_SPECIFIC_SCHEMA|PARTIAL|'
r'PASCAL|PENDANT|PLACING|PLI|POSITION|POSTFIX|PRECISION|PREFIX|'
r'PREORDER|PREPARE|PRESERVE|PRIMARY|PRIOR|PRIVILEGES|PROCEDURAL|'
r'PROCEDURE|PUBLIC|READ|READS|RECHECK|RECURSIVE|REF|REFERENCES|'
r'REFERENCING|REINDEX|RELATIVE|RENAME|REPEATABLE|REPLACE|RESET|'
r'RESTART|RESTRICT|RESULT|RETURN|RETURNED_LENGTH|'
r'RETURNED_OCTET_LENGTH|RETURNED_SQLSTATE|RETURNS|REVOKE|RIGHT|'
r'ROLE|ROLLBACK|ROLLUP|ROUTINE|ROUTINE_CATALOG|ROUTINE_NAME|'
r'ROUTINE_SCHEMA|ROW|ROWS|ROW_COUNT|RULE|SAVE_POINT|SCALE|SCHEMA|'
r'SCHEMA_NAME|SCOPE|SCROLL|SEARCH|SECOND|SECURITY|SELECT|SELF|'
r'SENSITIVE|SERIALIZABLE|SERVER_NAME|SESSION|SESSION_USER|SET|'
r'SETOF|SETS|SHARE|SHOW|SIMILAR|SIMPLE|SIZE|SOME|SOURCE|SPACE|'
r'SPECIFIC|SPECIFICTYPE|SPECIFIC_NAME|SQL|SQLCODE|SQLERROR|'
r'SQLEXCEPTION|SQLSTATE|SQLWARNINIG|STABLE|START|STATE|STATEMENT|'
r'STATIC|STATISTICS|STDIN|STDOUT|STORAGE|STRICT|STRUCTURE|STYPE|'
r'SUBCLASS_ORIGIN|SUBLIST|SUBSTRING|SUM|SYMMETRIC|SYSID|SYSTEM|'
r'SYSTEM_USER|TABLE|TABLE_NAME| TEMP|TEMPLATE|TEMPORARY|TERMINATE|'
r'THAN|THEN|TIMESTAMP|TIMEZONE_HOUR|TIMEZONE_MINUTE|TO|TOAST|'
r'TRAILING|TRANSACTION|TRANSACTIONS_COMMITTED|'
r'TRANSACTIONS_ROLLED_BACK|TRANSACTION_ACTIVE|TRANSFORM|'
r'TRANSFORMS|TRANSLATE|TRANSLATION|TREAT|TRIGGER|TRIGGER_CATALOG|'
r'TRIGGER_NAME|TRIGGER_SCHEMA|TRIM|TRUE|TRUNCATE|TRUSTED|TYPE|'
r'UNCOMMITTED|UNDER|UNENCRYPTED|UNION|UNIQUE|UNKNOWN|UNLISTEN|'
r'UNNAMED|UNNEST|UNTIL|UPDATE|UPPER|USAGE|USER|'
r'USER_DEFINED_TYPE_CATALOG|USER_DEFINED_TYPE_NAME|'
r'USER_DEFINED_TYPE_SCHEMA|USING|VACUUM|VALID|VALIDATOR|VALUES|'
r'VARIABLE|VERBOSE|VERSION|VIEW|VOLATILE|WHEN|WHENEVER|WHERE|'
r'WITH|WITHOUT|WORK|WRITE|YEAR|ZONE)\b', Keyword),
(r'(ARRAY|BIGINT|BINARY|BIT|BLOB|BOOLEAN|CHAR|CHARACTER|DATE|'
r'DEC|DECIMAL|FLOAT|INT|INTEGER|INTERVAL|NUMBER|NUMERIC|REAL|'
r'SERIAL|SMALLINT|VARCHAR|VARYING|INT8|SERIAL8|TEXT)\b',
Name.Builtin),
(r'[+*/<>=~!@#%^&|`?^-]', Operator),
(r'[0-9]+', Number.Integer),
# TODO: Backslash escapes?
(r"'(''|[^'])*'", String.Single),
(r'"(""|[^"])*"', String.Symbol), # not a real string literal in ANSI SQL
(r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
(r'[;:()\[\],\.]', Punctuation)
],
'multiline-comments': [
(r'/\*', Comment.Multiline, 'multiline-comments'),
(r'\*/', Comment.Multiline, '#pop'),
(r'[^/\*]+', Comment.Multiline),
(r'[/*]', Comment.Multiline)
]
}
class MySqlLexer(RegexLexer):
"""
Special lexer for MySQL.
"""
name = 'MySQL'
aliases = ['mysql']
mimetypes = ['text/x-mysql']
flags = re.IGNORECASE
tokens = {
'root': [
(r'\s+', Text),
(r'(#|--\s+).*?\n', Comment.Single),
(r'/\*', Comment.Multiline, 'multiline-comments'),
(r'[0-9]+', Number.Integer),
(r'[0-9]*\.[0-9]+(e[+-]?[0-9]+)?', Number.Float),
# TODO: add backslash escapes
(r"'(''|[^'])*'", String.Single),
(r'"(""|[^"])*"', String.Double),
(r"`(``|[^`])*`", String.Symbol),
(r'[+*/<>=~!@#%^&|`?^-]', Operator),
(r'\b(tinyint|smallint|mediumint|int|integer|bigint|date|'
r'datetime|time|bit|bool|tinytext|mediumtext|longtext|text|'
r'tinyblob|mediumblob|longblob|blob|float|double|double\s+'
r'precision|real|numeric|dec|decimal|timestamp|year|char|'
r'varchar|varbinary|varcharacter|enum|set)(\b\s*)(\()?',
bygroups(Keyword.Type, Text, Punctuation)),
(r'\b(add|all|alter|analyze|and|as|asc|asensitive|before|between|'
r'bigint|binary|blob|both|by|call|cascade|case|change|char|'
r'character|check|collate|column|condition|constraint|continue|'
r'convert|create|cross|current_date|current_time|'
r'current_timestamp|current_user|cursor|database|databases|'
r'day_hour|day_microsecond|day_minute|day_second|dec|decimal|'
r'declare|default|delayed|delete|desc|describe|deterministic|'
r'distinct|distinctrow|div|double|drop|dual|each|else|elseif|'
r'enclosed|escaped|exists|exit|explain|fetch|float|float4|float8'
r'|for|force|foreign|from|fulltext|grant|group|having|'
r'high_priority|hour_microsecond|hour_minute|hour_second|if|'
r'ignore|in|index|infile|inner|inout|insensitive|insert|int|'
r'int1|int2|int3|int4|int8|integer|interval|into|is|iterate|'
r'join|key|keys|kill|leading|leave|left|like|limit|lines|load|'
r'localtime|localtimestamp|lock|long|loop|low_priority|match|'
r'minute_microsecond|minute_second|mod|modifies|natural|'
r'no_write_to_binlog|not|numeric|on|optimize|option|optionally|'
r'or|order|out|outer|outfile|precision|primary|procedure|purge|'
r'raid0|read|reads|real|references|regexp|release|rename|repeat|'
r'replace|require|restrict|return|revoke|right|rlike|schema|'
r'schemas|second_microsecond|select|sensitive|separator|set|'
r'show|smallint|soname|spatial|specific|sql|sql_big_result|'
r'sql_calc_found_rows|sql_small_result|sqlexception|sqlstate|'
r'sqlwarning|ssl|starting|straight_join|table|terminated|then|'
r'to|trailing|trigger|undo|union|unique|unlock|unsigned|update|'
r'usage|use|using|utc_date|utc_time|utc_timestamp|values|'
r'varying|when|where|while|with|write|x509|xor|year_month|'
r'zerofill)\b', Keyword),
# TODO: this list is not complete
(r'\b(auto_increment|engine|charset|tables)\b', Keyword.Pseudo),
(r'(true|false|null)', Name.Constant),
(r'([a-zA-Z_][a-zA-Z0-9_]*)(\s*)(\()',
bygroups(Name.Function, Text, Punctuation)),
(r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
(r'@[A-Za-z0-9]*[._]*[A-Za-z0-9]*', Name.Variable),
(r'[;:()\[\],\.]', Punctuation)
],
'multiline-comments': [
(r'/\*', Comment.Multiline, 'multiline-comments'),
(r'\*/', Comment.Multiline, '#pop'),
(r'[^/\*]+', Comment.Multiline),
(r'[/*]', Comment.Multiline)
]
}
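# --- Usage sketch (not part of the original module) ----------------------
# A minimal, hypothetical example of pushing a snippet through one of the
# lexers above via Pygments' public highlight() API.  The sample query and
# the helper name are invented for illustration only.
def _demo_mysql_highlight():
    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    sample = "SELECT id, name FROM users WHERE age > 21;  -- sample query"
    # Returns the input decorated with ANSI colour escape sequences.
    return highlight(sample, MySqlLexer(), TerminalFormatter())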
class SqliteConsoleLexer(Lexer):
"""
Lexer for example sessions using sqlite3.
*New in Pygments 0.11.*
"""
name = 'sqlite3con'
aliases = ['sqlite3']
filenames = ['*.sqlite3-console']
mimetypes = ['text/x-sqlite3-console']
def get_tokens_unprocessed(self, data):
sql = SqlLexer(**self.options)
curcode = ''
insertions = []
for match in line_re.finditer(data):
line = match.group()
if line.startswith('sqlite> ') or line.startswith(' ...> '):
insertions.append((len(curcode),
[(0, Generic.Prompt, line[:8])]))
curcode += line[8:]
else:
if curcode:
for item in do_insertions(insertions,
sql.get_tokens_unprocessed(curcode)):
yield item
curcode = ''
insertions = []
if line.startswith('SQL error: '):
yield (match.start(), Generic.Traceback, line)
else:
yield (match.start(), Generic.Output, line)
if curcode:
for item in do_insertions(insertions,
sql.get_tokens_unprocessed(curcode)):
yield item
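# --- Usage sketch (not part of the original module) ----------------------
# Illustrates the prompt/insertion handling implemented above: prompt text
# becomes Generic.Prompt tokens while the SQL after it is delegated to
# SqlLexer via do_insertions().  The transcript and helper name are
# invented for illustration.
def _demo_sqlite_console_tokens():
    transcript = (
        "sqlite> SELECT 1;\n"
        "1\n"
    )
    # get_tokens() is inherited from the Lexer base class.
    return list(SqliteConsoleLexer().get_tokens(transcript))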
class BrainfuckLexer(RegexLexer):
"""
Lexer for the esoteric `BrainFuck <http://www.muppetlabs.com/~breadbox/bf/>`_
language.
"""
name = 'Brainfuck'
aliases = ['brainfuck', 'bf']
filenames = ['*.bf', '*.b']
mimetypes = ['application/x-brainfuck']
tokens = {
'common': [
# use different colors for different instruction types
(r'[.,]+', Name.Tag),
(r'[+-]+', Name.Builtin),
(r'[<>]+', Name.Variable),
(r'[^.,+\-<>\[\]]+', Comment),
],
'root': [
(r'\[', Keyword, 'loop'),
(r'\]', Error),
include('common'),
],
'loop': [
(r'\[', Keyword, '#push'),
(r'\]', Keyword, '#pop'),
include('common'),
]
}
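# --- Usage sketch (not part of the original module) ----------------------
# Shows the state handling above: '[' pushes the 'loop' state, a matching
# ']' pops it, and an unmatched ']' in 'root' is reported as Error.  The
# helper name and program are made up for illustration.
def _demo_brainfuck_tokens():
    return list(BrainfuckLexer().get_tokens('+[>+<-].'))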
class BefungeLexer(RegexLexer):
"""
Lexer for the esoteric `Befunge <http://en.wikipedia.org/wiki/Befunge>`_
language.
*New in Pygments 0.7.*
"""
name = 'Befunge'
aliases = ['befunge']
filenames = ['*.befunge']
mimetypes = ['application/x-befunge']
tokens = {
'root': [
(r'[0-9a-f]', Number),
(r'[\+\*/%!`-]', Operator), # Traditional math
(r'[<>^v?\[\]rxjk]', Name.Variable), # Move, imperatives
(r'[:\\$.,n]', Name.Builtin), # Stack ops, imperatives
(r'[|_mw]', Keyword),
(r'[{}]', Name.Tag), # Befunge-98 stack ops
(r'".*?"', String.Double), # Strings don't appear to allow escapes
(r'\'.', String.Single), # Single character
(r'[#;]', Comment), # Trampoline... depends on direction hit
(r'[pg&~=@iotsy]', Keyword), # Misc
(r'[()A-Z]', Comment), # Fingerprints
(r'\s+', Text), # Whitespace doesn't matter
],
}
class BashLexer(RegexLexer):
"""
Lexer for (ba|k|)sh shell scripts.
*New in Pygments 0.6.*
"""
name = 'Bash'
aliases = ['bash', 'sh', 'ksh']
filenames = ['*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass']
mimetypes = ['application/x-sh', 'application/x-shellscript']
tokens = {
'root': [
include('basic'),
(r'\$\(\(', Keyword, 'math'),
(r'\$\(', Keyword, 'paren'),
(r'\${#?', Keyword, 'curly'),
(r'`', String.Backtick, 'backticks'),
include('data'),
],
'basic': [
(r'\b(if|fi|else|while|do|done|for|then|return|function|case|'
r'select|continue|until|esac|elif)\s*\b',
Keyword),
(r'\b(alias|bg|bind|break|builtin|caller|cd|command|compgen|'
r'complete|declare|dirs|disown|echo|enable|eval|exec|exit|'
r'export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|'
r'local|logout|popd|printf|pushd|pwd|read|readonly|set|shift|'
r'shopt|source|suspend|test|time|times|trap|true|type|typeset|'
r'ulimit|umask|unalias|unset|wait)\s*\b(?!\.)',
Name.Builtin),
(r'#.*\n', Comment),
(r'\\[\w\W]', String.Escape),
(r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
(r'[\[\]{}()=]', Operator),
(r'<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
(r'&&|\|\|', Operator),
],
'data': [
(r'(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
(r"(?s)\$?'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
(r';', Text),
(r'\s+', Text),
(r'[^=\s\n\[\]{}()$"\'`\\<]+', Text),
(r'\d+(?= |\Z)', Number),
(r'\$#?(\w+|.)', Name.Variable),
(r'<', Text),
],
'curly': [
(r'}', Keyword, '#pop'),
(r':-', Keyword),
(r'[a-zA-Z0-9_]+', Name.Variable),
(r'[^}:"\'`$]+', Punctuation),
(r':', Punctuation),
include('root'),
],
'paren': [
(r'\)', Keyword, '#pop'),
include('root'),
],
'math': [
(r'\)\)', Keyword, '#pop'),
(r'[-+*/%^|&]|\*\*|\|\|', Operator),
(r'\d+', Number),
include('root'),
],
'backticks': [
(r'`', String.Backtick, '#pop'),
include('root'),
],
}
def analyse_text(text):
return shebang_matches(text, r'(ba|z|)sh')
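# --- Usage sketch (not part of the original module) ----------------------
# analyse_text() above feeds Pygments' content-based guessing: a shebang
# matching (ba|z|)sh should make guess_lexer() prefer BashLexer.  The
# script text and helper name are invented for illustration.
def _demo_guess_bash():
    from pygments.lexers import guess_lexer
    script = "#!/bin/bash\necho hello\n"
    return guess_lexer(script)  # expected to be a BashLexer instance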
class BashSessionLexer(Lexer):
"""
Lexer for simplistic shell sessions.
*New in Pygments 1.1.*
"""
name = 'Bash Session'
aliases = ['console']
filenames = ['*.sh-session']
mimetypes = ['application/x-shell-session']
def get_tokens_unprocessed(self, text):
bashlexer = BashLexer(**self.options)
pos = 0
curcode = ''
insertions = []
for match in line_re.finditer(text):
line = match.group()
m = re.match(r'^((?:|sh\S*?|\w+\S+[@:]\S+(?:\s+\S+)?|\[\S+[@:]'
r'[^\n]+\].+)[$#%])(.*\n?)', line)
if m:
# To support output lexers (say diff output), the output
# needs to be broken by prompts whenever the output lexer
# changes.
if not insertions:
pos = match.start()
insertions.append((len(curcode),
[(0, Generic.Prompt, m.group(1))]))
curcode += m.group(2)
elif line.startswith('>'):
insertions.append((len(curcode),
[(0, Generic.Prompt, line[:1])]))
curcode += line[1:]
else:
if insertions:
toks = bashlexer.get_tokens_unprocessed(curcode)
for i, t, v in do_insertions(insertions, toks):
yield pos+i, t, v
yield match.start(), Generic.Output, line
insertions = []
curcode = ''
if insertions:
for i, t, v in do_insertions(insertions,
bashlexer.get_tokens_unprocessed(curcode)):
yield pos+i, t, v
class BatchLexer(RegexLexer):
"""
Lexer for the DOS/Windows Batch file format.
*New in Pygments 0.7.*
"""
name = 'Batchfile'
aliases = ['bat']
filenames = ['*.bat', '*.cmd']
mimetypes = ['application/x-dos-batch']
flags = re.MULTILINE | re.IGNORECASE
tokens = {
'root': [
# Lines can start with @ to prevent echo
(r'^\s*@', Punctuation),
(r'^(\s*)(rem\s.*)$', bygroups(Text, Comment)),
(r'".*?"', String.Double),
(r"'.*?'", String.Single),
# If made more specific, make sure you still allow expansions
# like %~$VAR:zlt
(r'%%?[~$:\w]+%?', Name.Variable),
(r'::.*', Comment), # Technically :: only works at BOL
(r'(set)(\s+)(\w+)', bygroups(Keyword, Text, Name.Variable)),
(r'(call)(\s+)(:\w+)', bygroups(Keyword, Text, Name.Label)),
(r'(goto)(\s+)(\w+)', bygroups(Keyword, Text, Name.Label)),
(r'\b(set|call|echo|on|off|endlocal|for|do|goto|if|pause|'
r'setlocal|shift|errorlevel|exist|defined|cmdextversion|'
r'errorlevel|else|cd|md|del|deltree|cls|choice)\b', Keyword),
(r'\b(equ|neq|lss|leq|gtr|geq)\b', Operator),
include('basic'),
(r'.', Text),
],
'echo': [
# Escapes only valid within echo args?
(r'\^\^|\^<|\^>|\^\|', String.Escape),
(r'\n', Text, '#pop'),
include('basic'),
(r'[^\'"^]+', Text),
],
'basic': [
(r'".*?"', String.Double),
(r"'.*?'", String.Single),
(r'`.*?`', String.Backtick),
(r'-?\d+', Number),
(r',', Punctuation),
(r'=', Operator),
(r'/\S+', Name),
(r':\w+', Name.Label),
(r'\w:\w+', Text),
(r'([<>|])(\s*)(\w+)', bygroups(Punctuation, Text, Name)),
],
}
class RedcodeLexer(RegexLexer):
"""
A simple Redcode lexer based on ICWS'94.
Contributed by Adam Blinkinsop <[email protected]>.
*New in Pygments 0.8.*
"""
name = 'Redcode'
aliases = ['redcode']
filenames = ['*.cw']
opcodes = ['DAT','MOV','ADD','SUB','MUL','DIV','MOD',
'JMP','JMZ','JMN','DJN','CMP','SLT','SPL',
'ORG','EQU','END']
modifiers = ['A','B','AB','BA','F','X','I']
tokens = {
'root': [
# Whitespace:
(r'\s+', Text),
(r';.*$', Comment.Single),
# Lexemes:
# Identifiers
(r'\b(%s)\b' % '|'.join(opcodes), Name.Function),
(r'\b(%s)\b' % '|'.join(modifiers), Name.Decorator),
(r'[A-Za-z_][A-Za-z_0-9]+', Name),
# Operators
(r'[-+*/%]', Operator),
(r'[#$@<>]', Operator), # mode
(r'[.,]', Punctuation), # mode
# Numbers
(r'[-+]?\d+', Number.Integer),
],
}
class MOOCodeLexer(RegexLexer):
"""
For `MOOCode <http://www.moo.mud.org/>`_ (the MOO scripting
language).
*New in Pygments 0.9.*
"""
name = 'MOOCode'
filenames = ['*.moo']
aliases = ['moocode']
mimetypes = ['text/x-moocode']
tokens = {
'root' : [
# Numbers
(r'(0|[1-9][0-9_]*)', Number.Integer),
# Strings
(r'"(\\\\|\\"|[^"])*"', String),
# exceptions
(r'(E_PERM|E_DIV)', Name.Exception),
# db-refs
(r'((#[-0-9]+)|(\$[a-z_A-Z0-9]+))', Name.Entity),
# Keywords
(r'\b(if|else|elseif|endif|for|endfor|fork|endfork|while'
r'|endwhile|break|continue|return|try'
r'|except|endtry|finally|in)\b', Keyword),
# builtins
(r'(random|length)', Name.Builtin),
# special variables
(r'(player|caller|this|args)', Name.Variable.Instance),
# skip whitespace
(r'\s+', Text),
(r'\n', Text),
# other operators
(r'([!;=,{}&\|:\.\[\]@\(\)\<\>\?]+)', Operator),
# function call
(r'([a-z_A-Z0-9]+)(\()', bygroups(Name.Function, Operator)),
# variables
(r'([a-zA-Z_0-9]+)', Text),
]
}
class SmalltalkLexer(RegexLexer):
"""
For `Smalltalk <http://www.smalltalk.org/>`_ syntax.
Contributed by Stefan Matthias Aust.
Rewritten by Nils Winter.
*New in Pygments 0.10.*
"""
name = 'Smalltalk'
filenames = ['*.st']
aliases = ['smalltalk', 'squeak']
mimetypes = ['text/x-smalltalk']
tokens = {
'root' : [
(r'(<)(\w+:)(.*?)(>)', bygroups(Text, Keyword, Text, Text)),
include('squeak fileout'),
include('whitespaces'),
include('method definition'),
(r'(\|)([\w\s]*)(\|)', bygroups(Operator, Name.Variable, Operator)),
include('objects'),
(r'\^|\:=|\_', Operator),
# temporaries
(r'[\]({}.;!]', Text),
],
'method definition' : [
# Not perfect: can't allow whitespace at the beginning of the
# line without breaking everything
(r'([a-zA-Z]+\w*:)(\s*)(\w+)',
bygroups(Name.Function, Text, Name.Variable)),
(r'^(\b[a-zA-Z]+\w*\b)(\s*)$', bygroups(Name.Function, Text)),
(r'^([-+*/\\~<>=|&!?,@%]+)(\s*)(\w+)(\s*)$',
bygroups(Name.Function, Text, Name.Variable, Text)),
],
'blockvariables' : [
include('whitespaces'),
(r'(:)(\s*)([A-Za-z\w]+)',
bygroups(Operator, Text, Name.Variable)),
(r'\|', Operator, '#pop'),
(r'', Text, '#pop'), # else pop
],
'literals' : [
(r'\'[^\']*\'', String, 'afterobject'),
(r'\$.', String.Char, 'afterobject'),
(r'#\(', String.Symbol, 'parenth'),
(r'\)', Text, 'afterobject'),
(r'(\d+r)?-?\d+(\.\d+)?(e-?\d+)?', Number, 'afterobject'),
],
'_parenth_helper' : [
include('whitespaces'),
(r'(\d+r)?-?\d+(\.\d+)?(e-?\d+)?', Number),
(r'[-+*/\\~<>=|&#!?,@%\w+:]+', String.Symbol),
# literals
(r'\'[^\']*\'', String),
(r'\$.', String.Char),
(r'#*\(', String.Symbol, 'inner_parenth'),
],
'parenth' : [
# This state is a bit tricky since
# we can't just pop this state
(r'\)', String.Symbol, ('root','afterobject')),
include('_parenth_helper'),
],
'inner_parenth': [
(r'\)', String.Symbol, '#pop'),
include('_parenth_helper'),
],
'whitespaces' : [
# skip whitespace and comments
(r'\s+', Text),
(r'"[^"]*"', Comment),
],
'objects' : [
(r'\[', Text, 'blockvariables'),
(r'\]', Text, 'afterobject'),
(r'\b(self|super|true|false|nil|thisContext)\b',
Name.Builtin.Pseudo, 'afterobject'),
(r'\b[A-Z]\w*(?!:)\b', Name.Class, 'afterobject'),
(r'\b[a-z]\w*(?!:)\b', Name.Variable, 'afterobject'),
(r'#("[^"]*"|[-+*/\\~<>=|&!?,@%]+|[\w:]+)',
String.Symbol, 'afterobject'),
include('literals'),
],
'afterobject' : [
(r'! !$', Keyword, '#pop'), # squeak chunk delimiter
include('whitespaces'),
(r'\b(ifTrue:|ifFalse:|whileTrue:|whileFalse:|timesRepeat:)',
Name.Builtin, '#pop'),
(r'\b(new\b(?!:))', Name.Builtin),
(r'\:=|\_', Operator, '#pop'),
(r'\b[a-zA-Z]+\w*:', Name.Function, '#pop'),
(r'\b[a-zA-Z]+\w*', Name.Function),
(r'\w+:?|[-+*/\\~<>=|&!?,@%]+', Name.Function, '#pop'),
(r'\.', Punctuation, '#pop'),
(r';', Punctuation),
(r'[\])}]', Text),
(r'[\[({]', Text, '#pop'),
],
'squeak fileout' : [
# Squeak fileout format (optional)
(r'^"[^"]*"!', Keyword),
(r"^'[^']*'!", Keyword),
(r'^(!)(\w+)( commentStamp: )(.*?)( prior: .*?!\n)(.*?)(!)',
bygroups(Keyword, Name.Class, Keyword, String, Keyword, Text, Keyword)),
(r'^(!)(\w+(?: class)?)( methodsFor: )(\'[^\']*\')(.*?!)',
bygroups(Keyword, Name.Class, Keyword, String, Keyword)),
(r'^(\w+)( subclass: )(#\w+)'
r'(\s+instanceVariableNames: )(.*?)'
r'(\s+classVariableNames: )(.*?)'
r'(\s+poolDictionaries: )(.*?)'
r'(\s+category: )(.*?)(!)',
bygroups(Name.Class, Keyword, String.Symbol, Keyword, String, Keyword,
String, Keyword, String, Keyword, String, Keyword)),
(r'^(\w+(?: class)?)(\s+instanceVariableNames: )(.*?)(!)',
bygroups(Name.Class, Keyword, String, Keyword)),
(r'(!\n)(\].*)(! !)$', bygroups(Keyword, Text, Keyword)),
(r'! !$', Keyword),
],
}
class TcshLexer(RegexLexer):
"""
Lexer for tcsh scripts.
*New in Pygments 0.10.*
"""
name = 'Tcsh'
aliases = ['tcsh', 'csh']
filenames = ['*.tcsh', '*.csh']
mimetypes = ['application/x-csh']
tokens = {
'root': [
include('basic'),
(r'\$\(', Keyword, 'paren'),
(r'\${#?', Keyword, 'curly'),
(r'`', String.Backtick, 'backticks'),
include('data'),
],
'basic': [
(r'\b(if|endif|else|while|then|foreach|case|default|'
r'continue|goto|breaksw|end|switch|endsw)\s*\b',
Keyword),
(r'\b(alias|alloc|bg|bindkey|break|builtins|bye|caller|cd|chdir|'
r'complete|dirs|echo|echotc|eval|exec|exit|'
r'fg|filetest|getxvers|glob|getspath|hashstat|history|hup|inlib|jobs|kill|'
r'limit|log|login|logout|ls-F|migrate|newgrp|nice|nohup|notify|'
r'onintr|popd|printenv|pushd|rehash|repeat|rootnode|popd|pushd|set|shift|'
r'sched|setenv|setpath|settc|setty|setxvers|shift|source|stop|suspend|'
r'source|suspend|telltc|time|'
r'umask|unalias|uncomplete|unhash|universe|unlimit|unset|unsetenv|'
r'ver|wait|warp|watchlog|where|which)\s*\b',
Name.Builtin),
(r'#.*\n', Comment),
(r'\\[\w\W]', String.Escape),
(r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
(r'[\[\]{}()=]+', Operator),
(r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
],
'data': [
(r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
(r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
(r'\s+', Text),
(r'[^=\s\n\[\]{}()$"\'`\\]+', Text),
(r'\d+(?= |\Z)', Number),
(r'\$#?(\w+|.)', Name.Variable),
],
'curly': [
(r'}', Keyword, '#pop'),
(r':-', Keyword),
(r'[a-zA-Z0-9_]+', Name.Variable),
(r'[^}:"\'`$]+', Punctuation),
(r':', Punctuation),
include('root'),
],
'paren': [
(r'\)', Keyword, '#pop'),
include('root'),
],
'backticks': [
(r'`', String.Backtick, '#pop'),
include('root'),
],
}
class LogtalkLexer(RegexLexer):
"""
For `Logtalk <http://logtalk.org/>`_ source code.
*New in Pygments 0.10.*
"""
name = 'Logtalk'
aliases = ['logtalk']
filenames = ['*.lgt']
mimetypes = ['text/x-logtalk']
tokens = {
'root': [
# Directives
(r'^\s*:-\s',Punctuation,'directive'),
# Comments
(r'%.*?\n', Comment),
(r'/\*(.|\n)*?\*/',Comment),
# Whitespace
(r'\n', Text),
(r'\s+', Text),
# Numbers
(r"0'.", Number),
(r'0b[01]+', Number),
(r'0o[0-7]+', Number),
(r'0x[0-9a-fA-F]+', Number),
(r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number),
# Variables
(r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable),
# Event handlers
(r'(after|before)(?=[(])', Keyword),
# Execution-context methods
(r'(parameter|this|se(lf|nder))(?=[(])', Keyword),
# Reflection
(r'(current_predicate|predicate_property)(?=[(])', Keyword),
# DCGs and term expansion
(r'(expand_(goal|term)|(goal|term)_expansion|phrase)(?=[(])',
Keyword),
# Entity
(r'(abolish|c(reate|urrent))_(object|protocol|category)(?=[(])',
Keyword),
(r'(object|protocol|category)_property(?=[(])', Keyword),
# Entity relations
(r'complements_object(?=[(])', Keyword),
(r'extends_(object|protocol|category)(?=[(])', Keyword),
(r'imp(lements_protocol|orts_category)(?=[(])', Keyword),
(r'(instantiat|specializ)es_class(?=[(])', Keyword),
# Events
(r'(current_event|(abolish|define)_events)(?=[(])', Keyword),
# Flags
(r'(current|set)_logtalk_flag(?=[(])', Keyword),
# Compiling, loading, and library paths
(r'logtalk_(compile|l(ibrary_path|oad))(?=[(])', Keyword),
# Database
(r'(clause|retract(all)?)(?=[(])', Keyword),
(r'a(bolish|ssert(a|z))(?=[(])', Keyword),
# Control
(r'(ca(ll|tch)|throw)(?=[(])', Keyword),
(r'(fail|true)\b', Keyword),
# All solutions
(r'((bag|set)of|f(ind|or)all)(?=[(])', Keyword),
# Multi-threading meta-predicates
(r'threaded(_(call|once|ignore|exit|peek|wait|notify))?(?=[(])',
Keyword),
# Term unification
(r'unify_with_occurs_check(?=[(])', Keyword),
# Term creation and decomposition
(r'(functor|arg|copy_term)(?=[(])', Keyword),
# Evaluable functors
(r'(rem|mod|abs|sign)(?=[(])', Keyword),
(r'float(_(integer|fractional)_part)?(?=[(])', Keyword),
(r'(floor|truncate|round|ceiling)(?=[(])', Keyword),
# Other arithmetic functors
(r'(cos|atan|exp|log|s(in|qrt))(?=[(])', Keyword),
# Term testing
(r'(var|atom(ic)?|integer|float|compound|n(onvar|umber))(?=[(])',
Keyword),
# Stream selection and control
(r'(curren|se)t_(in|out)put(?=[(])', Keyword),
(r'(open|close)(?=[(])', Keyword),
(r'flush_output(?=[(])', Keyword),
(r'(at_end_of_stream|flush_output)\b', Keyword),
(r'(stream_property|at_end_of_stream|set_stream_position)(?=[(])',
Keyword),
# Character and byte input/output
(r'(nl|(get|peek|put)_(byte|c(har|ode)))(?=[(])', Keyword),
(r'\bnl\b', Keyword),
# Term input/output
(r'read(_term)?(?=[(])', Keyword),
(r'write(q|_(canonical|term))?(?=[(])', Keyword),
(r'(current_)?op(?=[(])', Keyword),
(r'(current_)?char_conversion(?=[(])', Keyword),
# Atomic term processing
(r'atom_(length|c(hars|o(ncat|des)))(?=[(])', Keyword),
(r'(char_code|sub_atom)(?=[(])', Keyword),
(r'number_c(har|ode)s(?=[(])', Keyword),
# Implementation defined hooks functions
(r'(se|curren)t_prolog_flag(?=[(])', Keyword),
(r'\bhalt\b', Keyword),
(r'halt(?=[(])', Keyword),
# Message sending operators
(r'(::|:|\^\^)', Operator),
# External call
(r'[{}]', Keyword),
# Logic and control
(r'\bonce(?=[(])', Keyword),
(r'\brepeat\b', Keyword),
# Bitwise functors
(r'(>>|<<|/\\|\\\\|\\)', Operator),
# Arithmetic evaluation
(r'\bis\b', Keyword),
# Arithmetic comparison
(r'(=:=|=\\=|<|=<|>=|>)', Operator),
# Term creation and decomposition
(r'=\.\.', Operator),
# Term unification
(r'(=|\\=)', Operator),
# Term comparison
(r'(==|\\==|@=<|@<|@>=|@>)', Operator),
# Evaluable functors
(r'(//|[-+*/])', Operator),
(r'\b(mod|rem)\b', Operator),
# Other arithmetic functors
(r'\b\*\*\b', Operator),
# DCG rules
(r'-->', Operator),
# Control constructs
(r'([!;]|->)', Operator),
# Logic and control
(r'\\+', Operator),
# Mode operators
(r'[?@]', Operator),
# Strings
(r'"(\\\\|\\"|[^"])*"', String),
# Punctuation
(r'[()\[\],.|]', Text),
# Atoms
(r"[a-z][a-zA-Z0-9_]*", Text),
(r"[']", String, 'quoted_atom'),
],
'quoted_atom': [
(r"['][']", String),
(r"[']", String, '#pop'),
(r'\\([\\abfnrtv"\']|(x[a-fA-F0-9]+|[0-7]+)\\)', String.Escape),
(r"[^\\'\n]+", String),
(r'\\', String),
],
'directive': [
# Conditional compilation directives
(r'(el)?if(?=[(])', Keyword, 'root'),
(r'(e(lse|ndif))[.]', Keyword, 'root'),
# Entity directives
(r'(category|object|protocol)(?=[(])', Keyword, 'entityrelations'),
(r'(end_(category|object|protocol))[.]',Keyword, 'root'),
# Predicate scope directives
(r'(public|protected|private)(?=[(])', Keyword, 'root'),
# Other directives
(r'e(n(coding|sure_loaded)|xport)(?=[(])', Keyword, 'root'),
(r'in(fo|itialization)(?=[(])', Keyword, 'root'),
(r'(dynamic|synchronized|threaded)[.]', Keyword, 'root'),
(r'(alias|d(ynamic|iscontiguous)|m(eta_predicate|ode|ultifile)|'
r's(et_(logtalk|prolog)_flag|ynchronized))(?=[(])', Keyword, 'root'),
(r'op(?=[(])', Keyword, 'root'),
(r'(calls|reexport|use(s|_module))(?=[(])', Keyword, 'root'),
(r'[a-z][a-zA-Z0-9_]*(?=[(])', Text, 'root'),
(r'[a-z][a-zA-Z0-9_]*[.]', Text, 'root'),
],
'entityrelations': [
(r'(extends|i(nstantiates|mp(lements|orts))|specializes)(?=[(])',
Keyword),
# Numbers
(r"0'.", Number),
(r'0b[01]+', Number),
(r'0o[0-7]+', Number),
(r'0x[0-9a-fA-F]+', Number),
(r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number),
# Variables
(r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable),
# Atoms
(r"[a-z][a-zA-Z0-9_]*", Text),
(r"[']", String, 'quoted_atom'),
# Strings
(r'"(\\\\|\\"|[^"])*"', String),
# End of entity-opening directive
(r'([)]\.)', Text, 'root'),
# Scope operator
(r'(::)', Operator),
# Punctuation
(r'[()\[\],.|]', Text),
# Comments
(r'%.*?\n', Comment),
(r'/\*(.|\n)*?\*/',Comment),
# Whitespace
(r'\n', Text),
(r'\s+', Text),
]
}
def analyse_text(text):
if ':- object(' in text:
return True
if ':- protocol(' in text:
return True
if ':- category(' in text:
return True
return False
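# --- Usage sketch (not part of the original module) ----------------------
# analyse_text() above is a simple content probe: any of the three entity
# opening directives marks the text as Logtalk.  The sample source and
# helper name are made up; Pygments' lexer metaclass wraps analyse_text()
# so it can be called on the class and returns a score.
def _demo_logtalk_probe():
    sample = ":- object(hello_world).\n:- end_object.\n"
    return LogtalkLexer.analyse_text(sample)  # expected to be a truthy score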
def _shortened(word):
dpos = word.find('$')
return '|'.join([word[:dpos] + word[dpos+1:i] + r'\b'
for i in range(len(word), dpos, -1)])
def _shortened_many(*words):
return '|'.join(map(_shortened, words))
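# --- Illustration (not part of the original module) ----------------------
# _shortened() expands the '$' abbreviation marker into an alternation of
# every allowed prefix, longest first, e.g. _shortened('bi$nd') yields
# r'bind\b|bin\b|bi\b'.  The helper name below is made up for illustration.
def _demo_shortened():
    return _shortened('bi$nd'), _shortened_many('ex$it', 'q$uit')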
class GnuplotLexer(RegexLexer):
"""
For `Gnuplot <http://gnuplot.info/>`_ plotting scripts.
*New in Pygments 0.11.*
"""
name = 'Gnuplot'
aliases = ['gnuplot']
filenames = ['*.plot', '*.plt']
mimetypes = ['text/x-gnuplot']
tokens = {
'root': [
include('whitespace'),
(_shortened('bi$nd'), Keyword, 'bind'),
(_shortened_many('ex$it', 'q$uit'), Keyword, 'quit'),
(_shortened('f$it'), Keyword, 'fit'),
(r'(if)(\s*)(\()', bygroups(Keyword, Text, Punctuation), 'if'),
(r'else\b', Keyword),
(_shortened('pa$use'), Keyword, 'pause'),
(_shortened_many('p$lot', 'rep$lot', 'sp$lot'), Keyword, 'plot'),
(_shortened('sa$ve'), Keyword, 'save'),
(_shortened('se$t'), Keyword, ('genericargs', 'optionarg')),
(_shortened_many('sh$ow', 'uns$et'),
Keyword, ('noargs', 'optionarg')),
(_shortened_many('low$er', 'ra$ise', 'ca$ll', 'cd$', 'cl$ear',
'h$elp', '\\?$', 'hi$story', 'l$oad', 'pr$int',
'pwd$', 're$read', 'res$et', 'scr$eendump',
'she$ll', 'sy$stem', 'up$date'),
Keyword, 'genericargs'),
(_shortened_many('pwd$', 're$read', 'res$et', 'scr$eendump',
'she$ll', 'test$'),
Keyword, 'noargs'),
('([a-zA-Z_][a-zA-Z0-9_]*)(\s*)(=)',
bygroups(Name.Variable, Text, Operator), 'genericargs'),
('([a-zA-Z_][a-zA-Z0-9_]*)(\s*\(.*?\)\s*)(=)',
bygroups(Name.Function, Text, Operator), 'genericargs'),
(r'@[a-zA-Z_][a-zA-Z0-9_]*', Name.Constant), # macros
(r';', Keyword),
],
'comment': [
(r'[^\\\n]', Comment),
(r'\\\n', Comment),
(r'\\', Comment),
# don't add the newline to the Comment token
('', Comment, '#pop'),
],
'whitespace': [
('#', Comment, 'comment'),
(r'[ \t\v\f]+', Text),
],
'noargs': [
include('whitespace'),
# semicolon and newline end the argument list
(r';', Punctuation, '#pop'),
(r'\n', Text, '#pop'),
],
'dqstring': [
(r'"', String, '#pop'),
(r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
(r'[^\\"\n]+', String), # all other characters
(r'\\\n', String), # line continuation
(r'\\', String), # stray backslash
(r'\n', String, '#pop'), # newline ends the string too
],
'sqstring': [
(r"''", String), # escaped single quote
(r"'", String, '#pop'),
(r"[^\\'\n]+", String), # all other characters
(r'\\\n', String), # line continuation
(r'\\', String), # normal backslash
(r'\n', String, '#pop'), # newline ends the string too
],
'genericargs': [
include('noargs'),
(r'"', String, 'dqstring'),
(r"'", String, 'sqstring'),
(r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float),
(r'(\d+\.\d*|\.\d+)', Number.Float),
(r'-?\d+', Number.Integer),
('[,.~!%^&*+=|?:<>/-]', Operator),
('[{}()\[\]]', Punctuation),
(r'(eq|ne)\b', Operator.Word),
(r'([a-zA-Z_][a-zA-Z0-9_]*)(\s*)(\()',
bygroups(Name.Function, Text, Punctuation)),
(r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
(r'@[a-zA-Z_][a-zA-Z0-9_]*', Name.Constant), # macros
(r'\\\n', Text),
],
'optionarg': [
include('whitespace'),
(_shortened_many(
"a$ll","an$gles","ar$row","au$toscale","b$ars","bor$der",
"box$width","cl$abel","c$lip","cn$trparam","co$ntour","da$ta",
"data$file","dg$rid3d","du$mmy","enc$oding","dec$imalsign",
"fit$","font$path","fo$rmat","fu$nction","fu$nctions","g$rid",
"hid$den3d","his$torysize","is$osamples","k$ey","keyt$itle",
"la$bel","li$nestyle","ls$","loa$dpath","loc$ale","log$scale",
"mac$ros","map$ping","map$ping3d","mar$gin","lmar$gin",
"rmar$gin","tmar$gin","bmar$gin","mo$use","multi$plot",
"mxt$ics","nomxt$ics","mx2t$ics","nomx2t$ics","myt$ics",
"nomyt$ics","my2t$ics","nomy2t$ics","mzt$ics","nomzt$ics",
"mcbt$ics","nomcbt$ics","of$fsets","or$igin","o$utput",
"pa$rametric","pm$3d","pal$ette","colorb$ox","p$lot",
"poi$ntsize","pol$ar","pr$int","obj$ect","sa$mples","si$ze",
"st$yle","su$rface","table$","t$erminal","termo$ptions","ti$cs",
"ticsc$ale","ticsl$evel","timef$mt","tim$estamp","tit$le",
"v$ariables","ve$rsion","vi$ew","xyp$lane","xda$ta","x2da$ta",
"yda$ta","y2da$ta","zda$ta","cbda$ta","xl$abel","x2l$abel",
"yl$abel","y2l$abel","zl$abel","cbl$abel","xti$cs","noxti$cs",
"x2ti$cs","nox2ti$cs","yti$cs","noyti$cs","y2ti$cs","noy2ti$cs",
"zti$cs","nozti$cs","cbti$cs","nocbti$cs","xdti$cs","noxdti$cs",
"x2dti$cs","nox2dti$cs","ydti$cs","noydti$cs","y2dti$cs",
"noy2dti$cs","zdti$cs","nozdti$cs","cbdti$cs","nocbdti$cs",
"xmti$cs","noxmti$cs","x2mti$cs","nox2mti$cs","ymti$cs",
"noymti$cs","y2mti$cs","noy2mti$cs","zmti$cs","nozmti$cs",
"cbmti$cs","nocbmti$cs","xr$ange","x2r$ange","yr$ange",
"y2r$ange","zr$ange","cbr$ange","rr$ange","tr$ange","ur$ange",
"vr$ange","xzeroa$xis","x2zeroa$xis","yzeroa$xis","y2zeroa$xis",
"zzeroa$xis","zeroa$xis","z$ero"), Name.Builtin, '#pop'),
],
'bind': [
('!', Keyword, '#pop'),
(_shortened('all$windows'), Name.Builtin),
include('genericargs'),
],
'quit': [
(r'gnuplot\b', Keyword),
include('noargs'),
],
'fit': [
(r'via\b', Name.Builtin),
include('plot'),
],
'if': [
(r'\)', Punctuation, '#pop'),
include('genericargs'),
],
'pause': [
(r'(mouse|any|button1|button2|button3)\b', Name.Builtin),
(_shortened('key$press'), Name.Builtin),
include('genericargs'),
],
'plot': [
(_shortened_many('ax$es', 'axi$s', 'bin$ary', 'ev$ery', 'i$ndex',
'mat$rix', 's$mooth', 'thru$', 't$itle',
'not$itle', 'u$sing', 'w$ith'),
Name.Builtin),
include('genericargs'),
],
'save': [
(_shortened_many('f$unctions', 's$et', 't$erminal', 'v$ariables'),
Name.Builtin),
include('genericargs'),
],
}
class PovrayLexer(RegexLexer):
"""
For `Persistence of Vision Raytracer <http://www.povray.org/>`_ files.
*New in Pygments 0.11.*
"""
name = 'POVRay'
aliases = ['pov']
filenames = ['*.pov', '*.inc']
mimetypes = ['text/x-povray']
tokens = {
'root': [
(r'/\*[\w\W]*?\*/', Comment.Multiline),
(r'//.*\n', Comment.Single),
(r'(?s)"(?:\\.|[^"\\])+"', String.Double),
(r'#(debug|default|else|end|error|fclose|fopen|if|ifdef|ifndef|'
r'include|range|read|render|statistics|switch|undef|version|'
r'warning|while|write|define|macro|local|declare)',
Comment.Preproc),
(r'\b(aa_level|aa_threshold|abs|acos|acosh|adaptive|adc_bailout|'
r'agate|agate_turb|all|alpha|ambient|ambient_light|angle|'
r'aperture|arc_angle|area_light|asc|asin|asinh|assumed_gamma|'
r'atan|atan2|atanh|atmosphere|atmospheric_attenuation|'
r'attenuating|average|background|black_hole|blue|blur_samples|'
r'bounded_by|box_mapping|bozo|break|brick|brick_size|'
r'brightness|brilliance|bumps|bumpy1|bumpy2|bumpy3|bump_map|'
r'bump_size|case|caustics|ceil|checker|chr|clipped_by|clock|'
r'color|color_map|colour|colour_map|component|composite|concat|'
r'confidence|conic_sweep|constant|control0|control1|cos|cosh|'
r'count|crackle|crand|cube|cubic_spline|cylindrical_mapping|'
r'debug|declare|default|degrees|dents|diffuse|direction|'
r'distance|distance_maximum|div|dust|dust_type|eccentricity|'
r'else|emitting|end|error|error_bound|exp|exponent|'
r'fade_distance|fade_power|falloff|falloff_angle|false|'
r'file_exists|filter|finish|fisheye|flatness|flip|floor|'
r'focal_point|fog|fog_alt|fog_offset|fog_type|frequency|gif|'
r'global_settings|glowing|gradient|granite|gray_threshold|'
r'green|halo|hexagon|hf_gray_16|hierarchy|hollow|hypercomplex|'
r'if|ifdef|iff|image_map|incidence|include|int|interpolate|'
r'inverse|ior|irid|irid_wavelength|jitter|lambda|leopard|'
r'linear|linear_spline|linear_sweep|location|log|looks_like|'
r'look_at|low_error_factor|mandel|map_type|marble|material_map|'
r'matrix|max|max_intersections|max_iteration|max_trace_level|'
r'max_value|metallic|min|minimum_reuse|mod|mortar|'
r'nearest_count|no|normal|normal_map|no_shadow|number_of_waves|'
r'octaves|off|offset|omega|omnimax|on|once|onion|open|'
r'orthographic|panoramic|pattern1|pattern2|pattern3|'
r'perspective|pgm|phase|phong|phong_size|pi|pigment|'
r'pigment_map|planar_mapping|png|point_at|pot|pow|ppm|'
r'precision|pwr|quadratic_spline|quaternion|quick_color|'
r'quick_colour|quilted|radial|radians|radiosity|radius|rainbow|'
r'ramp_wave|rand|range|reciprocal|recursion_limit|red|'
r'reflection|refraction|render|repeat|rgb|rgbf|rgbft|rgbt|'
r'right|ripples|rotate|roughness|samples|scale|scallop_wave|'
r'scattering|seed|shadowless|sin|sine_wave|sinh|sky|sky_sphere|'
r'slice|slope_map|smooth|specular|spherical_mapping|spiral|'
r'spiral1|spiral2|spotlight|spotted|sqr|sqrt|statistics|str|'
r'strcmp|strength|strlen|strlwr|strupr|sturm|substr|switch|sys|'
r't|tan|tanh|test_camera_1|test_camera_2|test_camera_3|'
r'test_camera_4|texture|texture_map|tga|thickness|threshold|'
r'tightness|tile2|tiles|track|transform|translate|transmit|'
r'triangle_wave|true|ttf|turbulence|turb_depth|type|'
r'ultra_wide_angle|up|use_color|use_colour|use_index|u_steps|'
r'val|variance|vaxis_rotate|vcross|vdot|version|vlength|'
r'vnormalize|volume_object|volume_rendered|vol_with_light|'
r'vrotate|v_steps|warning|warp|water_level|waves|while|width|'
r'wood|wrinkles|yes)\b', Keyword),
(r'bicubic_patch|blob|box|camera|cone|cubic|cylinder|difference|'
r'disc|height_field|intersection|julia_fractal|lathe|'
r'light_source|merge|mesh|object|plane|poly|polygon|prism|'
r'quadric|quartic|smooth_triangle|sor|sphere|superellipsoid|'
r'text|torus|triangle|union', Name.Builtin),
# TODO: <=, etc
(r'[\[\](){}<>;,]', Punctuation),
(r'[-+*/=]', Operator),
(r'\b(x|y|z|u|v)\b', Name.Builtin.Pseudo),
(r'[a-zA-Z_][a-zA-Z_0-9]*', Name),
(r'[0-9]+\.[0-9]*', Number.Float),
(r'\.[0-9]+', Number.Float),
(r'[0-9]+', Number.Integer),
(r'\s+', Text),
]
}
class AppleScriptLexer(RegexLexer):
"""
For `AppleScript source code
<http://developer.apple.com/documentation/AppleScript/
Conceptual/AppleScriptLangGuide>`_,
including `AppleScript Studio
<http://developer.apple.com/documentation/AppleScript/
Reference/StudioReference>`_.
Contributed by Andreas Amann <[email protected]>.
"""
name = 'AppleScript'
aliases = ['applescript']
filenames = ['*.applescript']
flags = re.MULTILINE | re.DOTALL
Identifiers = r'[a-zA-Z]\w*'
Literals = ['AppleScript', 'current application', 'false', 'linefeed',
'missing value', 'pi','quote', 'result', 'return', 'space',
'tab', 'text item delimiters', 'true', 'version']
Classes = ['alias ', 'application ', 'boolean ', 'class ', 'constant ',
'date ', 'file ', 'integer ', 'list ', 'number ', 'POSIX file ',
'real ', 'record ', 'reference ', 'RGB color ', 'script ',
'text ', 'unit types', '(Unicode )?text', 'string']
BuiltIn = ['attachment', 'attribute run', 'character', 'day', 'month',
'paragraph', 'word', 'year']
HandlerParams = ['about', 'above', 'against', 'apart from', 'around',
'aside from', 'at', 'below', 'beneath', 'beside',
'between', 'for', 'given', 'instead of', 'on', 'onto',
'out of', 'over', 'since']
Commands = ['ASCII (character|number)', 'activate', 'beep', 'choose URL',
'choose application', 'choose color', 'choose file( name)?',
'choose folder', 'choose from list',
'choose remote application', 'clipboard info',
'close( access)?', 'copy', 'count', 'current date', 'delay',
'delete', 'display (alert|dialog)', 'do shell script',
'duplicate', 'exists', 'get eof', 'get volume settings',
'info for', 'launch', 'list (disks|folder)', 'load script',
'log', 'make', 'mount volume', 'new', 'offset',
'open( (for access|location))?', 'path to', 'print', 'quit',
'random number', 'read', 'round', 'run( script)?',
'say', 'scripting components',
'set (eof|the clipboard to|volume)', 'store script',
'summarize', 'system attribute', 'system info',
'the clipboard', 'time to GMT', 'write', 'quoted form']
References = ['(in )?back of', '(in )?front of', '[0-9]+(st|nd|rd|th)',
'first', 'second', 'third', 'fourth', 'fifth', 'sixth',
'seventh', 'eighth', 'ninth', 'tenth', 'after', 'back',
'before', 'behind', 'every', 'front', 'index', 'last',
'middle', 'some', 'that', 'through', 'thru', 'where', 'whose']
Operators = ["and", "or", "is equal", "equals", "(is )?equal to", "is not",
"isn't", "isn't equal( to)?", "is not equal( to)?",
"doesn't equal", "does not equal", "(is )?greater than",
"comes after", "is not less than or equal( to)?",
"isn't less than or equal( to)?", "(is )?less than",
"comes before", "is not greater than or equal( to)?",
"isn't greater than or equal( to)?",
"(is )?greater than or equal( to)?", "is not less than",
"isn't less than", "does not come before",
"doesn't come before", "(is )?less than or equal( to)?",
"is not greater than", "isn't greater than",
"does not come after", "doesn't come after", "starts? with",
"begins? with", "ends? with", "contains?", "does not contain",
"doesn't contain", "is in", "is contained by", "is not in",
"is not contained by", "isn't contained by", "div", "mod",
"not", "(a )?(ref( to)?|reference to)", "is", "does"]
Control = ['considering', 'else', 'error', 'exit', 'from', 'if',
'ignoring', 'in', 'repeat', 'tell', 'then', 'times', 'to',
'try', 'until', 'using terms from', 'while', 'with',
'with timeout( of)?', 'with transaction', 'by', 'continue',
'end', 'its?', 'me', 'my', 'return', 'of' , 'as']
Declarations = ['global', 'local', 'prop(erty)?', 'set', 'get']
Reserved = ['but', 'put', 'returning', 'the']
StudioClasses = ['action cell', 'alert reply', 'application', 'box',
'browser( cell)?', 'bundle', 'button( cell)?', 'cell',
'clip view', 'color well', 'color-panel',
'combo box( item)?', 'control',
'data( (cell|column|item|row|source))?', 'default entry',
'dialog reply', 'document', 'drag info', 'drawer',
'event', 'font(-panel)?', 'formatter',
'image( (cell|view))?', 'matrix', 'menu( item)?', 'item',
'movie( view)?', 'open-panel', 'outline view', 'panel',
'pasteboard', 'plugin', 'popup button',
'progress indicator', 'responder', 'save-panel',
'scroll view', 'secure text field( cell)?', 'slider',
'sound', 'split view', 'stepper', 'tab view( item)?',
'table( (column|header cell|header view|view))',
'text( (field( cell)?|view))?', 'toolbar( item)?',
'user-defaults', 'view', 'window']
StudioEvents = ['accept outline drop', 'accept table drop', 'action',
'activated', 'alert ended', 'awake from nib', 'became key',
'became main', 'begin editing', 'bounds changed',
'cell value', 'cell value changed', 'change cell value',
'change item value', 'changed', 'child of item',
'choose menu item', 'clicked', 'clicked toolbar item',
'closed', 'column clicked', 'column moved',
'column resized', 'conclude drop', 'data representation',
'deminiaturized', 'dialog ended', 'document nib name',
'double clicked', 'drag( (entered|exited|updated))?',
'drop', 'end editing', 'exposed', 'idle', 'item expandable',
'item value', 'item value changed', 'items changed',
'keyboard down', 'keyboard up', 'launched',
'load data representation', 'miniaturized', 'mouse down',
'mouse dragged', 'mouse entered', 'mouse exited',
'mouse moved', 'mouse up', 'moved',
'number of browser rows', 'number of items',
'number of rows', 'open untitled', 'opened', 'panel ended',
'parameters updated', 'plugin loaded', 'prepare drop',
'prepare outline drag', 'prepare outline drop',
'prepare table drag', 'prepare table drop',
'read from file', 'resigned active', 'resigned key',
'resigned main', 'resized( sub views)?',
'right mouse down', 'right mouse dragged',
'right mouse up', 'rows changed', 'scroll wheel',
'selected tab view item', 'selection changed',
'selection changing', 'should begin editing',
'should close', 'should collapse item',
'should end editing', 'should expand item',
'should open( untitled)?',
'should quit( after last window closed)?',
'should select column', 'should select item',
'should select row', 'should select tab view item',
'should selection change', 'should zoom', 'shown',
'update menu item', 'update parameters',
'update toolbar item', 'was hidden', 'was miniaturized',
'will become active', 'will close', 'will dismiss',
'will display browser cell', 'will display cell',
'will display item cell', 'will display outline cell',
'will finish launching', 'will hide', 'will miniaturize',
'will move', 'will open', 'will pop up', 'will quit',
'will resign active', 'will resize( sub views)?',
'will select tab view item', 'will show', 'will zoom',
'write to file', 'zoomed']
StudioCommands = ['animate', 'append', 'call method', 'center',
'close drawer', 'close panel', 'display',
'display alert', 'display dialog', 'display panel', 'go',
'hide', 'highlight', 'increment', 'item for',
'load image', 'load movie', 'load nib', 'load panel',
'load sound', 'localized string', 'lock focus', 'log',
'open drawer', 'path for', 'pause', 'perform action',
'play', 'register', 'resume', 'scroll', 'select( all)?',
'show', 'size to fit', 'start', 'step back',
'step forward', 'stop', 'synchronize', 'unlock focus',
'update']
StudioProperties = ['accepts arrow key', 'action method', 'active',
'alignment', 'allowed identifiers',
'allows branch selection', 'allows column reordering',
'allows column resizing', 'allows column selection',
'allows customization',
'allows editing text attributes',
'allows empty selection', 'allows mixed state',
'allows multiple selection', 'allows reordering',
'allows undo', 'alpha( value)?', 'alternate image',
'alternate increment value', 'alternate title',
'animation delay', 'associated file name',
'associated object', 'auto completes', 'auto display',
'auto enables items', 'auto repeat',
'auto resizes( outline column)?',
'auto save expanded items', 'auto save name',
'auto save table columns', 'auto saves configuration',
'auto scroll', 'auto sizes all columns to fit',
'auto sizes cells', 'background color', 'bezel state',
'bezel style', 'bezeled', 'border rect', 'border type',
'bordered', 'bounds( rotation)?', 'box type',
'button returned', 'button type',
'can choose directories', 'can choose files',
'can draw', 'can hide',
'cell( (background color|size|type))?', 'characters',
'class', 'click count', 'clicked( data)? column',
'clicked data item', 'clicked( data)? row',
'closeable', 'collating', 'color( (mode|panel))',
'command key down', 'configuration',
'content(s| (size|view( margins)?))?', 'context',
'continuous', 'control key down', 'control size',
'control tint', 'control view',
'controller visible', 'coordinate system',
'copies( on scroll)?', 'corner view', 'current cell',
'current column', 'current( field)? editor',
'current( menu)? item', 'current row',
'current tab view item', 'data source',
'default identifiers', 'delta (x|y|z)',
'destination window', 'directory', 'display mode',
'displayed cell', 'document( (edited|rect|view))?',
'double value', 'dragged column', 'dragged distance',
'dragged items', 'draws( cell)? background',
'draws grid', 'dynamically scrolls', 'echos bullets',
'edge', 'editable', 'edited( data)? column',
'edited data item', 'edited( data)? row', 'enabled',
'enclosing scroll view', 'ending page',
'error handling', 'event number', 'event type',
'excluded from windows menu', 'executable path',
'expanded', 'fax number', 'field editor', 'file kind',
'file name', 'file type', 'first responder',
'first visible column', 'flipped', 'floating',
'font( panel)?', 'formatter', 'frameworks path',
'frontmost', 'gave up', 'grid color', 'has data items',
'has horizontal ruler', 'has horizontal scroller',
'has parent data item', 'has resize indicator',
'has shadow', 'has sub menu', 'has vertical ruler',
'has vertical scroller', 'header cell', 'header view',
'hidden', 'hides when deactivated', 'highlights by',
'horizontal line scroll', 'horizontal page scroll',
'horizontal ruler view', 'horizontally resizable',
'icon image', 'id', 'identifier',
'ignores multiple clicks',
'image( (alignment|dims when disabled|frame style|'
'scaling))?',
'imports graphics', 'increment value',
'indentation per level', 'indeterminate', 'index',
'integer value', 'intercell spacing', 'item height',
'key( (code|equivalent( modifier)?|window))?',
'knob thickness', 'label', 'last( visible)? column',
'leading offset', 'leaf', 'level', 'line scroll',
'loaded', 'localized sort', 'location', 'loop mode',
'main( (bundle|menu|window))?', 'marker follows cell',
'matrix mode', 'maximum( content)? size',
'maximum visible columns',
'menu( form representation)?', 'miniaturizable',
'miniaturized', 'minimized image', 'minimized title',
'minimum column width', 'minimum( content)? size',
'modal', 'modified', 'mouse down state',
'movie( (controller|file|rect))?', 'muted', 'name',
'needs display', 'next state', 'next text',
'number of tick marks', 'only tick mark values',
'opaque', 'open panel', 'option key down',
'outline table column', 'page scroll', 'pages across',
'pages down', 'palette label', 'pane splitter',
'parent data item', 'parent window', 'pasteboard',
'path( (names|separator))?', 'playing',
'plays every frame', 'plays selection only', 'position',
'preferred edge', 'preferred type', 'pressure',
'previous text', 'prompt', 'properties',
'prototype cell', 'pulls down', 'rate',
'released when closed', 'repeated',
'requested print time', 'required file type',
'resizable', 'resized column', 'resource path',
'returns records', 'reuses columns', 'rich text',
'roll over', 'row height', 'rulers visible',
'save panel', 'scripts path', 'scrollable',
'selectable( identifiers)?', 'selected cell',
'selected( data)? columns?', 'selected data items?',
'selected( data)? rows?', 'selected item identifier',
'selection by rect', 'send action on arrow key',
'sends action when done editing', 'separates columns',
'separator item', 'sequence number', 'services menu',
'shared frameworks path', 'shared support path',
'sheet', 'shift key down', 'shows alpha',
'shows state by', 'size( mode)?',
'smart insert delete enabled', 'sort case sensitivity',
'sort column', 'sort order', 'sort type',
'sorted( data rows)?', 'sound', 'source( mask)?',
'spell checking enabled', 'starting page', 'state',
'string value', 'sub menu', 'super menu', 'super view',
'tab key traverses cells', 'tab state', 'tab type',
'tab view', 'table view', 'tag', 'target( printer)?',
'text color', 'text container insert',
'text container origin', 'text returned',
'tick mark position', 'time stamp',
'title(d| (cell|font|height|position|rect))?',
'tool tip', 'toolbar', 'trailing offset', 'transparent',
'treat packages as directories', 'truncated labels',
'types', 'unmodified characters', 'update views',
'use sort indicator', 'user defaults',
'uses data source', 'uses ruler',
'uses threaded animation',
'uses title from previous column', 'value wraps',
'version',
'vertical( (line scroll|page scroll|ruler view))?',
'vertically resizable', 'view',
'visible( document rect)?', 'volume', 'width', 'window',
'windows menu', 'wraps', 'zoomable', 'zoomed']
tokens = {
'root': [
(r'\s+', Text),
(ur'¬\n', String.Escape),
(r"'s\s+", Text), # This is a possessive, consider moving
(r'(--|#).*?$', Comment),
(r'\(\*', Comment.Multiline, 'comment'),
(r'[\(\){}!,.:]', Punctuation),
(ur'(«)([^»]+)(»)',
bygroups(Text, Name.Builtin, Text)),
(r'\b((?:considering|ignoring)\s*)'
r'(application responses|case|diacriticals|hyphens|'
r'numeric strings|punctuation|white space)',
bygroups(Keyword, Name.Builtin)),
(ur'(-|\*|\+|&|≠|>=?|<=?|=|≥|≤|/|÷|\^)', Operator),
(r"\b(%s)\b" % '|'.join(Operators), Operator.Word),
(r'^(\s*(?:on|end)\s+)'
r'(%s)' % '|'.join(StudioEvents),
bygroups(Keyword, Name.Function)),
(r'^(\s*)(in|on|script|to)(\s+)', bygroups(Text, Keyword, Text)),
(r'\b(as )(%s)\b' % '|'.join(Classes),
bygroups(Keyword, Name.Class)),
(r'\b(%s)\b' % '|'.join(Literals), Name.Constant),
(r'\b(%s)\b' % '|'.join(Commands), Name.Builtin),
(r'\b(%s)\b' % '|'.join(Control), Keyword),
(r'\b(%s)\b' % '|'.join(Declarations), Keyword),
(r'\b(%s)\b' % '|'.join(Reserved), Name.Builtin),
(r'\b(%s)s?\b' % '|'.join(BuiltIn), Name.Builtin),
(r'\b(%s)\b' % '|'.join(HandlerParams), Name.Builtin),
(r'\b(%s)\b' % '|'.join(StudioProperties), Name.Attribute),
(r'\b(%s)s?\b' % '|'.join(StudioClasses), Name.Builtin),
(r'\b(%s)\b' % '|'.join(StudioCommands), Name.Builtin),
(r'\b(%s)\b' % '|'.join(References), Name.Builtin),
(r'"(\\\\|\\"|[^"])*"', String.Double),
(r'\b(%s)\b' % Identifiers, Name.Variable),
(r'[-+]?(\d+\.\d*|\d*\.\d+)(E[-+][0-9]+)?', Number.Float),
(r'[-+]?\d+', Number.Integer),
],
'comment': [
('\(\*', Comment.Multiline, '#push'),
('\*\)', Comment.Multiline, '#pop'),
('[^*(]+', Comment.Multiline),
('[*(]', Comment.Multiline),
],
}
class ModelicaLexer(RegexLexer):
"""
For `Modelica <http://www.modelica.org/>`_ source code.
*New in Pygments 1.1.*
"""
name = 'Modelica'
aliases = ['modelica']
filenames = ['*.mo']
mimetypes = ['text/x-modelica']
flags = re.IGNORECASE | re.DOTALL
tokens = {
'whitespace': [
(r'\n', Text),
(r'\s+', Text),
(r'\\\n', Text), # line continuation
(r'//(\n|(.|\n)*?[^\\]\n)', Comment),
(r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment),
],
'statements': [
(r'"', String, 'string'),
(r'(\d+\.\d*|\.\d+|\d+|\d.)[eE][+-]?\d+[lL]?', Number.Float),
(r'(\d+\.\d*|\.\d+)', Number.Float),
(r'\d+[Ll]?', Number.Integer),
(r'[~!%^&*+=|?:<>/-]', Operator),
(r'[()\[\]{},.;]', Punctuation),
(r'(true|false|NULL|Real|Integer|Boolean)\b', Name.Builtin),
(r"([a-zA-Z_][\w]*|'[a-zA-Z_\+\-\*\/\^][\w]*')"
r"(\.([a-zA-Z_][\w]*|'[a-zA-Z_\+\-\*\/\^][\w]*'))+", Name.Class),
(r"('[\w\+\-\*\/\^]+'|\w+)", Name) ],
'root': [
include('whitespace'),
include('keywords'),
include('functions'),
include('operators'),
include('classes'),
(r'("<html>|<html>)', Name.Tag, 'html-content'),
include('statements')
],
'keywords': [
(r'(algorithm|annotation|break|connect|constant|constrainedby|'
r'discrete|each|else|elseif|elsewhen|encapsulated|enumeration|'
r'end|equation|exit|expandable|extends|'
r'external|false|final|flow|for|if|import|in|inner|input|'
r'loop|nondiscrete|outer|output|parameter|partial|'
r'protected|public|redeclare|replaceable|stream|time|then|true|'
r'when|while|within)\b', Keyword)
],
'functions': [
(r'(abs|acos|acosh|asin|asinh|atan|atan2|atan3|ceil|cos|cosh|'
r'cross|div|exp|floor|log|log10|mod|rem|sign|sin|sinh|size|'
r'sqrt|tan|tanh|zeros)\b', Name.Function)
],
'operators': [
(r'(and|assert|cardinality|change|delay|der|edge|initial|'
r'noEvent|not|or|pre|reinit|return|sample|smooth|'
r'terminal|terminate)\b', Name.Builtin)
],
'classes': [
(r'(block|class|connector|function|model|package|'
r'record|type)\b', Name.Class)
],
'string': [
(r'"', String, '#pop'),
(r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})',
String.Escape),
(r'[^\\"\n]+', String), # all other characters
(r'\\\n', String), # line continuation
(r'\\', String) # stray backslash
],
'html-content': [
(r'<\s*/\s*html\s*>', Name.Tag, '#pop'),
(r'.+?(?=<\s*/\s*html\s*>)', using(HtmlLexer)),
]
}
class RebolLexer(RegexLexer):
"""
A `REBOL <http://www.rebol.com/>`_ lexer.
*New in Pygments 1.1.*
"""
name = 'REBOL'
aliases = ['rebol']
filenames = ['*.r', '*.r3']
mimetypes = ['text/x-rebol']
flags = re.IGNORECASE | re.MULTILINE
escape_re = r'(?:\^\([0-9a-fA-F]{1,4}\)*)'
def word_callback(lexer, match):
word = match.group()
if re.match(".*:$", word):
yield match.start(), Generic.Subheading, word
elif re.match(
r'(native|alias|all|any|as-string|as-binary|bind|bound\?|case|'
r'catch|checksum|comment|debase|dehex|exclude|difference|disarm|'
r'either|else|enbase|foreach|remove-each|form|free|get|get-env|if|'
r'in|intersect|loop|minimum-of|maximum-of|mold|new-line|'
r'new-line\?|not|now|prin|print|reduce|compose|construct|repeat|'
r'reverse|save|script\?|set|shift|switch|throw|to-hex|trace|try|'
r'type\?|union|unique|unless|unprotect|unset|until|use|value\?|'
r'while|compress|decompress|secure|open|close|read|read-io|'
r'write-io|write|update|query|wait|input\?|exp|log-10|log-2|'
r'log-e|square-root|cosine|sine|tangent|arccosine|arcsine|'
r'arctangent|protect|lowercase|uppercase|entab|detab|connected\?|'
r'browse|launch|stats|get-modes|set-modes|to-local-file|'
r'to-rebol-file|encloak|decloak|create-link|do-browser|bind\?|'
r'hide|draw|show|size-text|textinfo|offset-to-caret|'
r'caret-to-offset|local-request-file|rgb-to-hsv|hsv-to-rgb|'
r'crypt-strength\?|dh-make-key|dh-generate-key|dh-compute-key|'
r'dsa-make-key|dsa-generate-key|dsa-make-signature|'
r'dsa-verify-signature|rsa-make-key|rsa-generate-key|'
r'rsa-encrypt)$', word):
yield match.start(), Name.Builtin, word
elif re.match(
r'(add|subtract|multiply|divide|remainder|power|and~|or~|xor~|'
r'minimum|maximum|negate|complement|absolute|random|head|tail|'
r'next|back|skip|at|pick|first|second|third|fourth|fifth|sixth|'
r'seventh|eighth|ninth|tenth|last|path|find|select|make|to|copy\*|'
r'insert|remove|change|poke|clear|trim|sort|min|max|abs|cp|'
r'copy)$', word):
yield match.start(), Name.Function, word
elif re.match(
r'(error|source|input|license|help|install|echo|Usage|with|func|'
r'throw-on-error|function|does|has|context|probe|\?\?|as-pair|'
r'mod|modulo|round|repend|about|set-net|append|join|rejoin|reform|'
r'remold|charset|array|replace|move|extract|forskip|forall|alter|'
r'first+|also|take|for|forever|dispatch|attempt|what-dir|'
r'change-dir|clean-path|list-dir|dirize|rename|split-path|delete|'
r'make-dir|delete-dir|in-dir|confirm|dump-obj|upgrade|what|'
r'build-tag|process-source|build-markup|decode-cgi|read-cgi|'
r'write-user|save-user|set-user-name|protect-system|parse-xml|'
r'cvs-date|cvs-version|do-boot|get-net-info|desktop|layout|'
r'scroll-para|get-face|alert|set-face|uninstall|unfocus|'
r'request-dir|center-face|do-events|net-error|decode-url|'
r'parse-header|parse-header-date|parse-email-addrs|import-email|'
r'send|build-attach-body|resend|show-popup|hide-popup|open-events|'
r'find-key-face|do-face|viewtop|confine|find-window|'
r'insert-event-func|remove-event-func|inform|dump-pane|dump-face|'
r'flag-face|deflag-face|clear-fields|read-net|vbug|path-thru|'
r'read-thru|load-thru|do-thru|launch-thru|load-image|'
r'request-download|do-face-alt|set-font|set-para|get-style|'
r'set-style|make-face|stylize|choose|hilight-text|hilight-all|'
r'unlight-text|focus|scroll-drag|clear-face|reset-face|scroll-face|'
r'resize-face|load-stock|load-stock-block|notify|request|flash|'
r'request-color|request-pass|request-text|request-list|'
r'request-date|request-file|dbug|editor|link-relative-path|'
r'emailer|parse-error)$', word):
yield match.start(), Keyword.Namespace, word
elif re.match(
r'(halt|quit|do|load|q|recycle|call|run|ask|parse|view|unview|'
r'return|exit|break)$', word):
yield match.start(), Name.Exception, word
elif re.match('REBOL$', word):
yield match.start(), Generic.Heading, word
elif re.match("to-.*", word):
yield match.start(), Keyword, word
elif re.match('(\+|-|\*|/|//|\*\*|and|or|xor|=\?|=|==|<>|<|>|<=|>=)$',
word):
yield match.start(), Operator, word
elif re.match(".*\?$", word):
yield match.start(), Keyword, word
elif re.match(".*\!$", word):
yield match.start(), Keyword.Type, word
elif re.match("'.*", word):
yield match.start(), Name.Variable.Instance, word # lit-word
elif re.match("#.*", word):
yield match.start(), Name.Label, word # issue
elif re.match("%.*", word):
yield match.start(), Name.Decorator, word # file
else:
yield match.start(), Name.Variable, word
tokens = {
'root': [
(r'\s+', Text),
(r'#"', String.Char, 'char'),
(r'#{[0-9a-fA-F]*}', Number.Hex),
(r'2#{', Number.Hex, 'bin2'),
(r'64#{[0-9a-zA-Z+/=\s]*}', Number.Hex),
(r'"', String, 'string'),
(r'{', String, 'string2'),
(r';#+.*\n', Comment.Special),
(r';\*+.*\n', Comment.Preproc),
(r';.*\n', Comment),
(r'%"', Name.Decorator, 'stringFile'),
(r'%[^(\^{^")\s\[\]]+', Name.Decorator),
(r'<[a-zA-Z0-9:._-]*>', Name.Tag),
(r'<[^(<>\s")]+', Name.Tag, 'tag'),
(r'[+-]?([a-zA-Z]{1,3})?\$\d+(\.\d+)?', Number.Float), # money
(r'[+-]?\d+\:\d+(\:\d+)?(\.\d+)?', String.Other), # time
(r'\d+\-[0-9a-zA-Z]+\-\d+(\/\d+\:\d+(\:\d+)?'
r'([\.\d+]?([+-]?\d+:\d+)?)?)?', String.Other), # date
(r'\d+(\.\d+)+\.\d+', Keyword.Constant), # tuple
(r'\d+[xX]\d+', Keyword.Constant), # pair
(r'[+-]?\d+(\'\d+)?([\.,]\d*)?[eE][+-]?\d+', Number.Float),
(r'[+-]?\d+(\'\d+)?[\.,]\d*', Number.Float),
(r'[+-]?\d+(\'\d+)?', Number),
(r'[\[\]\(\)]', Generic.Strong),
(r'[a-zA-Z]+[^(\^{"\s:)]*://[^(\^{"\s)]*', Name.Decorator), # url
(r'mailto:[^(\^{"@\s)]+@[^(\^{"@\s)]+', Name.Decorator), # url
(r'[^(\^{"@\s)]+@[^(\^{"@\s)]+', Name.Decorator), # email
(r'comment\s', Comment, 'comment'),
(r'/[^(\^{^")\s/[\]]*', Name.Attribute),
(r'([^(\^{^")\s/[\]]+)(?=[:({"\s/\[\]])', word_callback),
(r'([^(\^{^")\s]+)', Text),
],
'string': [
(r'[^(\^")]+', String),
(escape_re, String.Escape),
(r'[\(|\)]+', String),
(r'\^.', String.Escape),
(r'"', String, '#pop'),
],
'string2': [
(r'[^(\^{^})]+', String),
(escape_re, String.Escape),
(r'[\(|\)]+', String),
(r'\^.', String.Escape),
(r'{', String, '#push'),
(r'}', String, '#pop'),
],
'stringFile': [
(r'[^(\^")]+', Name.Decorator),
(escape_re, Name.Decorator),
(r'\^.', Name.Decorator),
(r'"', Name.Decorator, '#pop'),
],
'char': [
(escape_re + '"', String.Char, '#pop'),
(r'\^."', String.Char, '#pop'),
(r'."', String.Char, '#pop'),
],
'tag': [
(escape_re, Name.Tag),
(r'"', Name.Tag, 'tagString'),
(r'[^(<>\r\n")]+', Name.Tag),
(r'>', Name.Tag, '#pop'),
],
'tagString': [
(r'[^(\^")]+', Name.Tag),
(escape_re, Name.Tag),
(r'[\(|\)]+', Name.Tag),
(r'\^.', Name.Tag),
(r'"', Name.Tag, '#pop'),
],
'tuple': [
(r'(\d+\.)+', Keyword.Constant),
(r'\d+', Keyword.Constant, '#pop'),
],
'bin2': [
(r'\s+', Number.Hex),
(r'([0-1]\s*){8}', Number.Hex),
(r'}', Number.Hex, '#pop'),
],
'comment': [
(r'"', Comment, 'commentString1'),
(r'{', Comment, 'commentString2'),
(r'\[', Comment, 'commentBlock'),
(r'[^(\s{\"\[]+', Comment, '#pop'),
],
'commentString1': [
(r'[^(\^")]+', Comment),
(escape_re, Comment),
(r'[\(|\)]+', Comment),
(r'\^.', Comment),
(r'"', Comment, '#pop'),
],
'commentString2': [
(r'[^(\^{^})]+', Comment),
(escape_re, Comment),
(r'[\(|\)]+', Comment),
(r'\^.', Comment),
(r'{', Comment, '#push'),
(r'}', Comment, '#pop'),
],
'commentBlock': [
(r'\[',Comment, '#push'),
(r'\]',Comment, '#pop'),
(r'[^(\[\])]*', Comment),
],
}
class ABAPLexer(RegexLexer):
"""
Lexer for ABAP, SAP's integrated language.
*New in Pygments 1.1.*
"""
name = 'ABAP'
aliases = ['abap']
filenames = ['*.abap']
mimetypes = ['text/x-abap']
flags = re.IGNORECASE | re.MULTILINE
tokens = {
'common': [
(r'\s+', Text),
(r'^\*.*$', Comment.Single),
(r'\".*?\n', Comment.Single),
],
'variable-names': [
(r'<[\S_]+>', Name.Variable),
(r'[\w][\w_~]*(?:(\[\])|->\*)?', Name.Variable),
],
'root': [
include('common'),
#function calls
(r'(CALL\s+(?:BADI|CUSTOMER-FUNCTION|FUNCTION))(\s+)(\'?\S+\'?)',
bygroups(Keyword, Text, Name.Function)),
(r'(CALL\s+(?:DIALOG|SCREEN|SUBSCREEN|SELECTION-SCREEN|'
r'TRANSACTION|TRANSFORMATION))\b',
Keyword),
(r'(FORM|PERFORM)(\s+)([\w_]+)',
bygroups(Keyword, Text, Name.Function)),
(r'(PERFORM)(\s+)(\()([\w_]+)(\))',
bygroups(Keyword, Text, Punctuation, Name.Variable, Punctuation )),
(r'(MODULE)(\s+)(\S+)(\s+)(INPUT|OUTPUT)',
bygroups(Keyword, Text, Name.Function, Text, Keyword)),
# method implementation
(r'(METHOD)(\s+)([\w_~]+)',
bygroups(Keyword, Text, Name.Function)),
# method calls
(r'(\s+)([\w_\-]+)([=\-]>)([\w_\-~]+)',
bygroups(Text, Name.Variable, Operator, Name.Function)),
            # method names after -> or => when immediately followed by '('
(r'(?<=(=|-)>)([\w_\-~]+)(?=\()', Name.Function),
# keywords with dashes in them.
# these need to be first, because for instance the -ID part
# of MESSAGE-ID wouldn't get highlighted if MESSAGE was
# first in the list of keywords.
(r'(ADD-CORRESPONDING|AUTHORITY-CHECK|'
r'CLASS-DATA|CLASS-EVENTS|CLASS-METHODS|CLASS-POOL|'
r'DELETE-ADJACENT|DIVIDE-CORRESPONDING|'
r'EDITOR-CALL|ENHANCEMENT-POINT|ENHANCEMENT-SECTION|EXIT-COMMAND|'
r'FIELD-GROUPS|FIELD-SYMBOLS|FUNCTION-POOL|'
r'INTERFACE-POOL|INVERTED-DATE|'
r'LOAD-OF-PROGRAM|LOG-POINT|'
r'MESSAGE-ID|MOVE-CORRESPONDING|MULTIPLY-CORRESPONDING|'
r'NEW-LINE|NEW-PAGE|NEW-SECTION|NO-EXTENSION|'
r'OUTPUT-LENGTH|PRINT-CONTROL|'
r'SELECT-OPTIONS|START-OF-SELECTION|SUBTRACT-CORRESPONDING|'
r'SYNTAX-CHECK|SYSTEM-EXCEPTIONS|'
r'TYPE-POOL|TYPE-POOLS'
r')\b', Keyword),
            # keyword combinations
(r'CREATE\s+(PUBLIC|PRIVATE|DATA|OBJECT)|'
r'((PUBLIC|PRIVATE|PROTECTED)\s+SECTION|'
r'(TYPE|LIKE)(\s+(LINE\s+OF|REF\s+TO|'
r'(SORTED|STANDARD|HASHED)\s+TABLE\s+OF))?|'
r'FROM\s+(DATABASE|MEMORY)|CALL\s+METHOD|'
r'(GROUP|ORDER) BY|HAVING|SEPARATED BY|'
r'GET\s+(BADI|BIT|CURSOR|DATASET|LOCALE|PARAMETER|'
r'PF-STATUS|(PROPERTY|REFERENCE)\s+OF|'
r'RUN\s+TIME|TIME\s+(STAMP)?)?|'
r'SET\s+(BIT|BLANK\s+LINES|COUNTRY|CURSOR|DATASET|EXTENDED\s+CHECK|'
r'HANDLER|HOLD\s+DATA|LANGUAGE|LEFT\s+SCROLL-BOUNDARY|'
r'LOCALE|MARGIN|PARAMETER|PF-STATUS|PROPERTY\s+OF|'
r'RUN\s+TIME\s+(ANALYZER|CLOCK\s+RESOLUTION)|SCREEN|'
             r'TITLEBAR|UPDATE\s+TASK\s+LOCAL|USER-COMMAND)|'
r'CONVERT\s+((INVERTED-)?DATE|TIME|TIME\s+STAMP|TEXT)|'
r'(CLOSE|OPEN)\s+(DATASET|CURSOR)|'
r'(TO|FROM)\s+(DATA BUFFER|INTERNAL TABLE|MEMORY ID|'
r'DATABASE|SHARED\s+(MEMORY|BUFFER))|'
r'DESCRIBE\s+(DISTANCE\s+BETWEEN|FIELD|LIST|TABLE)|'
r'FREE\s(MEMORY|OBJECT)?|'
r'PROCESS\s+(BEFORE\s+OUTPUT|AFTER\s+INPUT|'
r'ON\s+(VALUE-REQUEST|HELP-REQUEST))|'
r'AT\s+(LINE-SELECTION|USER-COMMAND|END\s+OF|NEW)|'
r'AT\s+SELECTION-SCREEN(\s+(ON(\s+(BLOCK|(HELP|VALUE)-REQUEST\s+FOR|'
r'END\s+OF|RADIOBUTTON\s+GROUP))?|OUTPUT))?|'
r'SELECTION-SCREEN:?\s+((BEGIN|END)\s+OF\s+((TABBED\s+)?BLOCK|LINE|'
r'SCREEN)|COMMENT|FUNCTION\s+KEY|'
r'INCLUDE\s+BLOCKS|POSITION|PUSHBUTTON|'
r'SKIP|ULINE)|'
r'LEAVE\s+(LIST-PROCESSING|PROGRAM|SCREEN|'
r'TO LIST-PROCESSING|TO TRANSACTION)'
r'(ENDING|STARTING)\s+AT|'
r'FORMAT\s+(COLOR|INTENSIFIED|INVERSE|HOTSPOT|INPUT|FRAMES|RESET)|'
r'AS\s+(CHECKBOX|SUBSCREEN|WINDOW)|'
r'WITH\s+(((NON-)?UNIQUE)?\s+KEY|FRAME)|'
r'(BEGIN|END)\s+OF|'
r'DELETE(\s+ADJACENT\s+DUPLICATES\sFROM)?|'
r'COMPARING(\s+ALL\s+FIELDS)?|'
r'INSERT(\s+INITIAL\s+LINE\s+INTO|\s+LINES\s+OF)?|'
r'IN\s+((BYTE|CHARACTER)\s+MODE|PROGRAM)|'
r'END-OF-(DEFINITION|PAGE|SELECTION)|'
r'WITH\s+FRAME(\s+TITLE)|'
             # simple combinations
r'AND\s+(MARK|RETURN)|CLIENT\s+SPECIFIED|CORRESPONDING\s+FIELDS\s+OF|'
r'IF\s+FOUND|FOR\s+EVENT|INHERITING\s+FROM|LEAVE\s+TO\s+SCREEN|'
r'LOOP\s+AT\s+(SCREEN)?|LOWER\s+CASE|MATCHCODE\s+OBJECT|MODIF\s+ID|'
r'MODIFY\s+SCREEN|NESTING\s+LEVEL|NO\s+INTERVALS|OF\s+STRUCTURE|'
r'RADIOBUTTON\s+GROUP|RANGE\s+OF|REF\s+TO|SUPPRESS DIALOG|'
r'TABLE\s+OF|UPPER\s+CASE|TRANSPORTING\s+NO\s+FIELDS|'
r'VALUE\s+CHECK|VISIBLE\s+LENGTH|HEADER\s+LINE)\b', Keyword),
# single word keywords.
(r'(^|(?<=(\s|\.)))(ABBREVIATED|ADD|ALIASES|APPEND|ASSERT|'
r'ASSIGN(ING)?|AT(\s+FIRST)?|'
r'BACK|BLOCK|BREAK-POINT|'
r'CASE|CATCH|CHANGING|CHECK|CLASS|CLEAR|COLLECT|COLOR|COMMIT|'
r'CREATE|COMMUNICATION|COMPONENTS?|COMPUTE|CONCATENATE|CONDENSE|'
r'CONSTANTS|CONTEXTS|CONTINUE|CONTROLS|'
r'DATA|DECIMALS|DEFAULT|DEFINE|DEFINITION|DEFERRED|DEMAND|'
r'DETAIL|DIRECTORY|DIVIDE|DO|'
r'ELSE(IF)?|ENDAT|ENDCASE|ENDCLASS|ENDDO|ENDFORM|ENDFUNCTION|'
r'ENDIF|ENDLOOP|ENDMETHOD|ENDMODULE|ENDSELECT|ENDTRY|'
r'ENHANCEMENT|EVENTS|EXCEPTIONS|EXIT|EXPORT|EXPORTING|EXTRACT|'
r'FETCH|FIELDS?|FIND|FOR|FORM|FORMAT|FREE|FROM|'
r'HIDE|'
r'ID|IF|IMPORT|IMPLEMENTATION|IMPORTING|IN|INCLUDE|INCLUDING|'
r'INDEX|INFOTYPES|INITIALIZATION|INTERFACE|INTERFACES|INTO|'
r'LENGTH|LINES|LOAD|LOCAL|'
r'JOIN|'
r'KEY|'
r'MAXIMUM|MESSAGE|METHOD[S]?|MINIMUM|MODULE|MODIFY|MOVE|MULTIPLY|'
r'NODES|'
r'OBLIGATORY|OF|OFF|ON|OVERLAY|'
r'PACK|PARAMETERS|PERCENTAGE|POSITION|PROGRAM|PROVIDE|PUBLIC|PUT|'
r'RAISE|RAISING|RANGES|READ|RECEIVE|REFRESH|REJECT|REPORT|RESERVE|'
r'RESUME|RETRY|RETURN|RETURNING|RIGHT|ROLLBACK|'
r'SCROLL|SEARCH|SELECT|SHIFT|SINGLE|SKIP|SORT|SPLIT|STATICS|STOP|'
r'SUBMIT|SUBTRACT|SUM|SUMMARY|SUMMING|SUPPLY|'
r'TABLE|TABLES|TIMES|TITLE|TO|TOP-OF-PAGE|TRANSFER|TRANSLATE|TRY|TYPES|'
r'ULINE|UNDER|UNPACK|UPDATE|USING|'
r'VALUE|VALUES|VIA|'
r'WAIT|WHEN|WHERE|WHILE|WITH|WINDOW|WRITE)\b', Keyword),
# builtins
(r'(abs|acos|asin|atan|'
r'boolc|boolx|bit_set|'
r'char_off|charlen|ceil|cmax|cmin|condense|contains|'
r'contains_any_of|contains_any_not_of|concat_lines_of|cos|cosh|'
r'count|count_any_of|count_any_not_of|'
r'dbmaxlen|distance|'
r'escape|exp|'
r'find|find_end|find_any_of|find_any_not_of|floor|frac|from_mixed|'
r'insert|'
r'lines|log|log10|'
r'match|matches|'
r'nmax|nmin|numofchar|'
r'repeat|replace|rescale|reverse|round|'
r'segment|shift_left|shift_right|sign|sin|sinh|sqrt|strlen|'
r'substring|substring_after|substring_from|substring_before|substring_to|'
r'tan|tanh|to_upper|to_lower|to_mixed|translate|trunc|'
r'xstrlen)(\()\b', bygroups(Name.Builtin, Punctuation)),
(r'&[0-9]', Name),
(r'[0-9]+', Number.Integer),
# operators which look like variable names before
# parsing variable names.
(r'(?<=(\s|.))(AND|EQ|NE|GT|LT|GE|LE|CO|CN|CA|NA|CS|NOT|NS|CP|NP|'
r'BYTE-CO|BYTE-CN|BYTE-CA|BYTE-NA|BYTE-CS|BYTE-NS|'
r'IS\s+(NOT\s+)?(INITIAL|ASSIGNED|REQUESTED|BOUND))\b', Operator),
include('variable-names'),
            # standard operators after variable names,
            # because < and > are part of field symbols.
(r'[?*<>=\-+]', Operator),
(r"'(''|[^'])*'", String.Single),
(r'[/;:()\[\],\.]', Punctuation)
],
}
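# Illustrative sketch, not part of the upstream Pygments module: the
# ABAPLexer above can be exercised through the generic Lexer.get_tokens()
# API.  The sample statement below is invented for demonstration only.
def _demo_abap_tokens():
    lexer = ABAPLexer()
    sample = u"WRITE 'hello world'.\n"
    # Returns a list of (tokentype, text) pairs produced by the lexer.
    return list(lexer.get_tokens(sample))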
class NewspeakLexer(RegexLexer):
"""
    For `Newspeak <http://newspeaklanguage.org/>`_ syntax.
"""
name = 'Newspeak'
filenames = ['*.ns2']
aliases = ['newspeak', ]
mimetypes = ['text/x-newspeak']
tokens = {
'root' : [
(r'\b(Newsqueak2)\b',Keyword.Declaration),
(r"'[^']*'",String),
(r'\b(class)(\s+)([a-zA-Z0-9_]+)(\s*)',
bygroups(Keyword.Declaration,Text,Name.Class,Text)),
(r'\b(mixin|self|super|private|public|protected|nil|true|false)\b',
Keyword),
(r'([a-zA-Z0-9_]+\:)(\s*)([a-zA-Z_]\w+)',
bygroups(Name.Function,Text,Name.Variable)),
(r'([a-zA-Z0-9_]+)(\s*)(=)',
bygroups(Name.Attribute,Text,Operator)),
(r'<[a-zA-Z0-9_]+>', Comment.Special),
include('expressionstat'),
include('whitespace')
],
'expressionstat': [
(r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
(r'\d+', Number.Integer),
(r':\w+',Name.Variable),
(r'(\w+)(::)', bygroups(Name.Variable, Operator)),
(r'\w+:', Name.Function),
(r'\w+', Name.Variable),
(r'\(|\)', Punctuation),
(r'\[|\]', Punctuation),
(r'\{|\}', Punctuation),
(r'(\^|\+|\/|~|\*|<|>|=|@|%|\||&|\?|!|,|-|:)', Operator),
(r'\.|;', Punctuation),
include('whitespace'),
include('literals'),
],
'literals': [
(r'\$.', String),
(r"'[^']*'", String),
(r"#'[^']*'", String.Symbol),
(r"#\w+:?", String.Symbol),
(r"#(\+|\/|~|\*|<|>|=|@|%|\||&|\?|!|,|-)+", String.Symbol)
],
'whitespace' : [
(r'\s+', Text),
(r'"[^"]*"', Comment)
]
}
class GherkinLexer(RegexLexer):
"""
    For `Gherkin <http://github.com/aslakhellesoy/gherkin/>`_ syntax.
*New in Pygments 1.2.*
"""
name = 'Gherkin'
aliases = ['Cucumber', 'cucumber', 'Gherkin', 'gherkin']
filenames = ['*.feature']
mimetypes = ['text/x-gherkin']
feature_keywords = ur'^(기능|機能|功能|フィーチャ|خاصية|תכונה|Функціонал|Функционалност|Функционал|Фича|Особина|Могућност|Özellik|Właściwość|Tính năng|Trajto|Savybė|Požiadavka|Požadavek|Osobina|Ominaisuus|Omadus|OH HAI|Mogućnost|Mogucnost|Jellemző|Fīča|Funzionalità|Funktionalität|Funkcionalnost|Funkcionalitāte|Funcționalitate|Functionaliteit|Functionalitate|Funcionalitat|Funcionalidade|Fonctionnalité|Fitur|Feature|Egenskap|Egenskab|Crikey|Característica|Arwedd)(:)(.*)$'
feature_element_keywords = ur'^(\s*)(시나리오 개요|시나리오|배경|背景|場景大綱|場景|场景大纲|场景|劇本大綱|劇本|テンプレ|シナリオテンプレート|シナリオテンプレ|シナリオアウトライン|シナリオ|سيناريو مخطط|سيناريو|الخلفية|תרחיש|תבנית תרחיש|רקע|Тарих|Сценарій|Сценарио|Сценарий структураси|Сценарий|Структура сценарію|Структура сценарија|Структура сценария|Скица|Рамка на сценарий|Пример|Предыстория|Предистория|Позадина|Передумова|Основа|Концепт|Контекст|Założenia|Wharrimean is|Tình huống|The thing of it is|Tausta|Taust|Tapausaihio|Tapaus|Szenariogrundriss|Szenario|Szablon scenariusza|Stsenaarium|Struktura scenarija|Skica|Skenario konsep|Skenario|Situācija|Senaryo taslağı|Senaryo|Scénář|Scénario|Schema dello scenario|Scenārijs pēc parauga|Scenārijs|Scenár|Scenaro|Scenariusz|Scenariul de şablon|Scenariul de sablon|Scenariu|Scenario Outline|Scenario Amlinellol|Scenario|Scenarijus|Scenarijaus šablonas|Scenarij|Scenarie|Rerefons|Raamstsenaarium|Primer|Pozadí|Pozadina|Pozadie|Plan du scénario|Plan du Scénario|Osnova scénáře|Osnova|Náčrt Scénáře|Náčrt Scenáru|Mate|MISHUN SRSLY|MISHUN|Kịch bản|Konturo de la scenaro|Kontext|Konteksts|Kontekstas|Kontekst|Koncept|Khung tình huống|Khung kịch bản|Háttér|Grundlage|Geçmiş|Forgatókönyv vázlat|Forgatókönyv|Fono|Esquema do Cenário|Esquema do Cenario|Esquema del escenario|Esquema de l\'escenari|Escenario|Escenari|Dis is what went down|Dasar|Contexto|Contexte|Contesto|Condiţii|Conditii|Cenário|Cenario|Cefndir|Bối cảnh|Blokes|Bakgrunn|Bakgrund|Baggrund|Background|B4|Antecedents|Antecedentes|All y\'all|Achtergrond|Abstrakt Scenario|Abstract Scenario)(:)(.*)$'
examples_keywords = ur'^(\s*)(예|例子|例|サンプル|امثلة|דוגמאות|Сценарији|Примери|Приклади|Мисоллар|Значения|Örnekler|Voorbeelden|Variantai|Tapaukset|Scenarios|Scenariji|Scenarijai|Příklady|Példák|Príklady|Przykłady|Primjeri|Primeri|Piemēri|Pavyzdžiai|Paraugs|Juhtumid|Exemplos|Exemples|Exemplele|Exempel|Examples|Esempi|Enghreifftiau|Ekzemploj|Eksempler|Ejemplos|EXAMPLZ|Dữ liệu|Contoh|Cobber|Beispiele)(:)(.*)$'
step_keywords = ur'^(\s*)(하지만|조건|먼저|만일|만약|단|그리고|그러면|那麼|那么|而且|當|当|前提|假設|假如|但是|但し|並且|もし|ならば|ただし|しかし|かつ|و |متى |لكن |عندما |ثم |بفرض |اذاً |כאשר |וגם |בהינתן |אזי |אז |אבל |Якщо |Унда |То |Припустимо, що |Припустимо |Онда |Но |Нехай |Лекин |Когато |Када |Кад |К тому же |И |Задато |Задати |Задате |Если |Допустим |Дадено |Ва |Бирок |Аммо |Али |Але |Агар |А |І |Și |És |Zatati |Zakładając |Zadato |Zadate |Zadano |Zadani |Zadan |Youse know when youse got |Youse know like when |Yna |Ya know how |Ya gotta |Y |Wun |Wtedy |When y\'all |When |Wenn |WEN |Và |Ve |Und |Un |Thì |Then y\'all |Then |Tapi |Tak |Tada |Tad |Så |Stel |Soit |Siis |Si |Sed |Se |Quando |Quand |Quan |Pryd |Pokud |Pokiaľ |Però |Pero |Pak |Oraz |Onda |Ond |Oletetaan |Og |Och |O zaman |Når |När |Niin |Nhưng |N |Mutta |Men |Mas |Maka |Majd |Mais |Maar |Ma |Lorsque |Lorsqu\'|Kun |Kuid |Kui |Khi |Keď |Ketika |Když |Kaj |Kai |Kada |Kad |Jeżeli |Ja |Ir |I CAN HAZ |I |Ha |Givun |Givet |Given y\'all |Given |Gitt |Gegeven |Gegeben sei |Fakat |Eğer ki |Etant donné |Et |Então |Entonces |Entao |En |Eeldades |E |Duota |Dun |Donitaĵo |Donat |Donada |Do |Diyelim ki |Dengan |Den youse gotta |De |Dato |Dar |Dann |Dan |Dado |Dacă |Daca |DEN |Când |Cuando |Cho |Cept |Cand |Cal |But y\'all |But |Buh |Biết |Bet |BUT |Atès |Atunci |Atesa |Anrhegedig a |Angenommen |And y\'all |And |An |Ama |Als |Alors |Allora |Ali |Aleshores |Ale |Akkor |Aber |AN |A také |A |\* )'
tokens = {
'comments': [
(r'#.*$', Comment),
],
'feature_elements' : [
(step_keywords, Keyword, "step_content_stack"),
include('comments'),
(r"(\s|.)", Name.Function),
],
'feature_elements_on_stack' : [
(step_keywords, Keyword, "#pop:2"),
include('comments'),
(r"(\s|.)", Name.Function),
],
'examples_table': [
(r"\s+\|", Keyword, 'examples_table_header'),
include('comments'),
(r"(\s|.)", Name.Function),
],
'examples_table_header': [
(r"\s+\|\s*$", Keyword, "#pop:2"),
include('comments'),
(r"\s*\|", Keyword),
(r"[^\|]", Name.Variable),
],
'scenario_sections_on_stack': [
(feature_element_keywords, bygroups(Name.Function, Keyword, Keyword, Name.Function), "feature_elements_on_stack"),
],
'narrative': [
include('scenario_sections_on_stack'),
(r"(\s|.)", Name.Function),
],
'table_vars': [
(r'(<[^>]+>)', Name.Variable),
],
'numbers': [
(r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', String),
],
'string': [
include('table_vars'),
(r'(\s|.)', String),
],
'py_string': [
(r'"""', Keyword, "#pop"),
include('string'),
],
'step_content_root':[
(r"$", Keyword, "#pop"),
include('step_content'),
],
'step_content_stack':[
(r"$", Keyword, "#pop:2"),
include('step_content'),
],
'step_content':[
(r'"', Name.Function, "double_string"),
include('table_vars'),
include('numbers'),
include('comments'),
(r'(\s|.)', Name.Function),
],
'table_content': [
(r"\s+\|\s*$", Keyword, "#pop"),
include('comments'),
(r"\s*\|", Keyword),
include('string'),
],
'double_string': [
(r'"', Name.Function, "#pop"),
include('string'),
],
'root': [
(r'\n', Name.Function),
include('comments'),
(r'"""', Keyword, "py_string"),
(r'\s+\|', Keyword, 'table_content'),
(r'"', Name.Function, "double_string"),
include('table_vars'),
include('numbers'),
(r'(\s*)(@[^@\r\n\t ]+)', bygroups(Name.Function, Name.Tag)),
(step_keywords, bygroups(Name.Function, Keyword), "step_content_root"),
(feature_keywords, bygroups(Keyword, Keyword, Name.Function), 'narrative'),
(feature_element_keywords, bygroups(Name.Function, Keyword, Keyword, Name.Function), "feature_elements"),
(examples_keywords, bygroups(Name.Function, Keyword, Keyword, Name.Function), "examples_table"),
(r'(\s|.)', Name.Function),
]
}
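# Illustrative sketch, not part of the upstream Pygments module: a small,
# invented feature file rendered with the GherkinLexer above through the
# standard pygments.highlight() API.
def _demo_gherkin_highlight():
    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    sample = u"Feature: demo\n  Scenario: one step\n    Given something\n"
    # Returns ANSI-colourised text built from the lexer's token stream.
    return highlight(sample, GherkinLexer(), TerminalFormatter())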
class AsymptoteLexer(RegexLexer):
"""
For `Asymptote <http://asymptote.sf.net/>`_ source code.
*New in Pygments 1.2.*
"""
name = 'Asymptote'
aliases = ['asy', 'asymptote']
filenames = ['*.asy']
mimetypes = ['text/x-asymptote']
#: optional Comment or Whitespace
_ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
tokens = {
'whitespace': [
(r'\n', Text),
(r'\s+', Text),
(r'\\\n', Text), # line continuation
(r'//(\n|(.|\n)*?[^\\]\n)', Comment),
(r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment),
],
'statements': [
# simple string (TeX friendly)
(r'"(\\\\|\\"|[^"])*"', String),
# C style string (with character escapes)
(r"'", String, 'string'),
(r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
(r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
(r'0x[0-9a-fA-F]+[Ll]?', Number.Hex),
(r'0[0-7]+[Ll]?', Number.Oct),
(r'\d+[Ll]?', Number.Integer),
(r'[~!%^&*+=|?:<>/-]', Operator),
(r'[()\[\],.]', Punctuation),
(r'\b(case)(.+?)(:)', bygroups(Keyword, using(this), Text)),
(r'(and|controls|tension|atleast|curl|if|else|while|for|do|'
r'return|break|continue|struct|typedef|new|access|import|'
r'unravel|from|include|quote|static|public|private|restricted|'
r'this|explicit|true|false|null|cycle|newframe|operator)\b', Keyword),
            # Since an asy type name can also be an asy function name,
            # the following emits Keyword.Type only when the match is
            # followed by one or more spaces and a letter (i.e. it looks
            # like a declaration).  This heuristic is not perfect.
(r'(Braid|FitResult|Label|Legend|TreeNode|abscissa|arc|arrowhead|'
r'binarytree|binarytreeNode|block|bool|bool3|bounds|bqe|circle|'
r'conic|coord|coordsys|cputime|ellipse|file|filltype|frame|grid3|'
r'guide|horner|hsv|hyperbola|indexedTransform|int|inversion|key|'
r'light|line|linefit|marginT|marker|mass|object|pair|parabola|path|'
r'path3|pen|picture|point|position|projection|real|revolution|'
r'scaleT|scientific|segment|side|slice|splitface|string|surface|'
r'tensionSpecifier|ticklocate|ticksgridT|tickvalues|transform|'
r'transformation|tree|triangle|trilinear|triple|vector|'
r'vertex|void)(?=([ ]{1,}[a-zA-Z]))', Keyword.Type),
            # Now the asy type names that are not also function names
            # (user-defined ones aside); possibly redundant.
(r'(Braid|FitResult|TreeNode|abscissa|arrowhead|block|bool|bool3|'
r'bounds|coord|frame|guide|horner|int|linefit|marginT|pair|pen|'
r'picture|position|real|revolution|slice|splitface|ticksgridT|'
r'tickvalues|tree|triple|vertex|void)\b', Keyword.Type),
('[a-zA-Z_][a-zA-Z0-9_]*:(?!:)', Name.Label),
('[a-zA-Z_][a-zA-Z0-9_]*', Name),
],
'root': [
include('whitespace'),
# functions
(r'((?:[a-zA-Z0-9_*\s])+?(?:\s|[*]))' # return arguments
r'([a-zA-Z_][a-zA-Z0-9_]*)' # method name
r'(\s*\([^;]*?\))' # signature
r'(' + _ws + r')({)',
bygroups(using(this), Name.Function, using(this), using(this),
Punctuation),
'function'),
# function declarations
(r'((?:[a-zA-Z0-9_*\s])+?(?:\s|[*]))' # return arguments
r'([a-zA-Z_][a-zA-Z0-9_]*)' # method name
r'(\s*\([^;]*?\))' # signature
r'(' + _ws + r')(;)',
bygroups(using(this), Name.Function, using(this), using(this),
Punctuation)),
('', Text, 'statement'),
],
'statement' : [
include('whitespace'),
include('statements'),
('[{}]', Punctuation),
(';', Punctuation, '#pop'),
],
'function': [
include('whitespace'),
include('statements'),
(';', Punctuation),
('{', Punctuation, '#push'),
('}', Punctuation, '#pop'),
],
'string': [
(r"'", String, '#pop'),
(r'\\([\\abfnrtv"\'?]|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
(r'\n', String),
(r"[^\\'\n]+", String), # all other characters
(r'\\\n', String),
(r'\\n', String), # line continuation
(r'\\', String), # stray backslash
]
}
def get_tokens_unprocessed(self, text):
from pygments.lexers._asybuiltins import ASYFUNCNAME, ASYVARNAME
for index, token, value in \
RegexLexer.get_tokens_unprocessed(self, text):
if token is Name and value in ASYFUNCNAME:
token = Name.Function
elif token is Name and value in ASYVARNAME:
token = Name.Variable
yield index, token, value
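# Illustrative sketch, not part of the upstream Pygments module: because
# get_tokens_unprocessed() above remaps plain Name tokens found in
# ASYFUNCNAME/ASYVARNAME, recognised Asymptote builtins surface as
# Name.Function or Name.Variable.  The sample source is invented and the
# exact remapping depends on the _asybuiltins tables.
def _demo_asymptote_token_types():
    lexer = AsymptoteLexer()
    sample = u"draw((0,0)--(1,1));\n"
    # Collect the distinct token types emitted for the sample.
    return set(tok for tok, _ in lexer.get_tokens(sample))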
class PostScriptLexer(RegexLexer):
"""
Lexer for PostScript files.
The PostScript Language Reference published by Adobe at
<http://partners.adobe.com/public/developer/en/ps/PLRM.pdf>
is the authority for this.
*New in Pygments 1.4.*
"""
name = 'PostScript'
aliases = ['postscript']
filenames = ['*.ps', '*.eps']
mimetypes = ['application/postscript']
delimiter = r'\(\)\<\>\[\]\{\}\/\%\s'
delimiter_end = r'(?=[%s])' % delimiter
valid_name_chars = r'[^%s]' % delimiter
valid_name = r"%s+%s" % (valid_name_chars, delimiter_end)
tokens = {
'root': [
# All comment types
(r'^%!.+\n', Comment.Preproc),
(r'%%.*\n', Comment.Special),
(r'(^%.*\n){2,}', Comment.Multiline),
(r'%.*\n', Comment.Single),
# String literals are awkward; enter separate state.
(r'\(', String, 'stringliteral'),
(r'[\{\}(\<\<)(\>\>)\[\]]', Punctuation),
# Numbers
(r'<[0-9A-Fa-f]+>' + delimiter_end, Number.Hex),
# Slight abuse: use Oct to signify any explicit base system
(r'[0-9]+\#(\-|\+)?([0-9]+\.?|[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)'
r'((e|E)[0-9]+)?' + delimiter_end, Number.Oct),
(r'(\-|\+)?([0-9]+\.?|[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)((e|E)[0-9]+)?'
+ delimiter_end, Number.Float),
(r'(\-|\+)?[0-9]+' + delimiter_end, Number.Integer),
# References
(r'\/%s' % valid_name, Name.Variable),
# Names
(valid_name, Name.Function), # Anything else is executed
# These keywords taken from
# <http://www.math.ubc.ca/~cass/graphics/manual/pdf/a1.pdf>
# Is there an authoritative list anywhere that doesn't involve
# trawling documentation?
(r'(false|true)' + delimiter_end, Keyword.Constant),
# Conditionals / flow control
(r'(eq|ne|ge|gt|le|lt|and|or|not|if|ifelse|for|forall)'
+ delimiter_end, Keyword.Reserved),
('(abs|add|aload|arc|arcn|array|atan|begin|bind|ceiling|charpath|'
'clip|closepath|concat|concatmatrix|copy|cos|currentlinewidth|'
'currentmatrix|currentpoint|curveto|cvi|cvs|def|defaultmatrix|'
'dict|dictstackoverflow|div|dtransform|dup|end|exch|exec|exit|exp|'
'fill|findfont|floor|get|getinterval|grestore|gsave|gt|'
'identmatrix|idiv|idtransform|index|invertmatrix|itransform|'
'length|lineto|ln|load|log|loop|matrix|mod|moveto|mul|neg|newpath|'
'pathforall|pathbbox|pop|print|pstack|put|quit|rand|rangecheck|'
'rcurveto|repeat|restore|rlineto|rmoveto|roll|rotate|round|run|'
'save|scale|scalefont|setdash|setfont|setgray|setlinecap|'
'setlinejoin|setlinewidth|setmatrix|setrgbcolor|shfill|show|'
'showpage|sin|sqrt|stack|stringwidth|stroke|strokepath|sub|'
'syntaxerror|transform|translate|truncate|typecheck|undefined|'
'undefinedfilename|undefinedresult)' + delimiter_end,
Name.Builtin),
(r'\s+', Text),
],
'stringliteral': [
(r'[^\(\)\\]+', String),
(r'\\', String.Escape, 'escape'),
(r'\(', String, '#push'),
(r'\)', String, '#pop'),
],
'escape': [
(r'([0-8]{3}|n|r|t|b|f|\\|\(|\)|)', String.Escape, '#pop'),
],
}
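# Illustrative sketch, not part of the upstream Pygments module: the
# PostScriptLexer above combined with HtmlFormatter yields HTML markup for
# a tiny, made-up PostScript snippet.
def _demo_postscript_html():
    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    sample = u"%!PS\n/box { 0 0 moveto 10 0 rlineto } def\n"
    return highlight(sample, PostScriptLexer(), HtmlFormatter())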
class AutohotkeyLexer(RegexLexer):
"""
For `autohotkey <http://www.autohotkey.com/>`_ source code.
*New in Pygments 1.4.*
"""
name = 'autohotkey'
aliases = ['ahk']
filenames = ['*.ahk', '*.ahkl']
mimetypes = ['text/x-autohotkey']
flags = re.IGNORECASE | re.DOTALL | re.MULTILINE
tokens = {
'root': [
include('whitespace'),
(r'^\(', String, 'continuation'),
include('comments'),
(r'(^\s*)(\w+)(\s*)(=)',
bygroups(Text.Whitespace, Name, Text.Whitespace, Operator),
'command'),
(r'([\w#@$?\[\]]+)(\s*)(\()',
bygroups(Name.Function, Text.Whitespace, Punctuation),
'parameters'),
include('directives'),
include('labels'),
include('commands'),
include('expressions'),
include('numbers'),
include('literals'),
include('keynames'),
include('keywords'),
],
'command': [
include('comments'),
include('whitespace'),
(r'^\(', String, 'continuation'),
(r'[^\n]*?(?=;*|$)', String, '#pop'),
include('numbers'),
include('literals'),
],
'expressions': [
include('comments'),
include('whitespace'),
include('numbers'),
include('literals'),
(r'([]\w#@$?[]+)(\s*)(\()',
bygroups(Name.Function, Text.Whitespace, Punctuation),
'parameters'),
(r'A_\w+', Name.Builtin),
(r'%[]\w#@$?[]+?%', Name.Variable),
# blocks: if, else, function definitions
(r'{', Punctuation, 'block'),
# parameters in function calls
],
'literals': [
(r'"', String, 'string'),
(r'A_\w+', Name.Builtin),
(r'%[]\w#@$?[]+?%', Name.Variable),
(r'[-~!%^&*+|?:<>/=]=?', Operator, 'expressions'),
(r'==', Operator, 'expressions'),
('[{()},.%#`;]', Punctuation),
(r'\\', Punctuation),
include('keywords'),
(r'\w+', Text),
],
'string': [
(r'"', String, '#pop'),
(r'""|`.', String.Escape),
(r'[^\`"\n]+', String), # all other characters
],
'block': [
include('root'),
('{', Punctuation, '#push'),
('}', Punctuation, '#pop'),
],
'parameters': [
(r'\)', Punctuation, '#pop'),
(r'\(', Punctuation, '#push'),
include('numbers'),
include('literals'),
include('whitespace'),
],
'keywords': [
(r'(static|global|local)\b', Keyword.Type),
(r'(if|else|and|or)\b', Keyword.Reserved),
],
'directives': [
(r'#\w+?\s', Keyword),
],
'labels': [
# hotkeys and labels
# technically, hotkey names are limited to named keys and buttons
(r'(^\s*)([^:\s]+?:{1,2})', bygroups(Text.Whitespace, Name.Label)),
# hotstrings
(r'(^\s*)(::[]\w#@$?[]+?::)', bygroups(Text.Whitespace, Name.Label)),
],
'comments': [
(r'^;+.*?$', Comment.Single), # beginning of line comments
(r'(?<=\s);+.*?$', Comment.Single), # end of line comments
(r'^/\*.*?\n\*/', Comment.Multiline),
(r'(?<!\n)/\*.*?\n\*/', Error), # must be at start of line
],
'whitespace': [
(r'[ \t]+', Text.Whitespace),
],
'numbers': [
(r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
(r'\d+[eE][+-]?[0-9]+', Number.Float),
(r'0[0-7]+', Number.Oct),
(r'0[xX][a-fA-F0-9]+', Number.Hex),
(r'\d+L', Number.Integer.Long),
(r'\d+', Number.Integer)
],
'continuation': [
(r'\n\)', Punctuation, '#pop'),
(r'\s[^\n\)]+', String),
],
'keynames': [
(r'\[[^\]]+\]', Keyword, 'keynames')
],
'commands': [
(r'(autotrim|blockinput|break|click|'
r'clipwait|continue|control|'
r'controlclick|controlfocus|controlget|'
r'controlgetfocus|controlgetpos|controlgettext|'
r'controlmove|controlsend|controlsendraw|'
r'controlsettext|coordmode|critical|'
r'detecthiddentext|detecthiddenwindows|'
r'dllcall|drive|'
r'driveget|drivespacefree|'
r'else|envadd|envdiv|'
r'envget|envmult|envset|'
r'envsub|envupdate|exit|'
r'exitapp|fileappend|filecopy|'
r'filecopydir|filecreatedir|filecreateshortcut|'
r'filedelete|filegetattrib|filegetshortcut|'
r'filegetsize|filegettime|filegetversion|'
r'fileinstall|filemove|filemovedir|'
r'fileread|filereadline|filerecycle|'
r'filerecycleempty|fileremovedir|fileselectfile|'
r'fileselectfolder|filesetattrib|filesettime|'
r'formattime|gosub|'
r'goto|groupactivate|groupadd|'
r'groupclose|groupdeactivate|gui|'
r'guicontrol|guicontrolget|hotkey|'
r'ifexist|ifgreater|ifgreaterorequal|'
r'ifinstring|ifless|iflessorequal|'
r'ifmsgbox|ifnotequal|ifnotexist|'
r'ifnotinstring|ifwinactive|ifwinexist|'
r'ifwinnotactive|ifwinnotexist|imagesearch|'
r'inidelete|iniread|iniwrite|'
r'input|inputbox|keyhistory|'
r'keywait|listhotkeys|listlines|'
r'listvars|loop|'
r'menu|mouseclick|mouseclickdrag|'
r'mousegetpos|mousemove|msgbox|'
r'onmessage|onexit|outputdebug|'
r'pixelgetcolor|pixelsearch|postmessage|'
r'process|progress|random|'
r'regexmatch|regexreplace|registercallback|'
r'regdelete|regread|regwrite|'
r'reload|repeat|return|'
r'run|runas|runwait|'
r'send|sendevent|sendinput|'
r'sendmessage|sendmode|sendplay|'
r'sendraw|setbatchlines|setcapslockstate|'
r'setcontroldelay|setdefaultmousespeed|setenv|'
r'setformat|setkeydelay|setmousedelay|'
r'setnumlockstate|setscrolllockstate|'
r'setstorecapslockmode|'
r'settimer|settitlematchmode|setwindelay|'
r'setworkingdir|shutdown|sleep|'
r'sort|soundbeep|soundget|'
r'soundgetwavevolume|soundplay|soundset|'
r'soundsetwavevolume|splashimage|splashtextoff|'
r'splashtexton|splitpath|statusbargettext|'
r'statusbarwait|stringcasesense|stringgetpos|'
r'stringleft|stringlen|stringlower|'
r'stringmid|stringreplace|stringright|'
r'stringsplit|stringtrimleft|stringtrimright|'
r'stringupper|suspend|sysget|'
r'thread|tooltip|transform|'
r'traytip|urldownloadtofile|while|'
r'varsetcapacity|'
r'winactivate|winactivatebottom|winclose|'
r'winget|wingetactivestats|wingetactivetitle|'
r'wingetclass|wingetpos|wingettext|'
r'wingettitle|winhide|winkill|'
r'winmaximize|winmenuselectitem|winminimize|'
r'winminimizeall|winminimizeallundo|winmove|'
r'winrestore|winset|winsettitle|'
r'winshow|winwait|winwaitactive|'
             r'winwaitclose|winwaitnotactive|'
             r'true|false|NULL)\b', Keyword, 'command'),
],
}
class MaqlLexer(RegexLexer):
"""
Lexer for `GoodData MAQL <https://secure.gooddata.com/docs/html/advanced.metric.tutorial.html>`_
scripts.
*New in Pygments 1.4.*
"""
name = 'MAQL'
aliases = ['maql']
filenames = ['*.maql']
mimetypes = ['text/x-gooddata-maql','application/x-gooddata-maql']
flags = re.IGNORECASE
tokens = {
'root': [
# IDENTITY
(r'IDENTIFIER\b', Name.Builtin),
# IDENTIFIER
(r'\{[^}]+\}', Name.Variable),
# NUMBER
(r'[0-9]+(?:\.[0-9]+)?(?:[eE][+-]?[0-9]{1,3})?', Literal.Number),
# STRING
(r'"', Literal.String, 'string-literal'),
# RELATION
(r'\<\>|\!\=', Operator),
(r'\=|\>\=|\>|\<\=|\<', Operator),
# :=
(r'\:\=', Operator),
# OBJECT
(r'\[[^]]+\]', Name.Variable.Class),
# keywords
(r'(DIMENSIONS?|BOTTOM|METRIC|COUNT|OTHER|FACT|WITH|TOP|OR|'
r'ATTRIBUTE|CREATE|PARENT|FALSE|ROWS?|FROM|ALL|AS|PF|'
r'COLUMNS?|DEFINE|REPORT|LIMIT|TABLE|LIKE|AND|BY|'
r'BETWEEN|EXCEPT|SELECT|MATCH|WHERE|TRUE|FOR|IN|'
r'WITHOUT|FILTER|ALIAS|ORDER|FACT|WHEN|NOT|ON|'
r'KEYS|KEY|FULLSET|PRIMARY|LABELS|LABEL|VISUAL|'
r'TITLE|DESCRIPTION|FOLDER|ALTER|DROP|ADD|DATASET|'
r'DATATYPE|INT|BIGINT|DOUBLE|DATE|VARCHAR|DECIMAL|'
r'SYNCHRONIZE|TYPE|DEFAULT|ORDER|ASC|DESC|HYPERLINK|'
r'INCLUDE|TEMPLATE|MODIFY)\b', Keyword),
# FUNCNAME
(r'[a-zA-Z]\w*\b', Name.Function),
# Comments
(r'#.*', Comment.Single),
# Punctuation
(r'[,;\(\)]', Token.Punctuation),
# Space is not significant
(r'\s+', Text)
],
'string-literal': [
(r'\\[tnrfbae"\\]', String.Escape),
(r'"', Literal.String, '#pop'),
(r'[^\\"]+', Literal.String)
],
}
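# Illustrative sketch, not part of the upstream Pygments module: tokenising
# an invented MAQL-like metric definition with the MaqlLexer above; the
# sample is not taken from GoodData documentation.
def _demo_maql_tokens():
    sample = u'SELECT SUM([Amount]) WHERE {attr.year} = "2011"\n'
    return list(MaqlLexer().get_tokens(sample))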
class GoodDataCLLexer(RegexLexer):
"""
Lexer for `GoodData-CL <http://github.com/gooddata/GoodData-CL/raw/master/cli/src/main/resources/com/gooddata/processor/COMMANDS.txt>`_
script files.
*New in Pygments 1.4.*
"""
name = 'GoodData-CL'
aliases = ['gooddata-cl']
filenames = ['*.gdc']
mimetypes = ['text/x-gooddata-cl']
flags = re.IGNORECASE
tokens = {
'root': [
# Comments
(r'#.*', Comment.Single),
# Function call
(r'[a-zA-Z]\w*', Name.Function),
# Argument list
(r'\(', Token.Punctuation, 'args-list'),
# Punctuation
(r';', Token.Punctuation),
# Space is not significant
(r'\s+', Text)
],
'args-list': [
(r'\)', Token.Punctuation, '#pop'),
(r',', Token.Punctuation),
(r'[a-zA-Z]\w*', Name.Variable),
(r'=', Operator),
(r'"', Literal.String, 'string-literal'),
(r'[0-9]+(?:\.[0-9]+)?(?:[eE][+-]?[0-9]{1,3})?', Literal.Number),
# Space is not significant
(r'\s', Text)
],
'string-literal': [
(r'\\[tnrfbae"\\]', String.Escape),
(r'"', Literal.String, '#pop'),
(r'[^\\"]+', Literal.String)
]
}
class ProtoBufLexer(RegexLexer):
"""
Lexer for `Protocol Buffer <http://code.google.com/p/protobuf/>`_
definition files.
*New in Pygments 1.4.*
"""
name = 'Protocol Buffer'
aliases = ['protobuf']
filenames = ['*.proto']
tokens = {
'root': [
(r'[ \t]+', Text),
(r'[,;{}\[\]\(\)]', Punctuation),
(r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
(r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
(r'\b(import|option|optional|required|repeated|default|packed|'
r'ctype|extensions|to|max|rpc|returns)\b', Keyword),
(r'(int32|int64|uint32|uint64|sint32|sint64|'
r'fixed32|fixed64|sfixed32|sfixed64|'
r'float|double|bool|string|bytes)\b', Keyword.Type),
(r'(true|false)\b', Keyword.Constant),
(r'(package)(\s+)', bygroups(Keyword.Namespace, Text), 'package'),
(r'(message|extend)(\s+)',
bygroups(Keyword.Declaration, Text), 'message'),
(r'(enum|group|service)(\s+)',
bygroups(Keyword.Declaration, Text), 'type'),
(r'\".*\"', String),
(r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
(r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
(r'(\-?(inf|nan))', Number.Float),
(r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
(r'0[0-7]+[LlUu]*', Number.Oct),
(r'\d+[LlUu]*', Number.Integer),
(r'[+-=]', Operator),
(r'([a-zA-Z_][a-zA-Z0-9_\.]*)([ \t]*)(=)',
bygroups(Name.Attribute, Text, Operator)),
('[a-zA-Z_][a-zA-Z0-9_\.]*', Name),
],
'package': [
(r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Namespace, '#pop')
],
'message': [
(r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
],
'type': [
(r'[a-zA-Z_][a-zA-Z0-9_]*', Name, '#pop')
],
}
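# Illustrative sketch, not part of the upstream Pygments module: lexing a
# minimal, invented .proto definition with the ProtoBufLexer above via the
# pygments.lex() helper.
def _demo_protobuf_lex():
    from pygments import lex
    sample = u"message Ping {\n  required int32 id = 1;\n}\n"
    return list(lex(sample, ProtoBufLexer()))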
class HybrisLexer(RegexLexer):
"""
For `Hybris <http://www.hybris-lang.org>`_ source code.
*New in Pygments 1.4.*
"""
name = 'Hybris'
aliases = ['hybris', 'hy']
filenames = ['*.hy', '*.hyb']
mimetypes = ['text/x-hybris', 'application/x-hybris']
flags = re.MULTILINE | re.DOTALL
tokens = {
'root': [
# method names
(r'^(\s*(?:function|method|operator\s+)+?)'
r'([a-zA-Z_][a-zA-Z0-9_]*)'
r'(\s*)(\()', bygroups(Name.Function, Text, Operator)),
(r'[^\S\n]+', Text),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
(r'@[a-zA-Z_][a-zA-Z0-9_\.]*', Name.Decorator),
(r'(break|case|catch|next|default|do|else|finally|for|foreach|of|'
r'unless|if|new|return|switch|me|throw|try|while)\b', Keyword),
(r'(extends|private|protected|public|static|throws|function|method|'
r'operator)\b', Keyword.Declaration),
(r'(true|false|null|__FILE__|__LINE__|__VERSION__|__LIB_PATH__|'
r'__INC_PATH__)\b', Keyword.Constant),
(r'(class|struct)(\s+)',
bygroups(Keyword.Declaration, Text), 'class'),
(r'(import|include)(\s+)',
bygroups(Keyword.Namespace, Text), 'import'),
(r'(gc_collect|gc_mm_items|gc_mm_usage|gc_collect_threshold|'
r'urlencode|urldecode|base64encode|base64decode|sha1|crc32|sha2|'
r'md5|md5_file|acos|asin|atan|atan2|ceil|cos|cosh|exp|fabs|floor|'
r'fmod|log|log10|pow|sin|sinh|sqrt|tan|tanh|isint|isfloat|ischar|'
r'isstring|isarray|ismap|isalias|typeof|sizeof|toint|tostring|'
r'fromxml|toxml|binary|pack|load|eval|var_names|var_values|'
r'user_functions|dyn_functions|methods|call|call_method|mknod|'
r'mkfifo|mount|umount2|umount|ticks|usleep|sleep|time|strtime|'
r'strdate|dllopen|dlllink|dllcall|dllcall_argv|dllclose|env|exec|'
r'fork|getpid|wait|popen|pclose|exit|kill|pthread_create|'
r'pthread_create_argv|pthread_exit|pthread_join|pthread_kill|'
r'smtp_send|http_get|http_post|http_download|socket|bind|listen|'
r'accept|getsockname|getpeername|settimeout|connect|server|recv|'
r'send|close|print|println|printf|input|readline|serial_open|'
r'serial_fcntl|serial_get_attr|serial_get_ispeed|serial_get_ospeed|'
r'serial_set_attr|serial_set_ispeed|serial_set_ospeed|serial_write|'
r'serial_read|serial_close|xml_load|xml_parse|fopen|fseek|ftell|'
r'fsize|fread|fwrite|fgets|fclose|file|readdir|pcre_replace|size|'
r'pop|unmap|has|keys|values|length|find|substr|replace|split|trim|'
r'remove|contains|join)\b', Name.Builtin),
(r'(MethodReference|Runner|Dll|Thread|Pipe|Process|Runnable|'
r'CGI|ClientSocket|Socket|ServerSocket|File|Console|Directory|'
r'Exception)\b', Keyword.Type),
(r'"(\\\\|\\"|[^"])*"', String),
(r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char),
(r'(\.)([a-zA-Z_][a-zA-Z0-9_]*)',
bygroups(Operator, Name.Attribute)),
(r'[a-zA-Z_][a-zA-Z0-9_]*:', Name.Label),
(r'[a-zA-Z_\$][a-zA-Z0-9_]*', Name),
(r'[~\^\*!%&\[\]\(\)\{\}<>\|+=:;,./?\-@]+', Operator),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-f]+', Number.Hex),
(r'[0-9]+L?', Number.Integer),
(r'\n', Text),
],
'class': [
(r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
],
'import': [
(r'[a-zA-Z0-9_.]+\*?', Name.Namespace, '#pop')
],
}
| mit |
tdtrask/ansible | test/units/modules/network/onyx/test_onyx_vlan.py | 15 | 3728 | #
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
from ansible.compat.tests.mock import patch
from ansible.modules.network.onyx import onyx_vlan
from units.modules.utils import set_module_args
from .onyx_module import TestOnyxModule, load_fixture
class TestOnyxVlanModule(TestOnyxModule):
module = onyx_vlan
def setUp(self):
super(TestOnyxVlanModule, self).setUp()
self.mock_get_config = patch.object(
onyx_vlan.OnyxVlanModule, "_get_vlan_config")
self.get_config = self.mock_get_config.start()
self.mock_load_config = patch(
'ansible.module_utils.network.onyx.onyx.load_config')
self.load_config = self.mock_load_config.start()
def tearDown(self):
super(TestOnyxVlanModule, self).tearDown()
self.mock_get_config.stop()
self.mock_load_config.stop()
def load_fixtures(self, commands=None, transport='cli'):
config_file = 'onyx_vlan_show.cfg'
self.get_config.return_value = load_fixture(config_file)
self.load_config.return_value = None
def test_vlan_no_change(self):
set_module_args(dict(vlan_id=20))
self.execute_module(changed=False)
def test_vlan_remove_name(self):
set_module_args(dict(vlan_id=10, name=''))
commands = ['vlan 10 no name']
self.execute_module(changed=True, commands=commands)
def test_vlan_change_name(self):
set_module_args(dict(vlan_id=10, name='test-test'))
commands = ['vlan 10 name test-test']
self.execute_module(changed=True, commands=commands)
def test_vlan_create(self):
set_module_args(dict(vlan_id=30))
commands = ['vlan 30', 'exit']
self.execute_module(changed=True, commands=commands)
def test_vlan_create_with_name(self):
set_module_args(dict(vlan_id=30, name='test-test'))
commands = ['vlan 30', 'exit', 'vlan 30 name test-test']
self.execute_module(changed=True, commands=commands)
def test_vlan_remove(self):
set_module_args(dict(vlan_id=20, state='absent'))
commands = ['no vlan 20']
self.execute_module(changed=True, commands=commands)
def test_vlan_remove_not_exist(self):
set_module_args(dict(vlan_id=30, state='absent'))
self.execute_module(changed=False)
def test_vlan_aggregate(self):
aggregate = list()
aggregate.append(dict(vlan_id=30))
aggregate.append(dict(vlan_id=20))
set_module_args(dict(aggregate=aggregate))
commands = ['vlan 30', 'exit']
self.execute_module(changed=True, commands=commands)
def test_vlan_aggregate_purge(self):
aggregate = list()
aggregate.append(dict(vlan_id=30))
aggregate.append(dict(vlan_id=20))
set_module_args(dict(aggregate=aggregate, purge=True))
commands = ['vlan 30', 'exit', 'no vlan 10', 'no vlan 1']
self.execute_module(changed=True, commands=commands)
| gpl-3.0 |
wunderlins/learning | python/zodb/lib/osx/ZODB/MappingStorage.py | 2 | 11501 | ##############################################################################
#
# Copyright (c) Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE
#
##############################################################################
"""A simple in-memory mapping-based ZODB storage
This storage provides an example implementation of a fairly full
storage without distracting storage details.
"""
import BTrees
import time
import threading
import ZODB.BaseStorage
import ZODB.interfaces
import ZODB.POSException
import ZODB.TimeStamp
import ZODB.utils
import zope.interface
@zope.interface.implementer(
ZODB.interfaces.IStorage,
ZODB.interfaces.IStorageIteration,
)
class MappingStorage(object):
def __init__(self, name='MappingStorage'):
self.__name__ = name
self._data = {} # {oid->{tid->pickle}}
self._transactions = BTrees.OOBTree.OOBTree() # {tid->TransactionRecord}
self._ltid = ZODB.utils.z64
self._last_pack = None
_lock = threading.RLock()
self._lock_acquire = _lock.acquire
self._lock_release = _lock.release
self._commit_lock = threading.Lock()
self._opened = True
self._transaction = None
self._oid = 0
######################################################################
# Preconditions:
def opened(self):
"""The storage is open
"""
return self._opened
def not_in_transaction(self):
"""The storage is not committing a transaction
"""
return self._transaction is None
#
######################################################################
# testing framework (lame)
def cleanup(self):
pass
# ZODB.interfaces.IStorage
@ZODB.utils.locked
def close(self):
self._opened = False
# ZODB.interfaces.IStorage
def getName(self):
return self.__name__
# ZODB.interfaces.IStorage
@ZODB.utils.locked(opened)
def getSize(self):
size = 0
for oid, tid_data in self._data.items():
size += 50
for tid, pickle in tid_data.items():
size += 100+len(pickle)
return size
# ZEO.interfaces.IServeable
@ZODB.utils.locked(opened)
def getTid(self, oid):
tid_data = self._data.get(oid)
if tid_data:
return tid_data.maxKey()
raise ZODB.POSException.POSKeyError(oid)
# ZODB.interfaces.IStorage
@ZODB.utils.locked(opened)
def history(self, oid, size=1):
tid_data = self._data.get(oid)
if not tid_data:
raise ZODB.POSException.POSKeyError(oid)
tids = tid_data.keys()[-size:]
tids.reverse()
return [
dict(
time = ZODB.TimeStamp.TimeStamp(tid),
tid = tid,
serial = tid,
user_name = self._transactions[tid].user,
description = self._transactions[tid].description,
extension = self._transactions[tid].extension,
size = len(tid_data[tid])
)
for tid in tids]
# ZODB.interfaces.IStorage
def isReadOnly(self):
return False
# ZODB.interfaces.IStorageIteration
def iterator(self, start=None, end=None):
for transaction_record in self._transactions.values(start, end):
yield transaction_record
# ZODB.interfaces.IStorage
@ZODB.utils.locked(opened)
def lastTransaction(self):
return self._ltid
# ZODB.interfaces.IStorage
@ZODB.utils.locked(opened)
def __len__(self):
return len(self._data)
# ZODB.interfaces.IStorage
@ZODB.utils.locked(opened)
def load(self, oid, version=''):
assert not version, "Versions are not supported"
tid_data = self._data.get(oid)
if tid_data:
tid = tid_data.maxKey()
return tid_data[tid], tid
raise ZODB.POSException.POSKeyError(oid)
# ZODB.interfaces.IStorage
@ZODB.utils.locked(opened)
def loadBefore(self, oid, tid):
tid_data = self._data.get(oid)
if tid_data:
before = ZODB.utils.u64(tid)
if not before:
return None
before = ZODB.utils.p64(before-1)
tids_before = tid_data.keys(None, before)
if tids_before:
tids_after = tid_data.keys(tid, None)
tid = tids_before[-1]
return (tid_data[tid], tid,
(tids_after and tids_after[0] or None)
)
else:
raise ZODB.POSException.POSKeyError(oid)
# ZODB.interfaces.IStorage
@ZODB.utils.locked(opened)
def loadSerial(self, oid, serial):
tid_data = self._data.get(oid)
if tid_data:
try:
return tid_data[serial]
except KeyError:
pass
raise ZODB.POSException.POSKeyError(oid, serial)
# ZODB.interfaces.IStorage
@ZODB.utils.locked(opened)
def new_oid(self):
self._oid += 1
return ZODB.utils.p64(self._oid)
# ZODB.interfaces.IStorage
@ZODB.utils.locked(opened)
def pack(self, t, referencesf, gc=True):
if not self._data:
return
stop = ZODB.TimeStamp.TimeStamp(*time.gmtime(t)[:5]+(t%60,)).raw()
if self._last_pack is not None and self._last_pack >= stop:
if self._last_pack == stop:
return
raise ValueError("Already packed to a later time")
self._last_pack = stop
transactions = self._transactions
# Step 1, remove old non-current records
for oid, tid_data in self._data.items():
tids_to_remove = tid_data.keys(None, stop)
if tids_to_remove:
tids_to_remove.pop() # Keep the last, if any
if tids_to_remove:
for tid in tids_to_remove:
del tid_data[tid]
if transactions[tid].pack(oid):
del transactions[tid]
if gc:
# Step 2, GC. A simple sweep+copy
new_data = BTrees.OOBTree.OOBTree()
to_copy = set([ZODB.utils.z64])
while to_copy:
oid = to_copy.pop()
tid_data = self._data.pop(oid)
new_data[oid] = tid_data
for pickle in tid_data.values():
for oid in referencesf(pickle):
if oid in new_data:
continue
to_copy.add(oid)
# Remove left over data from transactions
for oid, tid_data in self._data.items():
for tid in tid_data:
if transactions[tid].pack(oid):
del transactions[tid]
self._data.clear()
self._data.update(new_data)
# ZODB.interfaces.IStorage
def registerDB(self, db):
pass
# ZODB.interfaces.IStorage
def sortKey(self):
return self.__name__
# ZODB.interfaces.IStorage
@ZODB.utils.locked(opened)
def store(self, oid, serial, data, version, transaction):
assert not version, "Versions are not supported"
if transaction is not self._transaction:
raise ZODB.POSException.StorageTransactionError(self, transaction)
old_tid = None
tid_data = self._data.get(oid)
if tid_data:
old_tid = tid_data.maxKey()
if serial != old_tid:
raise ZODB.POSException.ConflictError(
oid=oid, serials=(old_tid, serial), data=data)
self._tdata[oid] = data
return self._tid
checkCurrentSerialInTransaction = (
ZODB.BaseStorage.checkCurrentSerialInTransaction)
# ZODB.interfaces.IStorage
@ZODB.utils.locked(opened)
def tpc_abort(self, transaction):
if transaction is not self._transaction:
return
self._transaction = None
self._commit_lock.release()
# ZODB.interfaces.IStorage
@ZODB.utils.locked(opened)
def tpc_begin(self, transaction, tid=None):
# The tid argument exists to support testing.
if transaction is self._transaction:
raise ZODB.POSException.StorageTransactionError(
"Duplicate tpc_begin calls for same transaction")
self._lock_release()
self._commit_lock.acquire()
self._lock_acquire()
self._transaction = transaction
self._tdata = {}
if tid is None:
if self._transactions:
old_tid = self._transactions.maxKey()
else:
old_tid = None
tid = ZODB.utils.newTid(old_tid)
self._tid = tid
# ZODB.interfaces.IStorage
@ZODB.utils.locked(opened)
def tpc_finish(self, transaction, func = lambda tid: None):
if (transaction is not self._transaction):
raise ZODB.POSException.StorageTransactionError(
"tpc_finish called with wrong transaction")
tid = self._tid
func(tid)
tdata = self._tdata
for oid in tdata:
tid_data = self._data.get(oid)
if tid_data is None:
tid_data = BTrees.OOBTree.OOBucket()
self._data[oid] = tid_data
tid_data[tid] = tdata[oid]
self._ltid = tid
self._transactions[tid] = TransactionRecord(tid, transaction, tdata)
self._transaction = None
del self._tdata
self._commit_lock.release()
# ZEO.interfaces.IServeable
@ZODB.utils.locked(opened)
def tpc_transaction(self):
return self._transaction
# ZODB.interfaces.IStorage
def tpc_vote(self, transaction):
if transaction is not self._transaction:
raise ZODB.POSException.StorageTransactionError(
"tpc_vote called with wrong transaction")
class TransactionRecord:
status = ' '
def __init__(self, tid, transaction, data):
self.tid = tid
self.user = transaction.user
self.description = transaction.description
extension = transaction._extension
self.extension = extension
self.data = data
    _extension = property(lambda self: self.extension,
                          lambda self, v: setattr(self, 'extension', v),
)
def __iter__(self):
for oid, data in self.data.items():
yield DataRecord(oid, self.tid, data)
def pack(self, oid):
self.status = 'p'
del self.data[oid]
return not self.data
@zope.interface.implementer(ZODB.interfaces.IStorageRecordInformation)
class DataRecord(object):
"""Abstract base class for iterator protocol"""
version = ''
data_txn = None
def __init__(self, oid, tid, data):
self.oid = oid
self.tid = tid
self.data = data
def DB(*args, **kw):
return ZODB.DB(MappingStorage(), *args, **kw)
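# Illustrative usage sketch, not part of the original ZODB module: the
# storage is in-memory, so a throwaway database can be built with the DB()
# helper above.  Assumes the `transaction` package that ZODB depends on is
# importable.
def _demo_mapping_storage():
    import transaction
    db = DB()
    conn = db.open()
    conn.root()['counter'] = 1      # any pickleable object can be stored
    transaction.commit()            # drives tpc_begin/tpc_vote/tpc_finish
    value = conn.root()['counter']
    conn.close()
    db.close()
    return value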
| gpl-2.0 |
muhleder/timestrap | timestrap/settings/base.py | 1 | 3215 | import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(
os.path.dirname(
os.path.dirname(
os.path.abspath(__file__)
)
)
)
# SECURITY WARNING: set this to your domain name in production!
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'conf',
'core',
'api',
'widget_tweaks',
'django_filters',
'rest_framework',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites',
]
SITE_ID = 1
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'whitenoise.middleware.WhiteNoiseMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'conf.middleware.site.SiteMiddleware',
'conf.middleware.i18n.I18NMiddleware',
]
ROOT_URLCONF = 'timestrap.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'timestrap/templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'timestrap.wsgi.application'
# Authentication
# https://docs.djangoproject.com/en/1.11/topics/auth/default/
AUTHENTICATION_BACKENDS = [
'conf.backends.auth.SitePermissionBackend',
]
LOGIN_REDIRECT_URL = '/timesheet/'
LOGIN_URL = '/login/'
LOGOUT_REDIRECT_URL = '/login/'
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en'
TIME_ZONE = 'America/New_York'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Email settings
# https://docs.djangoproject.com/en/1.11/topics/email/
EMAIL_BACKEND = 'conf.backends.mail.EmailBackend'
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
STATICFILES_FINDERS = [
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
]
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'timestrap/static'),
]
# Rest framework
# http://www.django-rest-framework.org/
REST_FRAMEWORK = {
'DEFAULT_PERMISSION_CLASSES': [
'api.permissions.TimestrapDjangoModelPermissions'
],
'PAGE_SIZE': 25,
'DEFAULT_FILTER_BACKENDS': [
'django_filters.rest_framework.DjangoFilterBackend',
],
'UNICODE_JSON': False
}
| bsd-2-clause |
nimbusproject/epumgmt | src/python/tests/test_epumgmt_defaults_log_events.py | 1 | 16586 | import os
import shutil
import tempfile
import ConfigParser
import epumgmt.defaults.log_events
from epumgmt.defaults import DefaultParameters
from mocks.common import FakeCommon
class TestAmqpEvents:
def setup(self):
self.runlogdir = tempfile.mkdtemp()
self.vmlogdir = tempfile.mkdtemp()
producer_dir = os.path.join(self.runlogdir, "producer1-container")
os.mkdir(producer_dir)
self.producer_ioncontainer_log = os.path.join(producer_dir, "ioncontainer.log")
with open(self.producer_ioncontainer_log, "w") as container_file:
container_file.write("contents!")
consumer_dir = os.path.join(self.runlogdir, "epuworker_container")
os.mkdir(consumer_dir)
self.consumer_ioncontainer_log = os.path.join(consumer_dir, "ioncontainer.log")
with open(self.consumer_ioncontainer_log, "w") as container_file:
container_file.write("contents!")
self.config = ConfigParser.RawConfigParser()
self.config.add_section("events")
self.config.set("events", "runlogdir", self.runlogdir)
self.config.set("events", "vmlogdir", self.vmlogdir)
self.c = FakeCommon()
self.p = DefaultParameters(self.config, None)
self.amqp_events = epumgmt.defaults.log_events.AmqpEvents(self.p, self.c, None, "")
def teardown(self):
shutil.rmtree(self.runlogdir)
shutil.rmtree(self.vmlogdir)
def test_create_datetime(self):
year = 2011
month = 4
day = 5
hour = 4
minute = 3
second = 7
microsecond = 6
timestamp = { "year": year, "month": month, "day": day,
"hour": hour, "minute": minute, "second": second,
"microsecond": microsecond }
got_datetime = self.amqp_events._create_datetime(timestamp)
print dir(got_datetime)
assert got_datetime.year == year
assert got_datetime.minute == minute
assert got_datetime.day == day
def test_set_workproducerlog_filenames(self):
self.amqp_events._set_workproducerlog_filenames()
assert self.producer_ioncontainer_log in self.amqp_events.workproducerlog_filenames
def test_set_workconsumerlog_filenames(self):
self.amqp_events._set_workconsumerlog_filenames()
assert self.consumer_ioncontainer_log in self.amqp_events.workconsumerlog_filenames
def test_update_log_filenames(self):
self.amqp_events._update_log_filenames()
assert self.consumer_ioncontainer_log in self.amqp_events.workconsumerlog_filenames
assert self.producer_ioncontainer_log in self.amqp_events.workproducerlog_filenames
def test_get_event_datetimes_dict(self):
got_datetimes = self.amqp_events.get_event_datetimes_dict("fake event")
assert got_datetimes == {}
job_begin_id = 545454
job_begin_event = '2011-07-07 11:03:07,532 [cei_events : 32] WARNING:CLOUDYVENT_JSON: {"eventname": "job_begin", "timestamp": {"hour": 18, "month": 7, "second": 7, "microsecond": 532627, "year": 2011, "day": 7, "minute": 4}, "uniquekey": "2c5a9f30-a1b8-4621-ac68-d66ca1cd99f5", "eventsource": "worker", "extra": {"batchid": "xchg1310061055-jobs", "work_amount": 0, "jobid": %s}}\n' % job_begin_id
job_end_id = 424242
job_end_event = '2011-07-07 11:04:07,532 [cei_events : 32] WARNING:CLOUDYVENT_JSON: {"eventname": "job_end", "timestamp": {"hour": 18, "month": 7, "second": 7, "microsecond": 532627, "year": 2011, "day": 7, "minute": 4}, "uniquekey": "2c5a9f30-a1b8-4621-ac68-d66ca1cd99f5", "eventsource": "worker", "extra": {"batchid": "xchg1310061055-jobs", "work_amount": 0, "jobid": %s}}\n' % job_end_id
with open(self.consumer_ioncontainer_log, "w") as container:
container.write(job_begin_event + job_end_event)
job_sent_id = 424244
job_sent_event = '2011-07-07 11:04:07,532 [cei_events : 32] WARNING:CLOUDYVENT_JSON: {"eventname": "job_sent", "timestamp": {"hour": 18, "month": 7, "second": 7, "microsecond": 532627, "year": 2011, "day": 7, "minute": 4}, "uniquekey": "2c5a9f30-a1b8-4621-ac68-d66ca1cd99f5", "eventsource": "worker", "extra": {"batchid": "xchg1310061055-jobs", "work_amount": 0, "jobid": %s}}\n' % job_sent_id
with open(self.producer_ioncontainer_log, "w") as container:
container.write(job_sent_event)
got_datetimes = self.amqp_events.get_event_datetimes_dict("job_end")
assert got_datetimes.has_key(job_end_id)
got_datetimes = self.amqp_events.get_event_datetimes_dict("job_begin")
assert got_datetimes.has_key(job_begin_id)
got_datetimes = self.amqp_events.get_event_datetimes_dict("job_sent")
assert got_datetimes.has_key(job_sent_id)
def test_get_event_datetimes_dict_badfile(self):
job_sent_id = 424244
job_sent_event = '2011-07-07 11:04:07,532 [cei_events : 32] WARNING:CLOUDYVENT_JSON: {"eventname": "job_sent", "timestamp": {"hour": 18, "month": 7, "second": 7, "microsecond": 532627, "year": 2011, "day": 7, "minute": 4}, "uniquekey": "2c5a9f30-a1b8-4621-ac68-d66ca1cd99f5", "eventsource": "worker", "extra": {"batchid": "xchg1310061055-jobs", "work_amount": 0, "jobid": %s}}\n' % job_sent_id
with open(self.producer_ioncontainer_log, "w") as container:
container.write(job_sent_event)
old_mode = os.stat(self.producer_ioncontainer_log).st_mode
os.chmod(self.producer_ioncontainer_log, 0)
got_datetimes = self.amqp_events.get_event_datetimes_dict("job_sent")
failed_to_open = [message for (level, message)
in self.c.log.transcript
if level == "ERROR"
and "Failed to open and read" in message]
assert len(failed_to_open) == 1
os.chmod(self.producer_ioncontainer_log, old_mode)
def test_get_event_count(self):
job_sent_id = 424244
job_sent_event = '2011-07-07 11:04:07,532 [cei_events : 32] WARNING:CLOUDYVENT_JSON: {"eventname": "job_sent", "timestamp": {"hour": 18, "month": 7, "second": 7, "microsecond": 532627, "year": 2011, "day": 7, "minute": 4}, "uniquekey": "2c5a9f30-a1b8-4621-ac68-d66ca1cd99f5", "eventsource": "worker", "extra": {"batchid": "xchg1310061055-jobs", "work_amount": 0, "jobid": %s}}\n' % job_sent_id
with open(self.producer_ioncontainer_log, "w") as container:
container.write(job_sent_event)
count = self.amqp_events.get_event_count("job_sent")
assert count == 1
class TestControllerEvents:
def setup(self):
self.vardir = tempfile.mkdtemp()
self.runlogdir = "runlogs"
self.vmlogdir = "vmlogs"
self.test_event = "testevent 4-5-6 12:12:12.12"
controller_dir = os.path.join(self.vardir, self.runlogdir, "epucontrollerkill_logs")
os.makedirs(controller_dir)
self.controller_ioncontainer_log = os.path.join(controller_dir, "ioncontainer.log")
with open(self.controller_ioncontainer_log, "w") as container_file:
container_file.write(self.test_event)
self.config = ConfigParser.RawConfigParser()
self.config.add_section("events")
self.config.set("events", "runlogdir", self.runlogdir)
self.config.add_section("ecdirs")
self.config.set("ecdirs", "var", self.vardir)
self.p = DefaultParameters(self.config, None)
self.c = FakeCommon(self.p)
self.controller_events = epumgmt.defaults.log_events.ControllerEvents(self.p, self.c, None, "")
def teardown(self):
shutil.rmtree(self.vardir)
def test_set_controllerlog_filenames(self):
self.controller_events._set_controllerlog_filenames()
assert self.controller_ioncontainer_log in self.controller_events.controllerlog_filenames
def test_update_log_filenames(self):
self.controller_events._update_log_filenames()
assert self.controller_ioncontainer_log in self.controller_events.controllerlog_filenames
def test_create_datetime(self):
month = 8
day = 9
year = 1985
hour = 1
minute = 42
second = 43
microsecond = 44
date_string = "%s - %s - %s" % (year, month, day)
time_string = "%s:%s:%s.%s" % (hour, minute, second, microsecond)
datetime = self.controller_events._create_datetime(date_string, time_string)
assert datetime.month == month
def test_get_event_datetimes_list(self):
event = "testevent"
event_list = self.controller_events.get_event_datetimes_list(event)
assert len(event_list) == 1
class TestTorqueEvents:
def setup(self):
self.vardir = tempfile.mkdtemp()
self.runlogdir = "runlogs"
self.vmlogdir = "vmlogs"
self.job_id = 5
self.torque_event = "05/25/2011 15:57:42;0008;PBS_Server;Job;%s.ip-10-203-66-146.ec2.internal;Job Queued at request of [email protected], owner = [email protected], job name = tmp5TEZaU, queue = default" % self.job_id
torque_dir = os.path.join(self.vardir, self.runlogdir, "torque-server_logs")
os.makedirs(torque_dir)
self.torque_log = os.path.join(torque_dir, "torque.log")
with open(self.torque_log, "w") as torque_file:
torque_file.write(self.torque_event)
self.config = ConfigParser.RawConfigParser()
self.config.add_section("events")
self.config.set("events", "runlogdir", self.runlogdir)
self.config.add_section("ecdirs")
self.config.set("ecdirs", "var", self.vardir)
self.p = DefaultParameters(self.config, None)
self.c = FakeCommon(self.p)
self.torque_events = epumgmt.defaults.log_events.TorqueEvents(self.p, self.c, None, "")
def test_set_serverlog_filenames(self):
self.torque_events._set_serverlog_filenames()
assert self.torque_log in self.torque_events.serverlog_filenames
def test_update_log_filenames(self):
self.torque_events._update_log_filenames()
assert self.torque_log in self.torque_events.serverlog_filenames
def test_create_datetime(self):
year = 2011
month = 4
day = 5
hour = 4
minute = 3
second = 7
microsecond = 6
date = "%s/%s/%s" % (month, day, year)
time = "%s:%s:%s" % (hour, minute, second)
got_datetime = self.torque_events._create_datetime(date, time)
print dir(got_datetime)
assert got_datetime.year == year
assert got_datetime.minute == minute
assert got_datetime.day == day
assert got_datetime.hour - epumgmt.defaults.log_events.UTC_OFFSET == hour
def test_get_event_datetimes_dict(self):
# Test behaviour with bad event type
event = "non-existent"
event_times = self.torque_events.get_event_datetimes_dict(event)
assert event_times == {}
# Test correct parsing behaviour
event = "job_sent"
event_times = self.torque_events.get_event_datetimes_dict(event)
assert event_times.has_key(self.job_id)
# Test handling of non-readable file
self.c.log.transcript = []
os.chmod(self.torque_log, 0)
event = "job_sent"
event_times = self.torque_events.get_event_datetimes_dict(event)
errors = [message for (level, message)
in self.c.log.transcript
if level == "ERROR"]
print errors
assert "Failed to open and read from file" in errors[0]
class TestNodeEvents:
def setup(self):
self.vardir = tempfile.mkdtemp()
self.runlogdir = "runlogs"
self.logfiledir = "logs"
self.run_name = "test-run"
self.launch_ctx_id = "imauuidhonest"
self.launch_ctx_done = "2011-06-14 09:33:08,268 [cei_events : 32] WARNING:CLOUDYVENT_JSON: {\"eventname\": \"launch_ctx_done\", \"timestamp\": {\"hour\": 16, \"month\": 6, \"second\": 8, \"microsecond\": 268628, \"year\": 2011, \"day\": 14, \"minute\": 33}, \"uniquekey\": \"8311960b-2802-4976-ae4d-1c4e7e7b9ee5\", \"eventsource\": \"provisioner\", \"extra\": {\"launch_id\": \"e62df223-0d7d-4882-8583-98de1c14f5c8\", \"node_ids\": [\"%s\"]}}" % self.launch_ctx_id
self.vmkill_event_id = "arealid"
self.vmkill_event = "2011-06-14 09:33:08,268 [cei_events : 32] WARNING:CLOUDYVENT_JSON: {\"eventname\": \"fetch_killed\", \"timestamp\": {\"hour\": 16, \"month\": 6, \"second\": 8, \"microsecond\": 268628, \"year\": 2011, \"day\": 14, \"minute\": 33}, \"uniquekey\": \"8311960b-2802-4976-ae4d-1c4e7e7b9ee5\", \"eventsource\": \"provisioner\", \"extra\": {\"launch_id\": \"e62df223-0d7d-4882-8583-98de1c14f5c8\", \"iaas_id\": \"%s\"}}" % self.vmkill_event_id
provisioner_dir = os.path.join(self.vardir, self.runlogdir, self.run_name, "provisioner")
os.makedirs(provisioner_dir)
vmkill_dir = os.path.join(self.vardir, self.logfiledir)
os.makedirs(vmkill_dir)
self.provisioner_log = os.path.join(provisioner_dir, "ioncontainer.log")
with open(self.provisioner_log, "w") as provisioner_file:
provisioner_file.write(self.launch_ctx_done)
self.vmkill_log = os.path.join(vmkill_dir, "--%s-fetchkill-" % self.run_name)
with open(self.vmkill_log, "w") as vmkill_file:
vmkill_file.write(self.vmkill_event)
self.config = ConfigParser.RawConfigParser()
self.config.add_section("events")
self.config.set("events", "runlogdir", self.runlogdir)
self.config.add_section("logging")
self.config.set("logging", "logfiledir", self.logfiledir)
self.config.add_section("ecdirs")
self.config.set("ecdirs", "var", self.vardir)
self.p = DefaultParameters(self.config, None)
self.c = FakeCommon(self.p)
self.node_events = epumgmt.defaults.log_events.NodeEvents(self.p, self.c, None, self.run_name)
def teardown(self):
shutil.rmtree(self.vardir)
def test_create_datetime(self):
year = 2011
month = 4
day = 5
hour = 4
minute = 3
second = 7
microsecond = 6
timestamp = { "year": year, "month": month, "day": day,
"hour": hour, "minute": minute, "second": second,
"microsecond": microsecond }
got_datetime = self.node_events._create_datetime(timestamp)
print dir(got_datetime)
assert got_datetime.year == year
assert got_datetime.minute == minute
assert got_datetime.day == day
def test_set_provisionerlog_filenames(self):
self.node_events._set_provisionerlog_filenames()
assert self.provisioner_log in self.node_events.provisionerlog_filenames
def test_set_vmkilllog_filenames(self):
self.node_events._set_vmkilllog_filenames()
assert self.vmkill_log in self.node_events.vmkilllog_filenames
def test_update_log_filenames(self):
self.node_events._update_log_filenames()
assert self.vmkill_log in self.node_events.vmkilllog_filenames
assert self.provisioner_log in self.node_events.provisionerlog_filenames
def test_get_event_datetimes_dict(self):
event = "fake-event"
event_times = self.node_events.get_event_datetimes_dict(event)
assert event_times == {}
event = "launch_ctx_done"
event_times = self.node_events.get_event_datetimes_dict(event)
print event_times
assert event_times.has_key(self.launch_ctx_id)
event = "fetch_killed"
event_times = self.node_events.get_event_datetimes_dict(event)
print event_times
assert event_times.has_key(self.vmkill_event_id)
# test when we have an unreadable file
self.c.log.transcript = []
os.chmod(self.provisioner_log, 0)
event = "launch_ctx_done"
event_times = self.node_events.get_event_datetimes_dict(event)
print event_times
failed_to_open = [message for (level, message)
in self.c.log.transcript
if level == "ERROR"
and "Failed to open and read" in message]
assert len(failed_to_open) == 1
def test_get_event_count(self):
event_count = self.node_events.get_event_count("launch_ctx_done")
print event_count
assert event_count == 1
| apache-2.0 |
maljac/odoo-addons | partner_views_fields/res_config.py | 1 | 1422 | # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import fields, models
class partner_configuration(models.TransientModel):
_inherit = 'base.config.settings'
group_ref = fields.Boolean(
"Show Reference On Partners Tree View",
implied_group='partner_views_fields.group_ref',
)
group_user_id = fields.Boolean(
"Show Commercial On Partners Tree View",
implied_group='partner_views_fields.group_user_id',
)
group_city = fields.Boolean(
"Show City On Partners Tree and Search Views",
implied_group='partner_views_fields.group_city',
)
group_state_id = fields.Boolean(
"Show State On Partners Tree and Search Views",
implied_group='partner_views_fields.group_state_id',
)
group_country_id = fields.Boolean(
"Show Country On Partners Tree and Search Views",
implied_group='partner_views_fields.group_country_id',
)
group_function = fields.Boolean(
"Show Function On Partners Tree and Search Views",
implied_group='partner_views_fields.group_function',
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
dennisss/sympy | sympy/assumptions/tests/test_assumptions_2.py | 30 | 1961 | """
rename this to test_assumptions.py when the old assumptions system is deleted
"""
from sympy.abc import x, y
from sympy.assumptions.assume import global_assumptions, Predicate
from sympy.assumptions.ask import _extract_facts, Q
from sympy.core import symbols
from sympy.logic.boolalg import Or
from sympy.printing import pretty
def test_equal():
"""Test for equality"""
assert Q.positive(x) == Q.positive(x)
assert Q.positive(x) != ~Q.positive(x)
assert ~Q.positive(x) == ~Q.positive(x)
def test_pretty():
assert pretty(Q.positive(x)) == "Q.positive(x)"
assert pretty(
set([Q.positive, Q.integer])) == "set([Q.integer, Q.positive])"
def test_extract_facts():
a, b = symbols('a b', cls=Predicate)
assert _extract_facts(a(x), x) == a
assert _extract_facts(a(x), y) is None
assert _extract_facts(~a(x), x) == ~a
assert _extract_facts(~a(x), y) is None
assert _extract_facts(a(x) | b(x), x) == a | b
assert _extract_facts(a(x) | ~b(x), x) == a | ~b
assert _extract_facts(a(x) & b(y), x) == a
assert _extract_facts(a(x) & b(y), y) == b
assert _extract_facts(a(x) | b(y), x) == None
assert _extract_facts(~(a(x) | b(y)), x) == ~a
def test_global():
"""Test for global assumptions"""
global_assumptions.add(Q.is_true(x > 0))
assert Q.is_true(x > 0) in global_assumptions
global_assumptions.remove(Q.is_true(x > 0))
assert not Q.is_true(x > 0) in global_assumptions
# same with multiple assumptions
global_assumptions.add(Q.is_true(x > 0), Q.is_true(y > 0))
assert Q.is_true(x > 0) in global_assumptions
assert Q.is_true(y > 0) in global_assumptions
global_assumptions.clear()
assert not Q.is_true(x > 0) in global_assumptions
assert not Q.is_true(y > 0) in global_assumptions
def test_composite_predicates():
pred = Q.integer | ~Q.positive
assert type(pred(x)) is Or
assert pred(x) == Q.integer(x) | ~Q.positive(x)
| bsd-3-clause |
hbrunn/OCB | addons/account/report/account_partner_balance.py | 286 | 11049 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import osv
from openerp.tools.translate import _
from openerp.report import report_sxw
from common_report_header import common_report_header
class partner_balance(report_sxw.rml_parse, common_report_header):
def __init__(self, cr, uid, name, context=None):
super(partner_balance, self).__init__(cr, uid, name, context=context)
self.account_ids = []
self.localcontext.update( {
'time': time,
'get_fiscalyear': self._get_fiscalyear,
'get_journal': self._get_journal,
'get_filter': self._get_filter,
'get_account': self._get_account,
'get_start_date':self._get_start_date,
'get_end_date':self._get_end_date,
'get_start_period': self.get_start_period,
'get_end_period': self.get_end_period,
'get_partners':self._get_partners,
'get_target_move': self._get_target_move,
})
def set_context(self, objects, data, ids, report_type=None):
self.display_partner = data['form'].get('display_partner', 'non-zero_balance')
obj_move = self.pool.get('account.move.line')
self.query = obj_move._query_get(self.cr, self.uid, obj='l', context=data['form'].get('used_context', {}))
self.result_selection = data['form'].get('result_selection')
self.target_move = data['form'].get('target_move', 'all')
if (self.result_selection == 'customer' ):
self.ACCOUNT_TYPE = ('receivable',)
elif (self.result_selection == 'supplier'):
self.ACCOUNT_TYPE = ('payable',)
else:
self.ACCOUNT_TYPE = ('payable', 'receivable')
self.cr.execute("SELECT a.id " \
"FROM account_account a " \
"LEFT JOIN account_account_type t " \
"ON (a.type = t.code) " \
"WHERE a.type IN %s " \
"AND a.active", (self.ACCOUNT_TYPE,))
self.account_ids = [a for (a,) in self.cr.fetchall()]
res = super(partner_balance, self).set_context(objects, data, ids, report_type=report_type)
lines = self.lines()
sum_debit = sum_credit = sum_litige = 0
for line in filter(lambda x: x['type'] == 3, lines):
sum_debit += line['debit'] or 0
sum_credit += line['credit'] or 0
sum_litige += line['enlitige'] or 0
self.localcontext.update({
'lines': lambda: lines,
'sum_debit': lambda: sum_debit,
'sum_credit': lambda: sum_credit,
'sum_litige': lambda: sum_litige,
})
return res
def lines(self):
move_state = ['draft','posted']
if self.target_move == 'posted':
move_state = ['posted']
full_account = []
self.cr.execute(
"SELECT p.ref,l.account_id,ac.name AS account_name,ac.code AS code,p.name, sum(debit) AS debit, sum(credit) AS credit, " \
"CASE WHEN sum(debit) > sum(credit) " \
"THEN sum(debit) - sum(credit) " \
"ELSE 0 " \
"END AS sdebit, " \
"CASE WHEN sum(debit) < sum(credit) " \
"THEN sum(credit) - sum(debit) " \
"ELSE 0 " \
"END AS scredit, " \
"(SELECT sum(debit-credit) " \
"FROM account_move_line l " \
"WHERE partner_id = p.id " \
"AND " + self.query + " " \
"AND blocked = TRUE " \
") AS enlitige " \
"FROM account_move_line l LEFT JOIN res_partner p ON (l.partner_id=p.id) " \
"JOIN account_account ac ON (l.account_id = ac.id)" \
"JOIN account_move am ON (am.id = l.move_id)" \
"WHERE ac.type IN %s " \
"AND am.state IN %s " \
"AND " + self.query + "" \
"GROUP BY p.id, p.ref, p.name,l.account_id,ac.name,ac.code " \
"ORDER BY l.account_id,p.name",
(self.ACCOUNT_TYPE, tuple(move_state)))
res = self.cr.dictfetchall()
if self.display_partner == 'non-zero_balance':
full_account = [r for r in res if r['sdebit'] > 0 or r['scredit'] > 0]
else:
full_account = [r for r in res]
for rec in full_account:
if not rec.get('name', False):
rec.update({'name': _('Unknown Partner')})
## We will now compute Total
subtotal_row = self._add_subtotal(full_account)
return subtotal_row
def _add_subtotal(self, cleanarray):
i = 0
completearray = []
tot_debit = 0.0
tot_credit = 0.0
tot_scredit = 0.0
tot_sdebit = 0.0
tot_enlitige = 0.0
for r in cleanarray:
# For the first element we always add the line
# type = 1 if the line is the first of the account
# type = 2 for any other line of the account
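# Illustrative shape of the result (sample values assumed, not taken from
# this module): for one account with two partner lines the output list is
#   [{'type': 3, ...account header/subtotal...},
#    {'type': 1, ...first partner line...},
#    {'type': 2, ...second partner line...}]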
if i==0:
# We add the first as the header
#
##
new_header = {}
new_header['ref'] = ''
new_header['name'] = r['account_name']
new_header['code'] = r['code']
new_header['debit'] = r['debit']
new_header['credit'] = r['credit']
new_header['scredit'] = tot_scredit
new_header['sdebit'] = tot_sdebit
new_header['enlitige'] = tot_enlitige
new_header['balance'] = r['debit'] - r['credit']
new_header['type'] = 3
##
completearray.append(new_header)
#
r['type'] = 1
r['balance'] = float(r['sdebit']) - float(r['scredit'])
completearray.append(r)
#
tot_debit = r['debit']
tot_credit = r['credit']
tot_scredit = r['scredit']
tot_sdebit = r['sdebit']
tot_enlitige = (r['enlitige'] or 0.0)
#
else:
if cleanarray[i]['account_id'] <> cleanarray[i-1]['account_id']:
new_header['debit'] = tot_debit
new_header['credit'] = tot_credit
new_header['scredit'] = tot_scredit
new_header['sdebit'] = tot_sdebit
new_header['enlitige'] = tot_enlitige
new_header['balance'] = float(tot_sdebit) - float(tot_scredit)
new_header['type'] = 3
# we reset the counter
tot_debit = r['debit']
tot_credit = r['credit']
tot_scredit = r['scredit']
tot_sdebit = r['sdebit']
tot_enlitige = (r['enlitige'] or 0.0)
#
##
new_header = {}
new_header['ref'] = ''
new_header['name'] = r['account_name']
new_header['code'] = r['code']
new_header['debit'] = tot_debit
new_header['credit'] = tot_credit
new_header['scredit'] = tot_scredit
new_header['sdebit'] = tot_sdebit
new_header['enlitige'] = tot_enlitige
new_header['balance'] = float(tot_sdebit) - float(tot_scredit)
new_header['type'] = 3
##get_fiscalyear
##
completearray.append(new_header)
##
#
r['type'] = 1
#
r['balance'] = float(r['sdebit']) - float(r['scredit'])
completearray.append(r)
if cleanarray[i]['account_id'] == cleanarray[i-1]['account_id']:
# we reset the counter
new_header['debit'] = tot_debit
new_header['credit'] = tot_credit
new_header['scredit'] = tot_scredit
new_header['sdebit'] = tot_sdebit
new_header['enlitige'] = tot_enlitige
new_header['balance'] = float(tot_sdebit) - float(tot_scredit)
new_header['type'] = 3
tot_debit = tot_debit + r['debit']
tot_credit = tot_credit + r['credit']
tot_scredit = tot_scredit + r['scredit']
tot_sdebit = tot_sdebit + r['sdebit']
tot_enlitige = tot_enlitige + (r['enlitige'] or 0.0)
new_header['debit'] = tot_debit
new_header['credit'] = tot_credit
new_header['scredit'] = tot_scredit
new_header['sdebit'] = tot_sdebit
new_header['enlitige'] = tot_enlitige
new_header['balance'] = float(tot_sdebit) - float(tot_scredit)
#
r['type'] = 2
#
r['balance'] = float(r['sdebit']) - float(r['scredit'])
#
completearray.append(r)
i = i + 1
return completearray
def _get_partners(self):
if self.result_selection == 'customer':
return _('Receivable Accounts')
elif self.result_selection == 'supplier':
return _('Payable Accounts')
elif self.result_selection == 'customer_supplier':
return _('Receivable and Payable Accounts')
return ''
class report_partnerbalance(osv.AbstractModel):
_name = 'report.account.report_partnerbalance'
_inherit = 'report.abstract_report'
_template = 'account.report_partnerbalance'
_wrapped_report_class = partner_balance
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
haeusser/tensorflow | tensorflow/contrib/legacy_seq2seq/python/kernel_tests/seq2seq_test.py | 5 | 43745 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for functional style sequence-to-sequence models."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import math
import random
import sys
# TODO: #6568 Remove this hack that makes dlopen() not crash.
if hasattr(sys, "getdlopenflags") and hasattr(sys, "setdlopenflags"):
import ctypes
sys.setdlopenflags(sys.getdlopenflags() | ctypes.RTLD_GLOBAL)
import numpy as np
from tensorflow.contrib.legacy_seq2seq.python.ops import seq2seq as seq2seq_lib
from tensorflow.contrib.rnn.python.ops import core_rnn
from tensorflow.contrib.rnn.python.ops import core_rnn_cell_impl
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import clip_ops
from tensorflow.python.ops import gradients_impl
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import nn_impl
from tensorflow.python.ops import rnn
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.training import adam
class Seq2SeqTest(test.TestCase):
def testRNNDecoder(self):
with self.test_session() as sess:
with variable_scope.variable_scope(
"root", initializer=init_ops.constant_initializer(0.5)):
inp = [constant_op.constant(0.5, shape=[2, 2])] * 2
_, enc_state = core_rnn.static_rnn(
core_rnn_cell_impl.GRUCell(2), inp, dtype=dtypes.float32)
dec_inp = [constant_op.constant(0.4, shape=[2, 2])] * 3
cell = core_rnn_cell_impl.OutputProjectionWrapper(
core_rnn_cell_impl.GRUCell(2), 4)
dec, mem = seq2seq_lib.rnn_decoder(dec_inp, enc_state, cell)
sess.run([variables.global_variables_initializer()])
res = sess.run(dec)
self.assertEqual(3, len(res))
self.assertEqual((2, 4), res[0].shape)
res = sess.run([mem])
self.assertEqual((2, 2), res[0].shape)
def testBasicRNNSeq2Seq(self):
with self.test_session() as sess:
with variable_scope.variable_scope(
"root", initializer=init_ops.constant_initializer(0.5)):
inp = [constant_op.constant(0.5, shape=[2, 2])] * 2
dec_inp = [constant_op.constant(0.4, shape=[2, 2])] * 3
cell = core_rnn_cell_impl.OutputProjectionWrapper(
core_rnn_cell_impl.GRUCell(2), 4)
dec, mem = seq2seq_lib.basic_rnn_seq2seq(inp, dec_inp, cell)
sess.run([variables.global_variables_initializer()])
res = sess.run(dec)
self.assertEqual(3, len(res))
self.assertEqual((2, 4), res[0].shape)
res = sess.run([mem])
self.assertEqual((2, 2), res[0].shape)
def testTiedRNNSeq2Seq(self):
with self.test_session() as sess:
with variable_scope.variable_scope(
"root", initializer=init_ops.constant_initializer(0.5)):
inp = [constant_op.constant(0.5, shape=[2, 2])] * 2
dec_inp = [constant_op.constant(0.4, shape=[2, 2])] * 3
cell = core_rnn_cell_impl.OutputProjectionWrapper(
core_rnn_cell_impl.GRUCell(2), 4)
dec, mem = seq2seq_lib.tied_rnn_seq2seq(inp, dec_inp, cell)
sess.run([variables.global_variables_initializer()])
res = sess.run(dec)
self.assertEqual(3, len(res))
self.assertEqual((2, 4), res[0].shape)
res = sess.run([mem])
self.assertEqual(1, len(res))
self.assertEqual((2, 2), res[0].shape)
def testEmbeddingRNNDecoder(self):
with self.test_session() as sess:
with variable_scope.variable_scope(
"root", initializer=init_ops.constant_initializer(0.5)):
inp = [constant_op.constant(0.5, shape=[2, 2])] * 2
cell_fn = lambda: core_rnn_cell_impl.BasicLSTMCell(2)
cell = cell_fn()
_, enc_state = core_rnn.static_rnn(cell, inp, dtype=dtypes.float32)
dec_inp = [
constant_op.constant(
i, dtypes.int32, shape=[2]) for i in range(3)
]
# Use a new cell instance since the attention decoder uses a
# different variable scope.
dec, mem = seq2seq_lib.embedding_rnn_decoder(
dec_inp, enc_state, cell_fn(), num_symbols=4, embedding_size=2)
sess.run([variables.global_variables_initializer()])
res = sess.run(dec)
self.assertEqual(3, len(res))
self.assertEqual((2, 2), res[0].shape)
res = sess.run([mem])
self.assertEqual(1, len(res))
self.assertEqual((2, 2), res[0].c.shape)
self.assertEqual((2, 2), res[0].h.shape)
def testEmbeddingRNNSeq2Seq(self):
with self.test_session() as sess:
with variable_scope.variable_scope(
"root", initializer=init_ops.constant_initializer(0.5)):
enc_inp = [
constant_op.constant(
1, dtypes.int32, shape=[2]) for i in range(2)
]
dec_inp = [
constant_op.constant(
i, dtypes.int32, shape=[2]) for i in range(3)
]
cell_fn = lambda: core_rnn_cell_impl.BasicLSTMCell(2)
cell = cell_fn()
dec, mem = seq2seq_lib.embedding_rnn_seq2seq(
enc_inp,
dec_inp,
cell,
num_encoder_symbols=2,
num_decoder_symbols=5,
embedding_size=2)
sess.run([variables.global_variables_initializer()])
res = sess.run(dec)
self.assertEqual(3, len(res))
self.assertEqual((2, 5), res[0].shape)
res = sess.run([mem])
self.assertEqual((2, 2), res[0].c.shape)
self.assertEqual((2, 2), res[0].h.shape)
# Test with state_is_tuple=False.
with variable_scope.variable_scope("no_tuple"):
cell_nt = core_rnn_cell_impl.BasicLSTMCell(2, state_is_tuple=False)
dec, mem = seq2seq_lib.embedding_rnn_seq2seq(
enc_inp,
dec_inp,
cell_nt,
num_encoder_symbols=2,
num_decoder_symbols=5,
embedding_size=2)
sess.run([variables.global_variables_initializer()])
res = sess.run(dec)
self.assertEqual(3, len(res))
self.assertEqual((2, 5), res[0].shape)
res = sess.run([mem])
self.assertEqual((2, 4), res[0].shape)
# Test externally provided output projection.
w = variable_scope.get_variable("proj_w", [2, 5])
b = variable_scope.get_variable("proj_b", [5])
with variable_scope.variable_scope("proj_seq2seq"):
dec, _ = seq2seq_lib.embedding_rnn_seq2seq(
enc_inp,
dec_inp,
cell_fn(),
num_encoder_symbols=2,
num_decoder_symbols=5,
embedding_size=2,
output_projection=(w, b))
sess.run([variables.global_variables_initializer()])
res = sess.run(dec)
self.assertEqual(3, len(res))
self.assertEqual((2, 2), res[0].shape)
# Test that previous-feeding model ignores inputs after the first.
dec_inp2 = [
constant_op.constant(
0, dtypes.int32, shape=[2]) for _ in range(3)
]
with variable_scope.variable_scope("other"):
d3, _ = seq2seq_lib.embedding_rnn_seq2seq(
enc_inp,
dec_inp2,
cell_fn(),
num_encoder_symbols=2,
num_decoder_symbols=5,
embedding_size=2,
feed_previous=constant_op.constant(True))
with variable_scope.variable_scope("other_2"):
d1, _ = seq2seq_lib.embedding_rnn_seq2seq(
enc_inp,
dec_inp,
cell_fn(),
num_encoder_symbols=2,
num_decoder_symbols=5,
embedding_size=2,
feed_previous=True)
with variable_scope.variable_scope("other_3"):
d2, _ = seq2seq_lib.embedding_rnn_seq2seq(
enc_inp,
dec_inp2,
cell_fn(),
num_encoder_symbols=2,
num_decoder_symbols=5,
embedding_size=2,
feed_previous=True)
sess.run([variables.global_variables_initializer()])
res1 = sess.run(d1)
res2 = sess.run(d2)
res3 = sess.run(d3)
self.assertAllClose(res1, res2)
self.assertAllClose(res1, res3)
def testEmbeddingTiedRNNSeq2Seq(self):
with self.test_session() as sess:
with variable_scope.variable_scope(
"root", initializer=init_ops.constant_initializer(0.5)):
enc_inp = [
constant_op.constant(
1, dtypes.int32, shape=[2]) for i in range(2)
]
dec_inp = [
constant_op.constant(
i, dtypes.int32, shape=[2]) for i in range(3)
]
cell = functools.partial(
core_rnn_cell_impl.BasicLSTMCell,
2, state_is_tuple=True)
dec, mem = seq2seq_lib.embedding_tied_rnn_seq2seq(
enc_inp, dec_inp, cell(), num_symbols=5, embedding_size=2)
sess.run([variables.global_variables_initializer()])
res = sess.run(dec)
self.assertEqual(3, len(res))
self.assertEqual((2, 5), res[0].shape)
res = sess.run([mem])
self.assertEqual((2, 2), res[0].c.shape)
self.assertEqual((2, 2), res[0].h.shape)
# Test when num_decoder_symbols is provided, the size of decoder output
# is num_decoder_symbols.
with variable_scope.variable_scope("decoder_symbols_seq2seq"):
dec, mem = seq2seq_lib.embedding_tied_rnn_seq2seq(
enc_inp,
dec_inp,
cell(),
num_symbols=5,
num_decoder_symbols=3,
embedding_size=2)
sess.run([variables.global_variables_initializer()])
res = sess.run(dec)
self.assertEqual(3, len(res))
self.assertEqual((2, 3), res[0].shape)
# Test externally provided output projection.
w = variable_scope.get_variable("proj_w", [2, 5])
b = variable_scope.get_variable("proj_b", [5])
with variable_scope.variable_scope("proj_seq2seq"):
dec, _ = seq2seq_lib.embedding_tied_rnn_seq2seq(
enc_inp,
dec_inp,
cell(),
num_symbols=5,
embedding_size=2,
output_projection=(w, b))
sess.run([variables.global_variables_initializer()])
res = sess.run(dec)
self.assertEqual(3, len(res))
self.assertEqual((2, 2), res[0].shape)
# Test that previous-feeding model ignores inputs after the first.
dec_inp2 = [constant_op.constant(0, dtypes.int32, shape=[2])] * 3
with variable_scope.variable_scope("other"):
d3, _ = seq2seq_lib.embedding_tied_rnn_seq2seq(
enc_inp,
dec_inp2,
cell(),
num_symbols=5,
embedding_size=2,
feed_previous=constant_op.constant(True))
with variable_scope.variable_scope("other_2"):
d1, _ = seq2seq_lib.embedding_tied_rnn_seq2seq(
enc_inp,
dec_inp,
cell(),
num_symbols=5,
embedding_size=2,
feed_previous=True)
with variable_scope.variable_scope("other_3"):
d2, _ = seq2seq_lib.embedding_tied_rnn_seq2seq(
enc_inp,
dec_inp2,
cell(),
num_symbols=5,
embedding_size=2,
feed_previous=True)
sess.run([variables.global_variables_initializer()])
res1 = sess.run(d1)
res2 = sess.run(d2)
res3 = sess.run(d3)
self.assertAllClose(res1, res2)
self.assertAllClose(res1, res3)
def testAttentionDecoder1(self):
with self.test_session() as sess:
with variable_scope.variable_scope(
"root", initializer=init_ops.constant_initializer(0.5)):
cell_fn = lambda: core_rnn_cell_impl.GRUCell(2)
cell = cell_fn()
inp = [constant_op.constant(0.5, shape=[2, 2])] * 2
enc_outputs, enc_state = core_rnn.static_rnn(
cell, inp, dtype=dtypes.float32)
attn_states = array_ops.concat([
array_ops.reshape(e, [-1, 1, cell.output_size]) for e in enc_outputs
], 1)
dec_inp = [constant_op.constant(0.4, shape=[2, 2])] * 3
# Create a new cell instance for the decoder, since it uses a
# different variable scope
dec, mem = seq2seq_lib.attention_decoder(
dec_inp, enc_state, attn_states, cell_fn(), output_size=4)
sess.run([variables.global_variables_initializer()])
res = sess.run(dec)
self.assertEqual(3, len(res))
self.assertEqual((2, 4), res[0].shape)
res = sess.run([mem])
self.assertEqual((2, 2), res[0].shape)
def testAttentionDecoder2(self):
with self.test_session() as sess:
with variable_scope.variable_scope(
"root", initializer=init_ops.constant_initializer(0.5)):
cell_fn = lambda: core_rnn_cell_impl.GRUCell(2)
cell = cell_fn()
inp = [constant_op.constant(0.5, shape=[2, 2])] * 2
enc_outputs, enc_state = core_rnn.static_rnn(
cell, inp, dtype=dtypes.float32)
attn_states = array_ops.concat([
array_ops.reshape(e, [-1, 1, cell.output_size]) for e in enc_outputs
], 1)
dec_inp = [constant_op.constant(0.4, shape=[2, 2])] * 3
# Use a new cell instance since the attention decoder uses a
# different variable scope.
dec, mem = seq2seq_lib.attention_decoder(
dec_inp, enc_state, attn_states, cell_fn(),
output_size=4, num_heads=2)
sess.run([variables.global_variables_initializer()])
res = sess.run(dec)
self.assertEqual(3, len(res))
self.assertEqual((2, 4), res[0].shape)
res = sess.run([mem])
self.assertEqual((2, 2), res[0].shape)
def testDynamicAttentionDecoder1(self):
with self.test_session() as sess:
with variable_scope.variable_scope(
"root", initializer=init_ops.constant_initializer(0.5)):
cell_fn = lambda: core_rnn_cell_impl.GRUCell(2)
cell = cell_fn()
inp = constant_op.constant(0.5, shape=[2, 2, 2])
enc_outputs, enc_state = rnn.dynamic_rnn(
cell, inp, dtype=dtypes.float32)
attn_states = enc_outputs
dec_inp = [constant_op.constant(0.4, shape=[2, 2])] * 3
# Use a new cell instance since the attention decoder uses a
# different variable scope.
dec, mem = seq2seq_lib.attention_decoder(
dec_inp, enc_state, attn_states, cell_fn(), output_size=4)
sess.run([variables.global_variables_initializer()])
res = sess.run(dec)
self.assertEqual(3, len(res))
self.assertEqual((2, 4), res[0].shape)
res = sess.run([mem])
self.assertEqual((2, 2), res[0].shape)
def testDynamicAttentionDecoder2(self):
with self.test_session() as sess:
with variable_scope.variable_scope(
"root", initializer=init_ops.constant_initializer(0.5)):
cell_fn = lambda: core_rnn_cell_impl.GRUCell(2)
cell = cell_fn()
inp = constant_op.constant(0.5, shape=[2, 2, 2])
enc_outputs, enc_state = rnn.dynamic_rnn(
cell, inp, dtype=dtypes.float32)
attn_states = enc_outputs
dec_inp = [constant_op.constant(0.4, shape=[2, 2])] * 3
# Use a new cell instance since the attention decoder uses a
# different variable scope.
dec, mem = seq2seq_lib.attention_decoder(
dec_inp, enc_state, attn_states, cell_fn(),
output_size=4, num_heads=2)
sess.run([variables.global_variables_initializer()])
res = sess.run(dec)
self.assertEqual(3, len(res))
self.assertEqual((2, 4), res[0].shape)
res = sess.run([mem])
self.assertEqual((2, 2), res[0].shape)
def testAttentionDecoderStateIsTuple(self):
with self.test_session() as sess:
with variable_scope.variable_scope(
"root", initializer=init_ops.constant_initializer(0.5)):
single_cell = lambda: core_rnn_cell_impl.BasicLSTMCell( # pylint: disable=g-long-lambda
2, state_is_tuple=True)
cell_fn = lambda: core_rnn_cell_impl.MultiRNNCell( # pylint: disable=g-long-lambda
cells=[single_cell() for _ in range(2)], state_is_tuple=True)
cell = cell_fn()
inp = [constant_op.constant(0.5, shape=[2, 2])] * 2
enc_outputs, enc_state = core_rnn.static_rnn(
cell, inp, dtype=dtypes.float32)
attn_states = array_ops.concat([
array_ops.reshape(e, [-1, 1, cell.output_size]) for e in enc_outputs
], 1)
dec_inp = [constant_op.constant(0.4, shape=[2, 2])] * 3
# Use a new cell instance since the attention decoder uses a
# different variable scope.
dec, mem = seq2seq_lib.attention_decoder(
dec_inp, enc_state, attn_states, cell_fn(), output_size=4)
sess.run([variables.global_variables_initializer()])
res = sess.run(dec)
self.assertEqual(3, len(res))
self.assertEqual((2, 4), res[0].shape)
res = sess.run([mem])
self.assertEqual(2, len(res[0]))
self.assertEqual((2, 2), res[0][0].c.shape)
self.assertEqual((2, 2), res[0][0].h.shape)
self.assertEqual((2, 2), res[0][1].c.shape)
self.assertEqual((2, 2), res[0][1].h.shape)
def testDynamicAttentionDecoderStateIsTuple(self):
with self.test_session() as sess:
with variable_scope.variable_scope(
"root", initializer=init_ops.constant_initializer(0.5)):
cell_fn = lambda: core_rnn_cell_impl.MultiRNNCell( # pylint: disable=g-long-lambda
cells=[core_rnn_cell_impl.BasicLSTMCell(2) for _ in range(2)])
cell = cell_fn()
inp = constant_op.constant(0.5, shape=[2, 2, 2])
enc_outputs, enc_state = core_rnn.static_rnn(
cell, inp, dtype=dtypes.float32)
attn_states = array_ops.concat([
array_ops.reshape(e, [-1, 1, cell.output_size])
for e in enc_outputs
], 1)
dec_inp = [constant_op.constant(0.4, shape=[2, 2])] * 3
# Use a new cell instance since the attention decoder uses a
# different variable scope.
dec, mem = seq2seq_lib.attention_decoder(
dec_inp, enc_state, attn_states, cell_fn(), output_size=4)
sess.run([variables.global_variables_initializer()])
res = sess.run(dec)
self.assertEqual(3, len(res))
self.assertEqual((2, 4), res[0].shape)
res = sess.run([mem])
self.assertEqual(2, len(res[0]))
self.assertEqual((2, 2), res[0][0].c.shape)
self.assertEqual((2, 2), res[0][0].h.shape)
self.assertEqual((2, 2), res[0][1].c.shape)
self.assertEqual((2, 2), res[0][1].h.shape)
def testEmbeddingAttentionDecoder(self):
with self.test_session() as sess:
with variable_scope.variable_scope(
"root", initializer=init_ops.constant_initializer(0.5)):
inp = [constant_op.constant(0.5, shape=[2, 2])] * 2
cell_fn = lambda: core_rnn_cell_impl.GRUCell(2)
cell = cell_fn()
enc_outputs, enc_state = core_rnn.static_rnn(
cell, inp, dtype=dtypes.float32)
attn_states = array_ops.concat([
array_ops.reshape(e, [-1, 1, cell.output_size]) for e in enc_outputs
], 1)
dec_inp = [
constant_op.constant(
i, dtypes.int32, shape=[2]) for i in range(3)
]
# Use a new cell instance since the attention decoder uses a
# different variable scope.
dec, mem = seq2seq_lib.embedding_attention_decoder(
dec_inp,
enc_state,
attn_states,
cell_fn(),
num_symbols=4,
embedding_size=2,
output_size=3)
sess.run([variables.global_variables_initializer()])
res = sess.run(dec)
self.assertEqual(3, len(res))
self.assertEqual((2, 3), res[0].shape)
res = sess.run([mem])
self.assertEqual((2, 2), res[0].shape)
def testEmbeddingAttentionSeq2Seq(self):
with self.test_session() as sess:
with variable_scope.variable_scope(
"root", initializer=init_ops.constant_initializer(0.5)):
enc_inp = [
constant_op.constant(
1, dtypes.int32, shape=[2]) for i in range(2)
]
dec_inp = [
constant_op.constant(
i, dtypes.int32, shape=[2]) for i in range(3)
]
cell_fn = lambda: core_rnn_cell_impl.BasicLSTMCell(2)
cell = cell_fn()
dec, mem = seq2seq_lib.embedding_attention_seq2seq(
enc_inp,
dec_inp,
cell,
num_encoder_symbols=2,
num_decoder_symbols=5,
embedding_size=2)
sess.run([variables.global_variables_initializer()])
res = sess.run(dec)
self.assertEqual(3, len(res))
self.assertEqual((2, 5), res[0].shape)
res = sess.run([mem])
self.assertEqual((2, 2), res[0].c.shape)
self.assertEqual((2, 2), res[0].h.shape)
# Test with state_is_tuple=False.
with variable_scope.variable_scope("no_tuple"):
cell_fn = functools.partial(
core_rnn_cell_impl.BasicLSTMCell,
2, state_is_tuple=False)
cell_nt = cell_fn()
dec, mem = seq2seq_lib.embedding_attention_seq2seq(
enc_inp,
dec_inp,
cell_nt,
num_encoder_symbols=2,
num_decoder_symbols=5,
embedding_size=2)
sess.run([variables.global_variables_initializer()])
res = sess.run(dec)
self.assertEqual(3, len(res))
self.assertEqual((2, 5), res[0].shape)
res = sess.run([mem])
self.assertEqual((2, 4), res[0].shape)
# Test externally provided output projection.
w = variable_scope.get_variable("proj_w", [2, 5])
b = variable_scope.get_variable("proj_b", [5])
with variable_scope.variable_scope("proj_seq2seq"):
dec, _ = seq2seq_lib.embedding_attention_seq2seq(
enc_inp,
dec_inp,
cell_fn(),
num_encoder_symbols=2,
num_decoder_symbols=5,
embedding_size=2,
output_projection=(w, b))
sess.run([variables.global_variables_initializer()])
res = sess.run(dec)
self.assertEqual(3, len(res))
self.assertEqual((2, 2), res[0].shape)
# TODO(ebrevdo, lukaszkaiser): Re-enable once RNNCells allow reuse
# within a variable scope that already has a weights tensor.
#
# # Test that previous-feeding model ignores inputs after the first.
# dec_inp2 = [
# constant_op.constant(
# 0, dtypes.int32, shape=[2]) for _ in range(3)
# ]
# with variable_scope.variable_scope("other"):
# d3, _ = seq2seq_lib.embedding_attention_seq2seq(
# enc_inp,
# dec_inp2,
# cell_fn(),
# num_encoder_symbols=2,
# num_decoder_symbols=5,
# embedding_size=2,
# feed_previous=constant_op.constant(True))
# sess.run([variables.global_variables_initializer()])
# variable_scope.get_variable_scope().reuse_variables()
# cell = cell_fn()
# d1, _ = seq2seq_lib.embedding_attention_seq2seq(
# enc_inp,
# dec_inp,
# cell,
# num_encoder_symbols=2,
# num_decoder_symbols=5,
# embedding_size=2,
# feed_previous=True)
# d2, _ = seq2seq_lib.embedding_attention_seq2seq(
# enc_inp,
# dec_inp2,
# cell,
# num_encoder_symbols=2,
# num_decoder_symbols=5,
# embedding_size=2,
# feed_previous=True)
# res1 = sess.run(d1)
# res2 = sess.run(d2)
# res3 = sess.run(d3)
# self.assertAllClose(res1, res2)
# self.assertAllClose(res1, res3)
def testOne2ManyRNNSeq2Seq(self):
with self.test_session() as sess:
with variable_scope.variable_scope(
"root", initializer=init_ops.constant_initializer(0.5)):
enc_inp = [
constant_op.constant(
1, dtypes.int32, shape=[2]) for i in range(2)
]
dec_inp_dict = {}
dec_inp_dict["0"] = [
constant_op.constant(
i, dtypes.int32, shape=[2]) for i in range(3)
]
dec_inp_dict["1"] = [
constant_op.constant(
i, dtypes.int32, shape=[2]) for i in range(4)
]
dec_symbols_dict = {"0": 5, "1": 6}
def EncCellFn():
return core_rnn_cell_impl.BasicLSTMCell(2, state_is_tuple=True)
def DecCellsFn():
return dict(
(k, core_rnn_cell_impl.BasicLSTMCell(2, state_is_tuple=True))
for k in dec_symbols_dict)
outputs_dict, state_dict = (seq2seq_lib.one2many_rnn_seq2seq(
enc_inp, dec_inp_dict, EncCellFn(), DecCellsFn(),
2, dec_symbols_dict, embedding_size=2))
sess.run([variables.global_variables_initializer()])
res = sess.run(outputs_dict["0"])
self.assertEqual(3, len(res))
self.assertEqual((2, 5), res[0].shape)
res = sess.run(outputs_dict["1"])
self.assertEqual(4, len(res))
self.assertEqual((2, 6), res[0].shape)
res = sess.run([state_dict["0"]])
self.assertEqual((2, 2), res[0].c.shape)
self.assertEqual((2, 2), res[0].h.shape)
res = sess.run([state_dict["1"]])
self.assertEqual((2, 2), res[0].c.shape)
self.assertEqual((2, 2), res[0].h.shape)
# Test that previous-feeding model ignores inputs after the first, i.e.
# dec_inp_dict2 has different inputs from dec_inp_dict after the first
# time-step.
dec_inp_dict2 = {}
dec_inp_dict2["0"] = [
constant_op.constant(
0, dtypes.int32, shape=[2]) for _ in range(3)
]
dec_inp_dict2["1"] = [
constant_op.constant(
0, dtypes.int32, shape=[2]) for _ in range(4)
]
with variable_scope.variable_scope("other"):
outputs_dict3, _ = seq2seq_lib.one2many_rnn_seq2seq(
enc_inp,
dec_inp_dict2,
EncCellFn(),
DecCellsFn(),
2,
dec_symbols_dict,
embedding_size=2,
feed_previous=constant_op.constant(True))
with variable_scope.variable_scope("other_2"):
outputs_dict1, _ = seq2seq_lib.one2many_rnn_seq2seq(
enc_inp,
dec_inp_dict,
EncCellFn(),
DecCellsFn(),
2,
dec_symbols_dict,
embedding_size=2,
feed_previous=True)
with variable_scope.variable_scope("other_3"):
outputs_dict2, _ = seq2seq_lib.one2many_rnn_seq2seq(
enc_inp,
dec_inp_dict2,
EncCellFn(),
DecCellsFn(),
2,
dec_symbols_dict,
embedding_size=2,
feed_previous=True)
sess.run([variables.global_variables_initializer()])
res1 = sess.run(outputs_dict1["0"])
res2 = sess.run(outputs_dict2["0"])
res3 = sess.run(outputs_dict3["0"])
self.assertAllClose(res1, res2)
self.assertAllClose(res1, res3)
def testSequenceLoss(self):
with self.test_session() as sess:
logits = [constant_op.constant(i + 0.5, shape=[2, 5]) for i in range(3)]
targets = [
constant_op.constant(
i, dtypes.int32, shape=[2]) for i in range(3)
]
weights = [constant_op.constant(1.0, shape=[2]) for i in range(3)]
average_loss_per_example = seq2seq_lib.sequence_loss(
logits,
targets,
weights,
average_across_timesteps=True,
average_across_batch=True)
res = sess.run(average_loss_per_example)
self.assertAllClose(1.60944, res)
average_loss_per_sequence = seq2seq_lib.sequence_loss(
logits,
targets,
weights,
average_across_timesteps=False,
average_across_batch=True)
res = sess.run(average_loss_per_sequence)
self.assertAllClose(4.828314, res)
total_loss = seq2seq_lib.sequence_loss(
logits,
targets,
weights,
average_across_timesteps=False,
average_across_batch=False)
res = sess.run(total_loss)
self.assertAllClose(9.656628, res)
def testSequenceLossByExample(self):
with self.test_session() as sess:
output_classes = 5
logits = [
constant_op.constant(
i + 0.5, shape=[2, output_classes]) for i in range(3)
]
targets = [
constant_op.constant(
i, dtypes.int32, shape=[2]) for i in range(3)
]
weights = [constant_op.constant(1.0, shape=[2]) for i in range(3)]
average_loss_per_example = (seq2seq_lib.sequence_loss_by_example(
logits, targets, weights, average_across_timesteps=True))
res = sess.run(average_loss_per_example)
self.assertAllClose(np.asarray([1.609438, 1.609438]), res)
loss_per_sequence = seq2seq_lib.sequence_loss_by_example(
logits, targets, weights, average_across_timesteps=False)
res = sess.run(loss_per_sequence)
self.assertAllClose(np.asarray([4.828314, 4.828314]), res)
# TODO(ebrevdo, lukaszkaiser): Re-enable once RNNCells allow reuse
# within a variable scope that already has a weights tensor.
#
# def testModelWithBucketsScopeAndLoss(self):
# """Test variable scope reuse is not reset after model_with_buckets."""
# classes = 10
# buckets = [(4, 4), (8, 8)]
# with self.test_session():
# # Here comes a sample Seq2Seq model using GRU cells.
# def SampleGRUSeq2Seq(enc_inp, dec_inp, weights, per_example_loss):
# """Example sequence-to-sequence model that uses GRU cells."""
# def GRUSeq2Seq(enc_inp, dec_inp):
# cell = core_rnn_cell_impl.MultiRNNCell(
# [core_rnn_cell_impl.GRUCell(24) for _ in range(2)])
# return seq2seq_lib.embedding_attention_seq2seq(
# enc_inp,
# dec_inp,
# cell,
# num_encoder_symbols=classes,
# num_decoder_symbols=classes,
# embedding_size=24)
# targets = [dec_inp[i + 1] for i in range(len(dec_inp) - 1)] + [0]
# return seq2seq_lib.model_with_buckets(
# enc_inp,
# dec_inp,
# targets,
# weights,
# buckets,
# GRUSeq2Seq,
# per_example_loss=per_example_loss)
# # Now we construct the copy model.
# inp = [
# array_ops.placeholder(
# dtypes.int32, shape=[None]) for _ in range(8)
# ]
# out = [
# array_ops.placeholder(
# dtypes.int32, shape=[None]) for _ in range(8)
# ]
# weights = [
# array_ops.ones_like(
# inp[0], dtype=dtypes.float32) for _ in range(8)
# ]
# with variable_scope.variable_scope("root"):
# _, losses1 = SampleGRUSeq2Seq(
# inp, out, weights, per_example_loss=False)
# # Now check that we did not accidentally set reuse.
# self.assertEqual(False, variable_scope.get_variable_scope().reuse)
# with variable_scope.variable_scope("new"):
# _, losses2 = SampleGRUSeq2Seq(
# inp, out, weights, per_example_loss=True)
# First loss is scalar, the second one is a 1-dimensional tensor.
# self.assertEqual([], losses1[0].get_shape().as_list())
# self.assertEqual([None], losses2[0].get_shape().as_list())
def testModelWithBuckets(self):
"""Larger tests that does full sequence-to-sequence model training."""
# We learn to copy 10 symbols in 2 buckets: length 4 and length 8.
classes = 10
buckets = [(4, 4), (8, 8)]
perplexities = [[], []] # Results for each bucket.
random_seed.set_random_seed(111)
random.seed(111)
np.random.seed(111)
with self.test_session() as sess:
# We use sampled softmax so we keep output projection separate.
w = variable_scope.get_variable("proj_w", [24, classes])
w_t = array_ops.transpose(w)
b = variable_scope.get_variable("proj_b", [classes])
# Here comes a sample Seq2Seq model using GRU cells.
def SampleGRUSeq2Seq(enc_inp, dec_inp, weights):
"""Example sequence-to-sequence model that uses GRU cells."""
def GRUSeq2Seq(enc_inp, dec_inp):
cell = core_rnn_cell_impl.MultiRNNCell(
[core_rnn_cell_impl.GRUCell(24) for _ in range(2)],
state_is_tuple=True)
return seq2seq_lib.embedding_attention_seq2seq(
enc_inp,
dec_inp,
cell,
num_encoder_symbols=classes,
num_decoder_symbols=classes,
embedding_size=24,
output_projection=(w, b))
targets = [dec_inp[i + 1] for i in range(len(dec_inp) - 1)] + [0]
def SampledLoss(labels, inputs):
labels = array_ops.reshape(labels, [-1, 1])
return nn_impl.sampled_softmax_loss(
weights=w_t,
biases=b,
labels=labels,
inputs=inputs,
num_sampled=8,
num_classes=classes)
return seq2seq_lib.model_with_buckets(
enc_inp,
dec_inp,
targets,
weights,
buckets,
GRUSeq2Seq,
softmax_loss_function=SampledLoss)
# Now we construct the copy model.
batch_size = 8
inp = [
array_ops.placeholder(
dtypes.int32, shape=[None]) for _ in range(8)
]
out = [
array_ops.placeholder(
dtypes.int32, shape=[None]) for _ in range(8)
]
weights = [
array_ops.ones_like(
inp[0], dtype=dtypes.float32) for _ in range(8)
]
with variable_scope.variable_scope("root"):
_, losses = SampleGRUSeq2Seq(inp, out, weights)
updates = []
params = variables.all_variables()
optimizer = adam.AdamOptimizer(0.03, epsilon=1e-5)
for i in range(len(buckets)):
full_grads = gradients_impl.gradients(losses[i], params)
grads, _ = clip_ops.clip_by_global_norm(full_grads, 30.0)
update = optimizer.apply_gradients(zip(grads, params))
updates.append(update)
sess.run([variables.global_variables_initializer()])
steps = 6
for _ in range(steps):
bucket = random.choice(np.arange(len(buckets)))
length = buckets[bucket][0]
i = [
np.array(
[np.random.randint(9) + 1 for _ in range(batch_size)],
dtype=np.int32) for _ in range(length)
]
# 0 is our "GO" symbol here.
o = [np.array([0] * batch_size, dtype=np.int32)] + i
feed = {}
for i1, i2, o1, o2 in zip(inp[:length], i[:length], out[:length],
o[:length]):
feed[i1.name] = i2
feed[o1.name] = o2
if length < 8: # For the 4-bucket, we need the 5th as target.
feed[out[length].name] = o[length]
res = sess.run([updates[bucket], losses[bucket]], feed)
perplexities[bucket].append(math.exp(float(res[1])))
for bucket in range(len(buckets)):
if len(perplexities[bucket]) > 1: # Assert that perplexity went down.
self.assertLess(perplexities[bucket][-1], # 10% margin of error.
1.1 * perplexities[bucket][0])
def testModelWithBooleanFeedPrevious(self):
"""Test the model behavior when feed_previous is True.
For example, the following two cases have the same effect:
- Train `embedding_rnn_seq2seq` with `feed_previous=True`, which contains
a `embedding_rnn_decoder` with `feed_previous=True` and
`update_embedding_for_previous=True`. The decoder is fed with "<Go>"
and outputs "A, B, C".
- Train `embedding_rnn_seq2seq` with `feed_previous=False`. The decoder
is fed with "<Go>, A, B".
"""
num_encoder_symbols = 3
num_decoder_symbols = 5
batch_size = 2
num_enc_timesteps = 2
num_dec_timesteps = 3
def TestModel(seq2seq):
with self.test_session(graph=ops.Graph()) as sess:
random_seed.set_random_seed(111)
random.seed(111)
np.random.seed(111)
enc_inp = [
constant_op.constant(
i + 1, dtypes.int32, shape=[batch_size])
for i in range(num_enc_timesteps)
]
dec_inp_fp_true = [
constant_op.constant(
i, dtypes.int32, shape=[batch_size])
for i in range(num_dec_timesteps)
]
dec_inp_holder_fp_false = [
array_ops.placeholder(
dtypes.int32, shape=[batch_size])
for _ in range(num_dec_timesteps)
]
targets = [
constant_op.constant(
i + 1, dtypes.int32, shape=[batch_size])
for i in range(num_dec_timesteps)
]
weights = [
constant_op.constant(
1.0, shape=[batch_size]) for i in range(num_dec_timesteps)
]
def ForwardBackward(enc_inp, dec_inp, feed_previous):
scope_name = "fp_{}".format(feed_previous)
with variable_scope.variable_scope(scope_name):
dec_op, _ = seq2seq(enc_inp, dec_inp, feed_previous=feed_previous)
net_variables = ops.get_collection(ops.GraphKeys.GLOBAL_VARIABLES,
scope_name)
optimizer = adam.AdamOptimizer(0.03, epsilon=1e-5)
update_op = optimizer.minimize(
seq2seq_lib.sequence_loss(dec_op, targets, weights),
var_list=net_variables)
return dec_op, update_op, net_variables
dec_op_fp_true, update_fp_true, variables_fp_true = ForwardBackward(
enc_inp, dec_inp_fp_true, feed_previous=True)
dec_op_fp_false, update_fp_false, variables_fp_false = ForwardBackward(
enc_inp, dec_inp_holder_fp_false, feed_previous=False)
sess.run(variables.global_variables_initializer())
# We only check consistencies between the variables existing in both
# the models with True and False feed_previous. Variables created by
# the loop_function in the model with True feed_previous are ignored.
v_false_name_dict = {
v.name.split("/", 1)[-1]: v
for v in variables_fp_false
}
matched_variables = [(v, v_false_name_dict[v.name.split("/", 1)[-1]])
for v in variables_fp_true]
for v_true, v_false in matched_variables:
sess.run(state_ops.assign(v_false, v_true))
# Take the symbols generated by the decoder with feed_previous=True as
# the true input symbols for the decoder with feed_previous=False.
dec_fp_true = sess.run(dec_op_fp_true)
output_symbols_fp_true = np.argmax(dec_fp_true, axis=2)
dec_inp_fp_false = np.vstack((dec_inp_fp_true[0].eval(),
output_symbols_fp_true[:-1]))
sess.run(update_fp_true)
sess.run(update_fp_false, {
holder: inp
for holder, inp in zip(dec_inp_holder_fp_false, dec_inp_fp_false)
})
for v_true, v_false in matched_variables:
self.assertAllClose(v_true.eval(), v_false.eval())
def EmbeddingRNNSeq2SeqF(enc_inp, dec_inp, feed_previous):
cell = core_rnn_cell_impl.BasicLSTMCell(2, state_is_tuple=True)
return seq2seq_lib.embedding_rnn_seq2seq(
enc_inp,
dec_inp,
cell,
num_encoder_symbols,
num_decoder_symbols,
embedding_size=2,
feed_previous=feed_previous)
def EmbeddingRNNSeq2SeqNoTupleF(enc_inp, dec_inp, feed_previous):
cell = core_rnn_cell_impl.BasicLSTMCell(2, state_is_tuple=False)
return seq2seq_lib.embedding_rnn_seq2seq(
enc_inp,
dec_inp,
cell,
num_encoder_symbols,
num_decoder_symbols,
embedding_size=2,
feed_previous=feed_previous)
def EmbeddingTiedRNNSeq2Seq(enc_inp, dec_inp, feed_previous):
cell = core_rnn_cell_impl.BasicLSTMCell(2, state_is_tuple=True)
return seq2seq_lib.embedding_tied_rnn_seq2seq(
enc_inp,
dec_inp,
cell,
num_decoder_symbols,
embedding_size=2,
feed_previous=feed_previous)
def EmbeddingTiedRNNSeq2SeqNoTuple(enc_inp, dec_inp, feed_previous):
cell = core_rnn_cell_impl.BasicLSTMCell(2, state_is_tuple=False)
return seq2seq_lib.embedding_tied_rnn_seq2seq(
enc_inp,
dec_inp,
cell,
num_decoder_symbols,
embedding_size=2,
feed_previous=feed_previous)
def EmbeddingAttentionSeq2Seq(enc_inp, dec_inp, feed_previous):
cell = core_rnn_cell_impl.BasicLSTMCell(2, state_is_tuple=True)
return seq2seq_lib.embedding_attention_seq2seq(
enc_inp,
dec_inp,
cell,
num_encoder_symbols,
num_decoder_symbols,
embedding_size=2,
feed_previous=feed_previous)
def EmbeddingAttentionSeq2SeqNoTuple(enc_inp, dec_inp, feed_previous):
cell = core_rnn_cell_impl.BasicLSTMCell(2, state_is_tuple=False)
return seq2seq_lib.embedding_attention_seq2seq(
enc_inp,
dec_inp,
cell,
num_encoder_symbols,
num_decoder_symbols,
embedding_size=2,
feed_previous=feed_previous)
for model in (EmbeddingRNNSeq2SeqF, EmbeddingRNNSeq2SeqNoTupleF,
EmbeddingTiedRNNSeq2Seq, EmbeddingTiedRNNSeq2SeqNoTuple,
EmbeddingAttentionSeq2Seq, EmbeddingAttentionSeq2SeqNoTuple):
TestModel(model)
if __name__ == "__main__":
test.main()
| apache-2.0 |
qizenguf/MLC-STT | src/mem/AddrMapper.py | 69 | 3530 | # Copyright (c) 2012 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Andreas Hansson
from m5.params import *
from MemObject import MemObject
# An address mapper changes the packet addresses in going from the
# slave port side of the mapper to the master port side. When the
# slave port is queried for the address ranges, it also performs the
# necessary range updates. Note that snoop requests that travel from
# the master port (i.e. the memory side) to the slave port are
# currently not modified.
class AddrMapper(MemObject):
type = 'AddrMapper'
cxx_header = 'mem/addr_mapper.hh'
abstract = True
# one port in each direction
master = MasterPort("Master port")
slave = SlavePort("Slave port")
# Range address mapper that maps a set of original ranges to a set of
# remapped ranges, where a specific range is of the same size
# (original and remapped), only with an offset.
class RangeAddrMapper(AddrMapper):
type = 'RangeAddrMapper'
cxx_header = 'mem/addr_mapper.hh'
# These two vectors should be the exact same length and each range
# should be the exact same size. Each range in original_ranges is
# mapped to the corresponding element in the remapped_ranges. Note
# that the same range can occur multiple times in the remapped
# ranges for address aliasing.
original_ranges = VectorParam.AddrRange(
"Ranges of memory that should me remapped")
remapped_ranges = VectorParam.AddrRange(
"Ranges of memory that are being mapped to")
| bsd-3-clause |
bzbarsky/servo | tests/wpt/css-tests/tools/html5lib/html5lib/ihatexml.py | 1727 | 16581 | from __future__ import absolute_import, division, unicode_literals
import re
import warnings
from .constants import DataLossWarning
baseChar = """
[#x0041-#x005A] | [#x0061-#x007A] | [#x00C0-#x00D6] | [#x00D8-#x00F6] |
[#x00F8-#x00FF] | [#x0100-#x0131] | [#x0134-#x013E] | [#x0141-#x0148] |
[#x014A-#x017E] | [#x0180-#x01C3] | [#x01CD-#x01F0] | [#x01F4-#x01F5] |
[#x01FA-#x0217] | [#x0250-#x02A8] | [#x02BB-#x02C1] | #x0386 |
[#x0388-#x038A] | #x038C | [#x038E-#x03A1] | [#x03A3-#x03CE] |
[#x03D0-#x03D6] | #x03DA | #x03DC | #x03DE | #x03E0 | [#x03E2-#x03F3] |
[#x0401-#x040C] | [#x040E-#x044F] | [#x0451-#x045C] | [#x045E-#x0481] |
[#x0490-#x04C4] | [#x04C7-#x04C8] | [#x04CB-#x04CC] | [#x04D0-#x04EB] |
[#x04EE-#x04F5] | [#x04F8-#x04F9] | [#x0531-#x0556] | #x0559 |
[#x0561-#x0586] | [#x05D0-#x05EA] | [#x05F0-#x05F2] | [#x0621-#x063A] |
[#x0641-#x064A] | [#x0671-#x06B7] | [#x06BA-#x06BE] | [#x06C0-#x06CE] |
[#x06D0-#x06D3] | #x06D5 | [#x06E5-#x06E6] | [#x0905-#x0939] | #x093D |
[#x0958-#x0961] | [#x0985-#x098C] | [#x098F-#x0990] | [#x0993-#x09A8] |
[#x09AA-#x09B0] | #x09B2 | [#x09B6-#x09B9] | [#x09DC-#x09DD] |
[#x09DF-#x09E1] | [#x09F0-#x09F1] | [#x0A05-#x0A0A] | [#x0A0F-#x0A10] |
[#x0A13-#x0A28] | [#x0A2A-#x0A30] | [#x0A32-#x0A33] | [#x0A35-#x0A36] |
[#x0A38-#x0A39] | [#x0A59-#x0A5C] | #x0A5E | [#x0A72-#x0A74] |
[#x0A85-#x0A8B] | #x0A8D | [#x0A8F-#x0A91] | [#x0A93-#x0AA8] |
[#x0AAA-#x0AB0] | [#x0AB2-#x0AB3] | [#x0AB5-#x0AB9] | #x0ABD | #x0AE0 |
[#x0B05-#x0B0C] | [#x0B0F-#x0B10] | [#x0B13-#x0B28] | [#x0B2A-#x0B30] |
[#x0B32-#x0B33] | [#x0B36-#x0B39] | #x0B3D | [#x0B5C-#x0B5D] |
[#x0B5F-#x0B61] | [#x0B85-#x0B8A] | [#x0B8E-#x0B90] | [#x0B92-#x0B95] |
[#x0B99-#x0B9A] | #x0B9C | [#x0B9E-#x0B9F] | [#x0BA3-#x0BA4] |
[#x0BA8-#x0BAA] | [#x0BAE-#x0BB5] | [#x0BB7-#x0BB9] | [#x0C05-#x0C0C] |
[#x0C0E-#x0C10] | [#x0C12-#x0C28] | [#x0C2A-#x0C33] | [#x0C35-#x0C39] |
[#x0C60-#x0C61] | [#x0C85-#x0C8C] | [#x0C8E-#x0C90] | [#x0C92-#x0CA8] |
[#x0CAA-#x0CB3] | [#x0CB5-#x0CB9] | #x0CDE | [#x0CE0-#x0CE1] |
[#x0D05-#x0D0C] | [#x0D0E-#x0D10] | [#x0D12-#x0D28] | [#x0D2A-#x0D39] |
[#x0D60-#x0D61] | [#x0E01-#x0E2E] | #x0E30 | [#x0E32-#x0E33] |
[#x0E40-#x0E45] | [#x0E81-#x0E82] | #x0E84 | [#x0E87-#x0E88] | #x0E8A |
#x0E8D | [#x0E94-#x0E97] | [#x0E99-#x0E9F] | [#x0EA1-#x0EA3] | #x0EA5 |
#x0EA7 | [#x0EAA-#x0EAB] | [#x0EAD-#x0EAE] | #x0EB0 | [#x0EB2-#x0EB3] |
#x0EBD | [#x0EC0-#x0EC4] | [#x0F40-#x0F47] | [#x0F49-#x0F69] |
[#x10A0-#x10C5] | [#x10D0-#x10F6] | #x1100 | [#x1102-#x1103] |
[#x1105-#x1107] | #x1109 | [#x110B-#x110C] | [#x110E-#x1112] | #x113C |
#x113E | #x1140 | #x114C | #x114E | #x1150 | [#x1154-#x1155] | #x1159 |
[#x115F-#x1161] | #x1163 | #x1165 | #x1167 | #x1169 | [#x116D-#x116E] |
[#x1172-#x1173] | #x1175 | #x119E | #x11A8 | #x11AB | [#x11AE-#x11AF] |
[#x11B7-#x11B8] | #x11BA | [#x11BC-#x11C2] | #x11EB | #x11F0 | #x11F9 |
[#x1E00-#x1E9B] | [#x1EA0-#x1EF9] | [#x1F00-#x1F15] | [#x1F18-#x1F1D] |
[#x1F20-#x1F45] | [#x1F48-#x1F4D] | [#x1F50-#x1F57] | #x1F59 | #x1F5B |
#x1F5D | [#x1F5F-#x1F7D] | [#x1F80-#x1FB4] | [#x1FB6-#x1FBC] | #x1FBE |
[#x1FC2-#x1FC4] | [#x1FC6-#x1FCC] | [#x1FD0-#x1FD3] | [#x1FD6-#x1FDB] |
[#x1FE0-#x1FEC] | [#x1FF2-#x1FF4] | [#x1FF6-#x1FFC] | #x2126 |
[#x212A-#x212B] | #x212E | [#x2180-#x2182] | [#x3041-#x3094] |
[#x30A1-#x30FA] | [#x3105-#x312C] | [#xAC00-#xD7A3]"""
ideographic = """[#x4E00-#x9FA5] | #x3007 | [#x3021-#x3029]"""
combiningCharacter = """
[#x0300-#x0345] | [#x0360-#x0361] | [#x0483-#x0486] | [#x0591-#x05A1] |
[#x05A3-#x05B9] | [#x05BB-#x05BD] | #x05BF | [#x05C1-#x05C2] | #x05C4 |
[#x064B-#x0652] | #x0670 | [#x06D6-#x06DC] | [#x06DD-#x06DF] |
[#x06E0-#x06E4] | [#x06E7-#x06E8] | [#x06EA-#x06ED] | [#x0901-#x0903] |
#x093C | [#x093E-#x094C] | #x094D | [#x0951-#x0954] | [#x0962-#x0963] |
[#x0981-#x0983] | #x09BC | #x09BE | #x09BF | [#x09C0-#x09C4] |
[#x09C7-#x09C8] | [#x09CB-#x09CD] | #x09D7 | [#x09E2-#x09E3] | #x0A02 |
#x0A3C | #x0A3E | #x0A3F | [#x0A40-#x0A42] | [#x0A47-#x0A48] |
[#x0A4B-#x0A4D] | [#x0A70-#x0A71] | [#x0A81-#x0A83] | #x0ABC |
[#x0ABE-#x0AC5] | [#x0AC7-#x0AC9] | [#x0ACB-#x0ACD] | [#x0B01-#x0B03] |
#x0B3C | [#x0B3E-#x0B43] | [#x0B47-#x0B48] | [#x0B4B-#x0B4D] |
[#x0B56-#x0B57] | [#x0B82-#x0B83] | [#x0BBE-#x0BC2] | [#x0BC6-#x0BC8] |
[#x0BCA-#x0BCD] | #x0BD7 | [#x0C01-#x0C03] | [#x0C3E-#x0C44] |
[#x0C46-#x0C48] | [#x0C4A-#x0C4D] | [#x0C55-#x0C56] | [#x0C82-#x0C83] |
[#x0CBE-#x0CC4] | [#x0CC6-#x0CC8] | [#x0CCA-#x0CCD] | [#x0CD5-#x0CD6] |
[#x0D02-#x0D03] | [#x0D3E-#x0D43] | [#x0D46-#x0D48] | [#x0D4A-#x0D4D] |
#x0D57 | #x0E31 | [#x0E34-#x0E3A] | [#x0E47-#x0E4E] | #x0EB1 |
[#x0EB4-#x0EB9] | [#x0EBB-#x0EBC] | [#x0EC8-#x0ECD] | [#x0F18-#x0F19] |
#x0F35 | #x0F37 | #x0F39 | #x0F3E | #x0F3F | [#x0F71-#x0F84] |
[#x0F86-#x0F8B] | [#x0F90-#x0F95] | #x0F97 | [#x0F99-#x0FAD] |
[#x0FB1-#x0FB7] | #x0FB9 | [#x20D0-#x20DC] | #x20E1 | [#x302A-#x302F] |
#x3099 | #x309A"""
digit = """
[#x0030-#x0039] | [#x0660-#x0669] | [#x06F0-#x06F9] | [#x0966-#x096F] |
[#x09E6-#x09EF] | [#x0A66-#x0A6F] | [#x0AE6-#x0AEF] | [#x0B66-#x0B6F] |
[#x0BE7-#x0BEF] | [#x0C66-#x0C6F] | [#x0CE6-#x0CEF] | [#x0D66-#x0D6F] |
[#x0E50-#x0E59] | [#x0ED0-#x0ED9] | [#x0F20-#x0F29]"""
extender = """
#x00B7 | #x02D0 | #x02D1 | #x0387 | #x0640 | #x0E46 | #x0EC6 | #x3005 |
[#x3031-#x3035] | [#x309D-#x309E] | [#x30FC-#x30FE]"""
letter = " | ".join([baseChar, ideographic])
# Without the ":" that XML names would otherwise allow
name = " | ".join([letter, digit, ".", "-", "_", combiningCharacter,
extender])
nameFirst = " | ".join([letter, "_"])
reChar = re.compile(r"#x([\d|A-F]{4,4})")
reCharRange = re.compile(r"\[#x([\d|A-F]{4,4})-#x([\d|A-F]{4,4})\]")
def charStringToList(chars):
charRanges = [item.strip() for item in chars.split(" | ")]
rv = []
for item in charRanges:
foundMatch = False
for regexp in (reChar, reCharRange):
match = regexp.match(item)
if match is not None:
rv.append([hexToInt(item) for item in match.groups()])
if len(rv[-1]) == 1:
rv[-1] = rv[-1] * 2
foundMatch = True
break
if not foundMatch:
assert len(item) == 1
rv.append([ord(item)] * 2)
rv = normaliseCharList(rv)
return rv
def normaliseCharList(charList):
charList = sorted(charList)
for item in charList:
assert item[1] >= item[0]
rv = []
i = 0
while i < len(charList):
j = 1
rv.append(charList[i])
while i + j < len(charList) and charList[i + j][0] <= rv[-1][1] + 1:
rv[-1][1] = charList[i + j][1]
j += 1
i += j
return rv
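# Illustrative note (added comment, not in the original source):
# normaliseCharList merges overlapping or adjacent ranges, e.g.
#   normaliseCharList([[1, 3], [2, 5], [8, 9]]) == [[1, 5], [8, 9]]
#   normaliseCharList([[1, 3], [4, 6]]) == [[1, 6]]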
# We don't really support characters above the BMP :(
max_unicode = int("FFFF", 16)
def missingRanges(charList):
rv = []
    if charList[0][0] != 0:
rv.append([0, charList[0][0] - 1])
for i, item in enumerate(charList[:-1]):
rv.append([item[1] + 1, charList[i + 1][0] - 1])
if charList[-1][1] != max_unicode:
rv.append([charList[-1][1] + 1, max_unicode])
return rv
def listToRegexpStr(charList):
rv = []
for item in charList:
if item[0] == item[1]:
rv.append(escapeRegexp(chr(item[0])))
else:
rv.append(escapeRegexp(chr(item[0])) + "-" +
escapeRegexp(chr(item[1])))
return "[%s]" % "".join(rv)
def hexToInt(hex_str):
return int(hex_str, 16)
def escapeRegexp(string):
specialCharacters = (".", "^", "$", "*", "+", "?", "{", "}",
"[", "]", "|", "(", ")", "-")
for char in specialCharacters:
string = string.replace(char, "\\" + char)
return string
# output from the above
nonXmlNameBMPRegexp = re.compile('[\x00-,/:-@\\[-\\^`\\{-\xb6\xb8-\xbf\xd7\xf7\u0132-\u0133\u013f-\u0140\u0149\u017f\u01c4-\u01cc\u01f1-\u01f3\u01f6-\u01f9\u0218-\u024f\u02a9-\u02ba\u02c2-\u02cf\u02d2-\u02ff\u0346-\u035f\u0362-\u0385\u038b\u038d\u03a2\u03cf\u03d7-\u03d9\u03db\u03dd\u03df\u03e1\u03f4-\u0400\u040d\u0450\u045d\u0482\u0487-\u048f\u04c5-\u04c6\u04c9-\u04ca\u04cd-\u04cf\u04ec-\u04ed\u04f6-\u04f7\u04fa-\u0530\u0557-\u0558\u055a-\u0560\u0587-\u0590\u05a2\u05ba\u05be\u05c0\u05c3\u05c5-\u05cf\u05eb-\u05ef\u05f3-\u0620\u063b-\u063f\u0653-\u065f\u066a-\u066f\u06b8-\u06b9\u06bf\u06cf\u06d4\u06e9\u06ee-\u06ef\u06fa-\u0900\u0904\u093a-\u093b\u094e-\u0950\u0955-\u0957\u0964-\u0965\u0970-\u0980\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09bb\u09bd\u09c5-\u09c6\u09c9-\u09ca\u09ce-\u09d6\u09d8-\u09db\u09de\u09e4-\u09e5\u09f2-\u0a01\u0a03-\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a3b\u0a3d\u0a43-\u0a46\u0a49-\u0a4a\u0a4e-\u0a58\u0a5d\u0a5f-\u0a65\u0a75-\u0a80\u0a84\u0a8c\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abb\u0ac6\u0aca\u0ace-\u0adf\u0ae1-\u0ae5\u0af0-\u0b00\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34-\u0b35\u0b3a-\u0b3b\u0b44-\u0b46\u0b49-\u0b4a\u0b4e-\u0b55\u0b58-\u0b5b\u0b5e\u0b62-\u0b65\u0b70-\u0b81\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bb6\u0bba-\u0bbd\u0bc3-\u0bc5\u0bc9\u0bce-\u0bd6\u0bd8-\u0be6\u0bf0-\u0c00\u0c04\u0c0d\u0c11\u0c29\u0c34\u0c3a-\u0c3d\u0c45\u0c49\u0c4e-\u0c54\u0c57-\u0c5f\u0c62-\u0c65\u0c70-\u0c81\u0c84\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cbd\u0cc5\u0cc9\u0cce-\u0cd4\u0cd7-\u0cdd\u0cdf\u0ce2-\u0ce5\u0cf0-\u0d01\u0d04\u0d0d\u0d11\u0d29\u0d3a-\u0d3d\u0d44-\u0d45\u0d49\u0d4e-\u0d56\u0d58-\u0d5f\u0d62-\u0d65\u0d70-\u0e00\u0e2f\u0e3b-\u0e3f\u0e4f\u0e5a-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eaf\u0eba\u0ebe-\u0ebf\u0ec5\u0ec7\u0ece-\u0ecf\u0eda-\u0f17\u0f1a-\u0f1f\u0f2a-\u0f34\u0f36\u0f38\u0f3a-\u0f3d\u0f48\u0f6a-\u0f70\u0f85\u0f8c-\u0f8f\u0f96\u0f98\u0fae-\u0fb0\u0fb8\u0fba-\u109f\u10c6-\u10cf\u10f7-\u10ff\u1101\u1104\u1108\u110a\u110d\u1113-\u113b\u113d\u113f\u1141-\u114b\u114d\u114f\u1151-\u1153\u1156-\u1158\u115a-\u115e\u1162\u1164\u1166\u1168\u116a-\u116c\u116f-\u1171\u1174\u1176-\u119d\u119f-\u11a7\u11a9-\u11aa\u11ac-\u11ad\u11b0-\u11b6\u11b9\u11bb\u11c3-\u11ea\u11ec-\u11ef\u11f1-\u11f8\u11fa-\u1dff\u1e9c-\u1e9f\u1efa-\u1eff\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fbd\u1fbf-\u1fc1\u1fc5\u1fcd-\u1fcf\u1fd4-\u1fd5\u1fdc-\u1fdf\u1fed-\u1ff1\u1ff5\u1ffd-\u20cf\u20dd-\u20e0\u20e2-\u2125\u2127-\u2129\u212c-\u212d\u212f-\u217f\u2183-\u3004\u3006\u3008-\u3020\u3030\u3036-\u3040\u3095-\u3098\u309b-\u309c\u309f-\u30a0\u30fb\u30ff-\u3104\u312d-\u4dff\u9fa6-\uabff\ud7a4-\uffff]')
nonXmlNameFirstBMPRegexp = re.compile('[\x00-@\\[-\\^`\\{-\xbf\xd7\xf7\u0132-\u0133\u013f-\u0140\u0149\u017f\u01c4-\u01cc\u01f1-\u01f3\u01f6-\u01f9\u0218-\u024f\u02a9-\u02ba\u02c2-\u0385\u0387\u038b\u038d\u03a2\u03cf\u03d7-\u03d9\u03db\u03dd\u03df\u03e1\u03f4-\u0400\u040d\u0450\u045d\u0482-\u048f\u04c5-\u04c6\u04c9-\u04ca\u04cd-\u04cf\u04ec-\u04ed\u04f6-\u04f7\u04fa-\u0530\u0557-\u0558\u055a-\u0560\u0587-\u05cf\u05eb-\u05ef\u05f3-\u0620\u063b-\u0640\u064b-\u0670\u06b8-\u06b9\u06bf\u06cf\u06d4\u06d6-\u06e4\u06e7-\u0904\u093a-\u093c\u093e-\u0957\u0962-\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09db\u09de\u09e2-\u09ef\u09f2-\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a58\u0a5d\u0a5f-\u0a71\u0a75-\u0a84\u0a8c\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abc\u0abe-\u0adf\u0ae1-\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34-\u0b35\u0b3a-\u0b3c\u0b3e-\u0b5b\u0b5e\u0b62-\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bb6\u0bba-\u0c04\u0c0d\u0c11\u0c29\u0c34\u0c3a-\u0c5f\u0c62-\u0c84\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cdd\u0cdf\u0ce2-\u0d04\u0d0d\u0d11\u0d29\u0d3a-\u0d5f\u0d62-\u0e00\u0e2f\u0e31\u0e34-\u0e3f\u0e46-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eaf\u0eb1\u0eb4-\u0ebc\u0ebe-\u0ebf\u0ec5-\u0f3f\u0f48\u0f6a-\u109f\u10c6-\u10cf\u10f7-\u10ff\u1101\u1104\u1108\u110a\u110d\u1113-\u113b\u113d\u113f\u1141-\u114b\u114d\u114f\u1151-\u1153\u1156-\u1158\u115a-\u115e\u1162\u1164\u1166\u1168\u116a-\u116c\u116f-\u1171\u1174\u1176-\u119d\u119f-\u11a7\u11a9-\u11aa\u11ac-\u11ad\u11b0-\u11b6\u11b9\u11bb\u11c3-\u11ea\u11ec-\u11ef\u11f1-\u11f8\u11fa-\u1dff\u1e9c-\u1e9f\u1efa-\u1eff\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fbd\u1fbf-\u1fc1\u1fc5\u1fcd-\u1fcf\u1fd4-\u1fd5\u1fdc-\u1fdf\u1fed-\u1ff1\u1ff5\u1ffd-\u2125\u2127-\u2129\u212c-\u212d\u212f-\u217f\u2183-\u3006\u3008-\u3020\u302a-\u3040\u3095-\u30a0\u30fb-\u3104\u312d-\u4dff\u9fa6-\uabff\ud7a4-\uffff]')
# Simpler things
nonPubidCharRegexp = re.compile("[^\x20\x0D\x0Aa-zA-Z0-9\-\'()+,./:=?;!*#@$_%]")
class InfosetFilter(object):
replacementRegexp = re.compile(r"U[\dA-F]{5,5}")
def __init__(self, replaceChars=None,
dropXmlnsLocalName=False,
dropXmlnsAttrNs=False,
preventDoubleDashComments=False,
preventDashAtCommentEnd=False,
replaceFormFeedCharacters=True,
preventSingleQuotePubid=False):
self.dropXmlnsLocalName = dropXmlnsLocalName
self.dropXmlnsAttrNs = dropXmlnsAttrNs
self.preventDoubleDashComments = preventDoubleDashComments
self.preventDashAtCommentEnd = preventDashAtCommentEnd
self.replaceFormFeedCharacters = replaceFormFeedCharacters
self.preventSingleQuotePubid = preventSingleQuotePubid
self.replaceCache = {}
def coerceAttribute(self, name, namespace=None):
if self.dropXmlnsLocalName and name.startswith("xmlns:"):
warnings.warn("Attributes cannot begin with xmlns", DataLossWarning)
return None
elif (self.dropXmlnsAttrNs and
namespace == "http://www.w3.org/2000/xmlns/"):
warnings.warn("Attributes cannot be in the xml namespace", DataLossWarning)
return None
else:
return self.toXmlName(name)
def coerceElement(self, name, namespace=None):
return self.toXmlName(name)
def coerceComment(self, data):
if self.preventDoubleDashComments:
while "--" in data:
warnings.warn("Comments cannot contain adjacent dashes", DataLossWarning)
data = data.replace("--", "- -")
return data
def coerceCharacters(self, data):
if self.replaceFormFeedCharacters:
for i in range(data.count("\x0C")):
warnings.warn("Text cannot contain U+000C", DataLossWarning)
data = data.replace("\x0C", " ")
# Other non-xml characters
return data
def coercePubid(self, data):
dataOutput = data
for char in nonPubidCharRegexp.findall(data):
warnings.warn("Coercing non-XML pubid", DataLossWarning)
replacement = self.getReplacementCharacter(char)
dataOutput = dataOutput.replace(char, replacement)
if self.preventSingleQuotePubid and dataOutput.find("'") >= 0:
warnings.warn("Pubid cannot contain single quote", DataLossWarning)
dataOutput = dataOutput.replace("'", self.getReplacementCharacter("'"))
return dataOutput
def toXmlName(self, name):
nameFirst = name[0]
nameRest = name[1:]
m = nonXmlNameFirstBMPRegexp.match(nameFirst)
if m:
warnings.warn("Coercing non-XML name", DataLossWarning)
nameFirstOutput = self.getReplacementCharacter(nameFirst)
else:
nameFirstOutput = nameFirst
nameRestOutput = nameRest
replaceChars = set(nonXmlNameBMPRegexp.findall(nameRest))
for char in replaceChars:
warnings.warn("Coercing non-XML name", DataLossWarning)
replacement = self.getReplacementCharacter(char)
nameRestOutput = nameRestOutput.replace(char, replacement)
return nameFirstOutput + nameRestOutput
def getReplacementCharacter(self, char):
if char in self.replaceCache:
replacement = self.replaceCache[char]
else:
replacement = self.escapeChar(char)
return replacement
def fromXmlName(self, name):
for item in set(self.replacementRegexp.findall(name)):
name = name.replace(item, self.unescapeChar(item))
return name
def escapeChar(self, char):
replacement = "U%05X" % ord(char)
self.replaceCache[char] = replacement
return replacement
def unescapeChar(self, charcode):
return chr(int(charcode[1:], 16))
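# Illustrative example (added comment, not in the original source): the
# filter escapes characters that are not legal in XML names and can undo
# the escaping later, e.g.
#   f = InfosetFilter()
#   f.toXmlName("1foo")          # emits a DataLossWarning, returns 'U00031foo'
#   f.fromXmlName("U00031foo")   # returns '1foo'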
| mpl-2.0 |
9929105/KEEP | keep_backend/tests/test_api/test_data_api.py | 2 | 3603 | '''
Makes HTTP requests to each of our Data API functions to ensure there are
no templating/etc errors on the pages.
'''
import json
from tests import ApiTestCase
class DataApiV1KeyTests( ApiTestCase ):
AUTH_DETAILS = { 'format': 'json',
'user': 'admin',
'key': '35f7d1fb1890bdc05f9988d01cf1dcab' }
INVALID_AUTH = { 'format': 'json',
'user': 'admin',
'key': 'invalid_key' }
AUTH_DETAILS_OTHER = { 'format': 'json',
'user': 'test_user',
'key': '35f7d1fb1890bdc05f9988d01cf1dcab' }
def test_data_detail( self ):
'''
Test if we can list the repo details for a specific repo for a
test user.
'''
# Get the list of repos for the test user
repos = self.open( '/repos/', self.AUTH_DETAILS )
repos = json.loads( repos.content )
CSV_AUTH = dict( self.AUTH_DETAILS )
CSV_AUTH[ 'format' ] = 'csv'
for repo in repos.get( 'objects' ):
response = self.open( '/data/%s/' % ( repo.get( 'id' ) ), self.AUTH_DETAILS )
assert response.status_code == 200
response = self.open( '/data/%s/' % ( repo.get( 'id' ) ), CSV_AUTH )
assert response.status_code == 200
def test_data_detail_failures( self ):
'''
Test failure state when querying for repo data under a
non-permitted user
'''
# Get the list of repos for the test user
repos = self.open( '/repos/', self.AUTH_DETAILS )
repos = json.loads( repos.content )
for repo in repos.get( 'objects' ):
# Test valid user/key
response = self.open( '/data/%s/' % ( repo.get( 'id' ) ), self.AUTH_DETAILS_OTHER )
if repo.get( 'is_public' ):
assert response.status_code == 200
else:
assert response.status_code == 401
# Test invalid user/key
response = self.open( '/data/%s/' % ( repo.get( 'id' ) ), self.INVALID_AUTH )
assert response.status_code == 401
def test_data_post( self ):
'''
Test if we can successfully post data to the API for a repo.
'''
# Get the list of repos for the test user
repos = self.open( '/repos/', self.AUTH_DETAILS )
repos = json.loads( repos.content )
for repo in repos.get( 'objects' ):
# Grab the list of datapoints for this repo
response = self.open( '/data/%s/' % ( repo.get( 'id' ) ), self.AUTH_DETAILS )
response = json.loads( response.content )
beforeCount = response.get( 'meta' ).get( 'count' )
new_data = { 'name': 'Bob Dole',
'age': 20,
'gender': 'male',
'user': self.AUTH_DETAILS[ 'user' ],
'key': self.AUTH_DETAILS[ 'key' ],
'format': self.AUTH_DETAILS[ 'format' ] }
response = self.open( '/repos/%s/' % ( repo.get( 'id' ) ),
new_data,
method='POST' )
response = json.loads( response.content )
assert response.get( 'success' )
response = self.open( '/data/%s/' % ( repo.get( 'id' ) ), self.AUTH_DETAILS )
response = json.loads( response.content )
assert response.get( 'meta' ).get( 'count' ) == ( beforeCount + 1 )
| mit |
nickoe/asciidoc | asciidocapi.py | 85 | 8424 | #!/usr/bin/env python
"""
asciidocapi - AsciiDoc API wrapper class.
The AsciiDocAPI class provides an API for executing asciidoc. Minimal example
compiles `mydoc.txt` to `mydoc.html`:
import asciidocapi
asciidoc = asciidocapi.AsciiDocAPI()
asciidoc.execute('mydoc.txt')
- Full documentation in asciidocapi.txt.
- See the doctests below for more examples.
Doctests:
1. Check execution:
>>> import StringIO
>>> infile = StringIO.StringIO('Hello *{author}*')
>>> outfile = StringIO.StringIO()
>>> asciidoc = AsciiDocAPI()
>>> asciidoc.options('--no-header-footer')
>>> asciidoc.attributes['author'] = 'Joe Bloggs'
>>> asciidoc.execute(infile, outfile, backend='html4')
>>> print outfile.getvalue()
<p>Hello <strong>Joe Bloggs</strong></p>
>>> asciidoc.attributes['author'] = 'Bill Smith'
>>> infile = StringIO.StringIO('Hello _{author}_')
>>> outfile = StringIO.StringIO()
>>> asciidoc.execute(infile, outfile, backend='docbook')
>>> print outfile.getvalue()
<simpara>Hello <emphasis>Bill Smith</emphasis></simpara>
2. Check error handling:
>>> import StringIO
>>> asciidoc = AsciiDocAPI()
>>> infile = StringIO.StringIO('---------')
>>> outfile = StringIO.StringIO()
>>> asciidoc.execute(infile, outfile)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "asciidocapi.py", line 189, in execute
raise AsciiDocError(self.messages[-1])
AsciiDocError: ERROR: <stdin>: line 1: [blockdef-listing] missing closing delimiter
Copyright (C) 2009 Stuart Rackham. Free use of this software is granted
under the terms of the GNU General Public License (GPL).
"""
import sys,os,re,imp
API_VERSION = '0.1.2'
MIN_ASCIIDOC_VERSION = '8.4.1' # Minimum acceptable AsciiDoc version.
def find_in_path(fname, path=None):
"""
Find file fname in paths. Return None if not found.
"""
if path is None:
path = os.environ.get('PATH', '')
for dir in path.split(os.pathsep):
fpath = os.path.join(dir, fname)
if os.path.isfile(fpath):
return fpath
else:
return None
class AsciiDocError(Exception):
pass
class Options(object):
"""
Stores asciidoc(1) command options.
"""
def __init__(self, values=[]):
self.values = values[:]
def __call__(self, name, value=None):
"""Shortcut for append method."""
self.append(name, value)
def append(self, name, value=None):
if type(value) in (int,float):
value = str(value)
self.values.append((name,value))
class Version(object):
"""
Parse and compare AsciiDoc version numbers. Instance attributes:
string: String version number '<major>.<minor>[.<micro>][suffix]'.
major: Integer major version number.
minor: Integer minor version number.
micro: Integer micro version number.
suffix: Suffix (begins with non-numeric character) is ignored when
comparing.
Doctest examples:
>>> Version('8.2.5') < Version('8.3 beta 1')
True
>>> Version('8.3.0') == Version('8.3. beta 1')
True
>>> Version('8.2.0') < Version('8.20')
True
>>> Version('8.20').major
8
>>> Version('8.20').minor
20
>>> Version('8.20').micro
0
>>> Version('8.20').suffix
''
>>> Version('8.20 beta 1').suffix
'beta 1'
"""
def __init__(self, version):
self.string = version
reo = re.match(r'^(\d+)\.(\d+)(\.(\d+))?\s*(.*?)\s*$', self.string)
if not reo:
raise ValueError('invalid version number: %s' % self.string)
groups = reo.groups()
self.major = int(groups[0])
self.minor = int(groups[1])
self.micro = int(groups[3] or '0')
self.suffix = groups[4] or ''
def __cmp__(self, other):
result = cmp(self.major, other.major)
if result == 0:
result = cmp(self.minor, other.minor)
if result == 0:
result = cmp(self.micro, other.micro)
return result
class AsciiDocAPI(object):
"""
AsciiDoc API class.
"""
def __init__(self, asciidoc_py=None):
"""
Locate and import asciidoc.py.
Initialize instance attributes.
"""
self.options = Options()
self.attributes = {}
self.messages = []
# Search for the asciidoc command file.
# Try ASCIIDOC_PY environment variable first.
cmd = os.environ.get('ASCIIDOC_PY')
if cmd:
if not os.path.isfile(cmd):
raise AsciiDocError('missing ASCIIDOC_PY file: %s' % cmd)
elif asciidoc_py:
# Next try path specified by caller.
cmd = asciidoc_py
if not os.path.isfile(cmd):
raise AsciiDocError('missing file: %s' % cmd)
else:
# Try shell search paths.
for fname in ['asciidoc.py','asciidoc.pyc','asciidoc']:
cmd = find_in_path(fname)
if cmd: break
else:
# Finally try current working directory.
for cmd in ['asciidoc.py','asciidoc.pyc','asciidoc']:
if os.path.isfile(cmd): break
else:
raise AsciiDocError('failed to locate asciidoc')
self.cmd = os.path.realpath(cmd)
self.__import_asciidoc()
def __import_asciidoc(self, reload=False):
'''
Import asciidoc module (script or compiled .pyc).
See
http://groups.google.com/group/asciidoc/browse_frm/thread/66e7b59d12cd2f91
for an explanation of why a seemingly straight-forward job turned out
quite complicated.
'''
if os.path.splitext(self.cmd)[1] in ['.py','.pyc']:
sys.path.insert(0, os.path.dirname(self.cmd))
try:
try:
if reload:
import __builtin__ # Because reload() is shadowed.
__builtin__.reload(self.asciidoc)
else:
import asciidoc
self.asciidoc = asciidoc
except ImportError:
raise AsciiDocError('failed to import ' + self.cmd)
finally:
del sys.path[0]
else:
# The import statement can only handle .py or .pyc files, have to
# use imp.load_source() for scripts with other names.
try:
imp.load_source('asciidoc', self.cmd)
import asciidoc
self.asciidoc = asciidoc
except ImportError:
raise AsciiDocError('failed to import ' + self.cmd)
if Version(self.asciidoc.VERSION) < Version(MIN_ASCIIDOC_VERSION):
raise AsciiDocError(
'asciidocapi %s requires asciidoc %s or better'
% (API_VERSION, MIN_ASCIIDOC_VERSION))
def execute(self, infile, outfile=None, backend=None):
"""
Compile infile to outfile using backend format.
        infile and outfile can be file path strings or file-like objects.
"""
self.messages = []
opts = Options(self.options.values)
if outfile is not None:
opts('--out-file', outfile)
if backend is not None:
opts('--backend', backend)
for k,v in self.attributes.items():
if v == '' or k[-1] in '!@':
s = k
elif v is None: # A None value undefines the attribute.
s = k + '!'
else:
s = '%s=%s' % (k,v)
opts('--attribute', s)
args = [infile]
# The AsciiDoc command was designed to process source text then
# exit, there are globals and statics in asciidoc.py that have
# to be reinitialized before each run -- hence the reload.
self.__import_asciidoc(reload=True)
try:
try:
self.asciidoc.execute(self.cmd, opts.values, args)
finally:
self.messages = self.asciidoc.messages[:]
except SystemExit, e:
if e.code:
raise AsciiDocError(self.messages[-1])
if __name__ == "__main__":
"""
Run module doctests.
"""
import doctest
options = doctest.NORMALIZE_WHITESPACE + doctest.ELLIPSIS
doctest.testmod(optionflags=options)
| gpl-2.0 |
arcticshores/kivy | kivy/core/image/img_pil.py | 37 | 3535 | '''
PIL: PIL image loader
'''
__all__ = ('ImageLoaderPIL', )
try:
from PIL import Image as PILImage
except:
import Image as PILImage
from kivy.logger import Logger
from kivy.core.image import ImageLoaderBase, ImageData, ImageLoader
class ImageLoaderPIL(ImageLoaderBase):
'''Image loader based on the PIL library.
.. versionadded:: 1.0.8
Support for GIF animation added.
    GIF animation has a lot of issues (transparency, color depths, etc.).
    In order to keep it simple, what is implemented here is what is
    natively supported by the PIL library.
    As a general rule, try to use GIFs that have no transparency.
    GIFs with transparency will work, but be prepared for some
    artifacts until transparency support is improved.
'''
@staticmethod
def can_save():
return True
@staticmethod
def can_load_memory():
return True
@staticmethod
def extensions():
'''Return accepted extensions for this loader'''
# See http://www.pythonware.com/library/pil/handbook/index.htm
return ('bmp', 'bufr', 'cur', 'dcx', 'fits', 'fl', 'fpx', 'gbr',
'gd', 'gif', 'grib', 'hdf5', 'ico', 'im', 'imt', 'iptc',
'jpeg', 'jpg', 'jpe', 'mcidas', 'mic', 'mpeg', 'msp',
'pcd', 'pcx', 'pixar', 'png', 'ppm', 'psd', 'sgi',
'spider', 'tga', 'tiff', 'wal', 'wmf', 'xbm', 'xpm',
'xv')
def _img_correct(self, _img_tmp):
'''Convert image to the correct format and orientation.
'''
# image loader work only with rgb/rgba image
if _img_tmp.mode.lower() not in ('rgb', 'rgba'):
try:
imc = _img_tmp.convert('RGBA')
except:
Logger.warning(
'Image: Unable to convert image to rgba (was %s)' %
(_img_tmp.mode.lower()))
raise
_img_tmp = imc
return _img_tmp
def _img_read(self, im):
'''Read images from an animated file.
'''
im.seek(0)
# Read all images inside
try:
img_ol = None
while True:
img_tmp = im
img_tmp = self._img_correct(img_tmp)
if img_ol and (hasattr(im, 'dispose') and not im.dispose):
# paste new frame over old so as to handle
# transparency properly
img_ol.paste(img_tmp, (0, 0), img_tmp)
img_tmp = img_ol
img_ol = img_tmp
yield ImageData(img_tmp.size[0], img_tmp.size[1],
img_tmp.mode.lower(), img_tmp.tostring())
im.seek(im.tell() + 1)
except EOFError:
pass
def load(self, filename):
try:
im = PILImage.open(filename)
except:
Logger.warning('Image: Unable to load image <%s>' % filename)
raise
# update internals
if not self._inline:
self.filename = filename
# returns an array of type ImageData len 1 if not a sequence image
return list(self._img_read(im))
@staticmethod
def save(filename, width, height, fmt, pixels, flipped=False):
image = PILImage.fromstring(fmt.upper(), (width, height), pixels)
if flipped:
image = image.transpose(PILImage.FLIP_TOP_BOTTOM)
image.save(filename)
return True
# register
ImageLoader.register(ImageLoaderPIL)
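# Typical use goes through the high-level wrapper rather than this loader
# directly; a rough sketch (assuming a running Kivy application, and that
# the PIL loader is the one picked for the extension):
#   from kivy.core.image import Image
#   img = Image('animation.gif')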
| mit |
ksachs/invenio | modules/bibindex/lib/bibindex_engine.py | 1 | 101146 | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009,
## 2010, 2011, 2012, 2013 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
BibIndex indexing engine implementation.
See bibindex executable for entry point.
"""
__revision__ = "$Id$"
import re
import sys
import time
import fnmatch
import inspect
from datetime import datetime
from invenio.config import CFG_SOLR_URL
from invenio.bibindex_engine_config import CFG_MAX_MYSQL_THREADS, \
CFG_MYSQL_THREAD_TIMEOUT, \
CFG_CHECK_MYSQL_THREADS, \
CFG_BIBINDEX_INDEX_TABLE_TYPE, \
CFG_BIBINDEX_ADDING_RECORDS_STARTED_STR, \
CFG_BIBINDEX_UPDATE_MESSAGE, \
CFG_BIBINDEX_UPDATE_MODE, \
CFG_BIBINDEX_TOKENIZER_TYPE, \
CFG_BIBINDEX_WASH_INDEX_TERMS, \
CFG_BIBINDEX_SPECIAL_TAGS
from invenio.bibauthority_config import \
CFG_BIBAUTHORITY_CONTROLLED_FIELDS_BIBLIOGRAPHIC
from invenio.bibauthority_engine import \
get_control_nos_from_recID
from invenio.search_engine import perform_request_search, \
get_index_stemming_language, \
get_synonym_terms, \
search_pattern, \
search_unit_in_bibrec
from invenio.dbquery import run_sql, DatabaseError, serialize_via_marshal, \
deserialize_via_marshal, wash_table_column_name
from invenio.bibindex_engine_washer import wash_index_term
from invenio.bibtask import task_init, write_message, get_datetime, \
task_set_option, task_get_option, task_get_task_param, \
task_update_progress, task_sleep_now_if_required
from invenio.intbitset import intbitset
from invenio.errorlib import register_exception
from invenio.solrutils_bibindex_indexer import solr_commit
from invenio.bibindex_tokenizers.BibIndexJournalTokenizer import \
CFG_JOURNAL_TAG, \
CFG_JOURNAL_PUBINFO_STANDARD_FORM, \
CFG_JOURNAL_PUBINFO_STANDARD_FORM_REGEXP_CHECK
from invenio.bibindex_termcollectors import TermCollector
from invenio.bibindex_engine_utils import load_tokenizers, \
get_all_index_names_and_column_values, \
get_index_tags, \
get_field_tags, \
get_marc_tag_indexes, \
get_nonmarc_tag_indexes, \
get_all_indexes, \
get_index_virtual_indexes, \
get_virtual_index_building_blocks, \
get_index_id_from_index_name, \
run_sql_drop_silently, \
get_min_last_updated, \
remove_inexistent_indexes, \
get_all_synonym_knowledge_bases, \
get_index_remove_stopwords, \
get_index_remove_html_markup, \
get_index_remove_latex_markup, \
filter_for_virtual_indexes, \
get_records_range_for_index, \
make_prefix, \
list_union, \
recognize_marc_tag
from invenio.bibindex_termcollectors import \
TermCollector, \
NonmarcTermCollector
from invenio.memoiseutils import Memoise
if sys.hexversion < 0x2040000:
# pylint: disable=W0622
from sets import Set as set
# pylint: enable=W0622
## precompile some often-used regexp for speed reasons:
re_subfields = re.compile('\$\$\w')
re_datetime_shift = re.compile("([-\+]{0,1})([\d]+)([dhms])")
re_prefix = re.compile('__[a-zA-Z1-9]*__')
nb_char_in_line = 50 # for verbose pretty printing
chunksize = 1000 # default size of chunks that the records will be treated by
base_process_size = 4500 # process base size
_last_word_table = None
_TOKENIZERS = load_tokenizers()
def list_unique(_list):
"""Returns a _list with duplicates removed."""
_dict = {}
for e in _list:
_dict[e] = 1
return _dict.keys()
## safety function for killing slow DB threads:
def kill_sleepy_mysql_threads(max_threads=CFG_MAX_MYSQL_THREADS,
thread_timeout=CFG_MYSQL_THREAD_TIMEOUT):
"""Check the number of DB threads and if there are more than
MAX_THREADS of them, lill all threads that are in a sleeping
state for more than THREAD_TIMEOUT seconds. (This is useful
for working around the the max_connection problem that appears
during indexation in some not-yet-understood cases.) If some
threads are to be killed, write info into the log file.
"""
res = run_sql("SHOW FULL PROCESSLIST")
if len(res) > max_threads:
for row in res:
r_id, dummy, dummy, dummy, r_command, r_time, dummy, dummy = row
if r_command == "Sleep" and int(r_time) > thread_timeout:
run_sql("KILL %s", (r_id, ))
write_message("WARNING: too many DB threads, " + \
"killing thread %s" % r_id, verbose=1)
return
def get_associated_subfield_value(recID, tag, value, associated_subfield_code):
"""Return list of ASSOCIATED_SUBFIELD_CODE, if exists, for record
RECID and TAG of value VALUE. Used by fulltext indexer only.
Note: TAG must be 6 characters long (tag+ind1+ind2+sfcode),
    otherwise an empty string is returned.
FIXME: what if many tag values have the same value but different
associated_subfield_code? Better use bibrecord library for this.
"""
out = ""
if len(tag) != 6:
return out
bibXXx = "bib" + tag[0] + tag[1] + "x"
bibrec_bibXXx = "bibrec_" + bibXXx
query = """SELECT bb.field_number, b.tag, b.value FROM %s AS b, %s AS bb
WHERE bb.id_bibrec=%%s AND bb.id_bibxxx=b.id AND tag LIKE
%%s%%""" % (bibXXx, bibrec_bibXXx)
res = run_sql(query, (recID, tag[:-1]))
field_number = -1
for row in res:
if row[1] == tag and row[2] == value:
field_number = row[0]
if field_number > 0:
for row in res:
if row[0] == field_number and row[1] == tag[:-1] + associated_subfield_code:
out = row[2]
break
return out
def get_author_canonical_ids_for_recid(recID):
"""
Return list of author canonical IDs (e.g. `J.Ellis.1') for the
given record. Done by consulting BibAuthorID module.
"""
return [word[0] for word in run_sql("""SELECT data FROM aidPERSONIDDATA
JOIN aidPERSONIDPAPERS USING (personid) WHERE bibrec=%s AND
tag='canonical_name' AND flag>-2""", (recID, ))]
def swap_temporary_reindex_tables(index_id, reindex_prefix="tmp_"):
"""Atomically swap reindexed temporary table with the original one.
Delete the now-old one."""
write_message("Putting new tmp index tables " + \
"for id %s into production" % index_id)
run_sql(
"RENAME TABLE " +
"idxWORD%02dR TO old_idxWORD%02dR," % (index_id, index_id) +
"%sidxWORD%02dR TO idxWORD%02dR," % (reindex_prefix, index_id, index_id) +
"idxWORD%02dF TO old_idxWORD%02dF," % (index_id, index_id) +
"%sidxWORD%02dF TO idxWORD%02dF," % (reindex_prefix, index_id, index_id) +
"idxPAIR%02dR TO old_idxPAIR%02dR," % (index_id, index_id) +
"%sidxPAIR%02dR TO idxPAIR%02dR," % (reindex_prefix, index_id, index_id) +
"idxPAIR%02dF TO old_idxPAIR%02dF," % (index_id, index_id) +
"%sidxPAIR%02dF TO idxPAIR%02dF," % (reindex_prefix, index_id, index_id) +
"idxPHRASE%02dR TO old_idxPHRASE%02dR," % (index_id, index_id) +
"%sidxPHRASE%02dR TO idxPHRASE%02dR," % (reindex_prefix, index_id, index_id) +
"idxPHRASE%02dF TO old_idxPHRASE%02dF," % (index_id, index_id) +
"%sidxPHRASE%02dF TO idxPHRASE%02dF;" % (reindex_prefix, index_id, index_id)
)
write_message("Dropping old index tables for id %s" % index_id)
run_sql_drop_silently("""DROP TABLE old_idxWORD%02dR,
old_idxWORD%02dF,
old_idxPAIR%02dR,
old_idxPAIR%02dF,
old_idxPHRASE%02dR,
old_idxPHRASE%02dF""" % ((index_id, )* 6)
) # kwalitee: disable=sql
def init_temporary_reindex_tables(index_id, reindex_prefix="tmp_"):
"""Create reindexing temporary tables."""
write_message("Creating new tmp index tables for id %s" % index_id)
query = """DROP TABLE IF EXISTS %sidxWORD%02dF""" % \
(wash_table_column_name(reindex_prefix), index_id)
run_sql_drop_silently(query) # kwalitee: disable=sql
run_sql("""CREATE TABLE %sidxWORD%02dF (
id mediumint(9) unsigned NOT NULL auto_increment,
term varchar(50) default NULL,
hitlist longblob,
PRIMARY KEY (id),
UNIQUE KEY term (term)
) ENGINE=MyISAM""" % (reindex_prefix, index_id))
query = """DROP TABLE IF EXISTS %sidxWORD%02dR""" % \
(wash_table_column_name(reindex_prefix), index_id)
run_sql_drop_silently(query) # kwalitee: disable=sql
run_sql("""CREATE TABLE %sidxWORD%02dR (
id_bibrec mediumint(9) unsigned NOT NULL,
termlist longblob,
type enum('CURRENT','FUTURE','TEMPORARY') NOT NULL default 'CURRENT',
PRIMARY KEY (id_bibrec,type)
) ENGINE=MyISAM""" % (reindex_prefix, index_id))
query = """DROP TABLE IF EXISTS %sidxPAIR%02dF""" % \
(wash_table_column_name(reindex_prefix), index_id)
run_sql_drop_silently(query) # kwalitee: disable=sql
run_sql("""CREATE TABLE %sidxPAIR%02dF (
id mediumint(9) unsigned NOT NULL auto_increment,
term varchar(100) default NULL,
hitlist longblob,
PRIMARY KEY (id),
UNIQUE KEY term (term)
) ENGINE=MyISAM""" % (reindex_prefix, index_id))
query = """DROP TABLE IF EXISTS %sidxPAIR%02dR""" % \
(wash_table_column_name(reindex_prefix), index_id)
run_sql_drop_silently(query) # kwalitee: disable=sql
run_sql("""CREATE TABLE %sidxPAIR%02dR (
id_bibrec mediumint(9) unsigned NOT NULL,
termlist longblob,
type enum('CURRENT','FUTURE','TEMPORARY') NOT NULL default 'CURRENT',
PRIMARY KEY (id_bibrec,type)
) ENGINE=MyISAM""" % (reindex_prefix, index_id))
query = """DROP TABLE IF EXISTS %sidxPHRASE%02dF""" % \
(wash_table_column_name(reindex_prefix), index_id)
run_sql_drop_silently(query) # kwalitee: disable=sql
run_sql("""CREATE TABLE %sidxPHRASE%02dF (
id mediumint(9) unsigned NOT NULL auto_increment,
term text default NULL,
hitlist longblob,
PRIMARY KEY (id),
KEY term (term(50))
) ENGINE=MyISAM""" % (reindex_prefix, index_id))
query = """DROP TABLE IF EXISTS %sidxPHRASE%02dR""" % \
(wash_table_column_name(reindex_prefix), index_id)
run_sql_drop_silently(query) # kwalitee: disable=sql
run_sql("""CREATE TABLE %sidxPHRASE%02dR (
id_bibrec mediumint(9) unsigned NOT NULL default '0',
termlist longblob,
type enum('CURRENT','FUTURE','TEMPORARY') NOT NULL default 'CURRENT',
PRIMARY KEY (id_bibrec,type)
) ENGINE=MyISAM""" % (reindex_prefix, index_id))
def remove_subfields(s):
"Removes subfields from string, e.g. 'foo $$c bar' becomes 'foo bar'."
return re_subfields.sub(' ', s)
def get_field_indexes(field):
"""Returns indexes names and ids corresponding to the given field"""
if recognize_marc_tag(field):
#field is actually a tag
return get_marc_tag_indexes(field, virtual=False)
else:
return get_nonmarc_tag_indexes(field, virtual=False)
get_field_indexes_memoised = Memoise(get_field_indexes)
def get_index_tokenizer(index_id):
"""Returns value of a tokenizer field from idxINDEX database table
@param index_id: id of the index
"""
query = "SELECT tokenizer FROM idxINDEX WHERE id=%s" % index_id
out = None
try:
res = run_sql(query)
if res:
out = _TOKENIZERS[res[0][0]]
except DatabaseError:
write_message("Exception caught for SQL statement: %s; " + \
"column tokenizer might not exist" % query, sys.stderr)
except KeyError:
write_message("Exception caught: there is no such tokenizer")
out = None
return out
def detect_tokenizer_type(tokenizer):
"""
Checks what is the main type of the tokenizer.
For more information on tokenizer types take
a look at BibIndexTokenizer class.
@param tokenizer: instance of a tokenizer
"""
from invenio.bibindex_tokenizers.BibIndexStringTokenizer import BibIndexStringTokenizer
from invenio.bibindex_tokenizers.BibIndexRecJsonTokenizer import BibIndexRecJsonTokenizer
from invenio.bibindex_tokenizers.BibIndexMultiFieldTokenizer import BibIndexMultiFieldTokenizer
tokenizer_inheritance_tree = inspect.getmro(tokenizer.__class__)
if BibIndexStringTokenizer in tokenizer_inheritance_tree:
return CFG_BIBINDEX_TOKENIZER_TYPE['string']
if BibIndexMultiFieldTokenizer in tokenizer_inheritance_tree:
return CFG_BIBINDEX_TOKENIZER_TYPE['multifield']
if BibIndexRecJsonTokenizer in tokenizer_inheritance_tree:
return CFG_BIBINDEX_TOKENIZER_TYPE['recjson']
return CFG_BIBINDEX_TOKENIZER_TYPE['unknown']
def get_last_updated_all_indexes():
"""Returns last modification date for all defined indexes"""
query= """SELECT name, last_updated FROM idxINDEX"""
res = run_sql(query)
return res
def split_ranges(parse_string):
"""Parse a string a return the list or ranges."""
recIDs = []
ranges = parse_string.split(",")
for arange in ranges:
tmp_recIDs = arange.split("-")
if len(tmp_recIDs) == 1:
recIDs.append([int(tmp_recIDs[0]), int(tmp_recIDs[0])])
else:
if int(tmp_recIDs[0]) > int(tmp_recIDs[1]): # sanity check
tmp = tmp_recIDs[0]
tmp_recIDs[0] = tmp_recIDs[1]
tmp_recIDs[1] = tmp
recIDs.append([int(tmp_recIDs[0]), int(tmp_recIDs[1])])
return recIDs
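# Example (added for illustration): split_ranges("1-5,7,10-12") returns
# [[1, 5], [7, 7], [10, 12]].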
def get_date_range(var):
"Returns the two dates contained as a low,high tuple"
limits = var.split(",")
if len(limits) == 1:
low = get_datetime(limits[0])
return low, None
if len(limits) == 2:
low = get_datetime(limits[0])
high = get_datetime(limits[1])
return low, high
return None, None
def create_range_list(res):
"""Creates a range list from a recID select query result contained
in res. The result is expected to have ascending numerical order."""
if not res:
return []
row = res[0]
if not row:
return []
else:
range_list = [[row, row]]
for row in res[1:]:
row_id = row
if row_id == range_list[-1][1] + 1:
range_list[-1][1] = row_id
else:
range_list.append([row_id, row_id])
return range_list
def beautify_range_list(range_list):
"""Returns a non overlapping, maximal range list"""
ret_list = []
for new in range_list:
found = 0
for old in ret_list:
if new[0] <= old[0] <= new[1] + 1 or new[0] - 1 <= old[1] <= new[1]:
old[0] = min(old[0], new[0])
old[1] = max(old[1], new[1])
found = 1
break
if not found:
ret_list.append(new)
return ret_list
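# Example (added for illustration): overlapping or touching ranges are merged,
# e.g. beautify_range_list([[1, 5], [3, 9], [20, 22]]) -> [[1, 9], [20, 22]].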
def truncate_index_table(index_name):
"""Properly truncate the given index."""
index_id = get_index_id_from_index_name(index_name)
if index_id:
write_message('Truncating %s index table in order to reindex.' % \
index_name, verbose=2)
run_sql("""UPDATE idxINDEX SET last_updated='0000-00-00 00:00:00'
WHERE id=%s""", (index_id, ))
run_sql("TRUNCATE idxWORD%02dF" % index_id) # kwalitee: disable=sql
run_sql("TRUNCATE idxWORD%02dR" % index_id) # kwalitee: disable=sql
run_sql("TRUNCATE idxPHRASE%02dF" % index_id) # kwalitee: disable=sql
run_sql("TRUNCATE idxPHRASE%02dR" % index_id) # kwalitee: disable=sql
def update_index_last_updated(indexes, starting_time=None):
"""Update last_updated column of the index table in the database.
Puts starting time there so that if the task
was interrupted for record download,
the records will be reindexed next time.
@param indexes: list of indexes names
"""
if starting_time is None:
return None
for index_name in indexes:
write_message("updating last_updated to %s...for %s index" % \
(starting_time, index_name), verbose=9)
run_sql("UPDATE idxINDEX SET last_updated=%s WHERE name=%s",
(starting_time, index_name))
def get_percentage_completed(num_done, num_total):
""" Return a string containing the approx. percentage completed """
percentage_remaining = 100.0 * float(num_done) / float(num_total)
if percentage_remaining:
percentage_display = "(%.1f%%)" % (percentage_remaining, )
else:
percentage_display = ""
return percentage_display
def _fill_dict_of_indexes_with_empty_sets():
"""find_affected_records internal function.
Creates dict: {'index_name1':set([]), ...}
"""
index_dict = {}
tmp_all_indexes = get_all_indexes(virtual=False)
for index in tmp_all_indexes:
index_dict[index] = set([])
return index_dict
def find_affected_records_for_index(indexes=[], recIDs=[], force_all_indexes=False):
"""
Function checks which records need to be changed/reindexed
for given index/indexes.
Makes use of hstRECORD table where
different revisions of record are kept.
If parameter force_all_indexes is set
function will assign all recIDs to all indexes.
@param indexes: names of indexes for reindexation separated by coma
@param recIDs: recIDs for reindexation in form:
[[range1_down, range1_up],[range2_down, range2_up]..]
@param force_all_indexes: should we index all indexes?
"""
tmp_dates = dict(get_last_updated_all_indexes())
modification_dates = dict([(date, tmp_dates[date] or datetime(1000, 1, 1, 1, 1, 1))
for date in tmp_dates])
tmp_all_indexes = get_all_indexes(virtual=False)
indexes = remove_inexistent_indexes(indexes, leave_virtual=False)
if not indexes:
return {}
def _should_reindex_for_revision(index_name, revision_date):
try:
if modification_dates[index_name] < revision_date and \
index_name in indexes:
return True
return False
except KeyError:
return False
if force_all_indexes:
records_for_indexes = {}
all_recIDs = []
for recIDs_range in recIDs:
all_recIDs.extend(range(recIDs_range[0], recIDs_range[1]+1))
if all_recIDs:
for index in indexes:
records_for_indexes[index] = all_recIDs
return records_for_indexes
min_last_updated = get_min_last_updated(indexes)[0][0] or \
datetime(1000, 1, 1, 1, 1, 1)
indexes_to_change = _fill_dict_of_indexes_with_empty_sets()
recIDs_info = []
for recIDs_range in recIDs:
query = """SELECT id_bibrec,job_date,affected_fields FROM hstRECORD
WHERE id_bibrec BETWEEN %s AND %s AND
job_date > '%s'""" % \
(recIDs_range[0], recIDs_range[1], min_last_updated)
res = run_sql(query)
if res:
recIDs_info.extend(res)
for recID_info in recIDs_info:
recID, revision, affected_fields = recID_info
affected_fields = affected_fields.split(",")
indexes_for_recID = set()
for field in affected_fields:
if field:
field_indexes = get_field_indexes_memoised(field) or []
indexes_names = set([idx[1] for idx in field_indexes])
indexes_for_recID |= indexes_names
else:
# record was inserted, all fields were changed,
# no specific affected fields
indexes_for_recID |= set(tmp_all_indexes)
indexes_for_recID_filtered = [ind for ind in indexes_for_recID if _should_reindex_for_revision(ind, revision)]
for index in indexes_for_recID_filtered:
indexes_to_change[index].add(recID)
indexes_to_change = dict((k, list(sorted(v))) for k, v in indexes_to_change.iteritems() if v)
return indexes_to_change
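# Example of the returned structure (added for illustration): for
# indexes=['title', 'author'] the result maps index names to sorted lists of
# recIDs that need reindexing, e.g. {'title': [10, 11, 42], 'author': [11]}.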
def chunk_generator(rng):
"""
Splits one range into several smaller ones
with respect to global chunksize variable.
@param rng: range of records
@type rng: list in the form: [1, 2000]
"""
global chunksize
current_low = rng[0]
current_high = rng[0]
if rng[0] == None or rng[1] == None:
raise StopIteration
if rng[1] - rng[0] + 1 <= chunksize:
yield rng
else:
while current_high - 1 < rng[1]:
current_high += chunksize
yield current_low, min(current_high - 1, rng[1])
current_low += chunksize
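# Example (added for illustration): with the default chunksize of 1000,
# list(chunk_generator([1, 2500])) yields (1, 1000), (1001, 2000), (2001, 2500).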
class AbstractIndexTable(object):
"""
This class represents an index table in database.
An index consists of three different kinds of tables:
table which stores only words in db,
table which stores pairs of words and
table which stores whole phrases.
The class represents only one table. Another instance of
the class must be created in order to store different
type of terms.
This class is an abstract class. It contains methods
to connect to db and methods which facilitate
inserting/modifing/removing terms from it. The class
also contains methods which help managing the memory.
All specific methods for indexing can be found in corresponding
classes for virtual and regular indexes.
"""
def __init__(self, index_name, table_type, table_prefix="", wash_index_terms=50):
self.index_name = index_name
self.index_id = get_index_id_from_index_name(index_name)
self.table_type = table_type
self.wash_index_terms = wash_index_terms
self.table_name = wash_table_column_name(table_prefix + \
"idx" + \
table_type + \
("%02d" % self.index_id) + "F")
self.table_prefix = table_prefix
self.value = {} # cache
self.recIDs_in_mem = []
def put_into_db(self, mode="normal"):
"""Updates the current words table in the corresponding DB
idxFOO table. Mode 'normal' means normal execution,
mode 'emergency' means words index reverting to old state.
"""
write_message("%s %s wordtable flush started" % \
(self.table_name, mode))
write_message('...updating %d words into %s started' % \
(len(self.value), self.table_name))
task_update_progress("(%s:%s) flushed %d/%d words" % \
(self.table_name, self.index_name, 0, len(self.value)))
self.recIDs_in_mem = beautify_range_list(self.recIDs_in_mem)
tab_name = self.table_name[:-1] + "R"
if mode == "normal":
for group in self.recIDs_in_mem:
query = """UPDATE %s SET type='TEMPORARY' WHERE id_bibrec
BETWEEN %%s AND %%s AND type='CURRENT'""" % tab_name
write_message(query % (group[0], group[1]), verbose=9)
run_sql(query, (group[0], group[1]))
nb_words_total = len(self.value)
nb_words_report = int(nb_words_total / 10.0)
nb_words_done = 0
for word in self.value.keys():
self.put_word_into_db(word)
nb_words_done += 1
if nb_words_report != 0 and ((nb_words_done % nb_words_report) == 0):
write_message('......processed %d/%d words' % \
(nb_words_done, nb_words_total))
percentage_display = get_percentage_completed(nb_words_done, nb_words_total)
task_update_progress("(%s:%s) flushed %d/%d words %s" % \
(tab_name, self.index_name,
nb_words_done, nb_words_total,
percentage_display))
write_message('...updating %d words into %s ended' % \
(nb_words_total, tab_name))
write_message('...updating reverse table %s started' % tab_name)
if mode == "normal":
for group in self.recIDs_in_mem:
query = """UPDATE %s SET type='CURRENT' WHERE id_bibrec
BETWEEN %%s AND %%s AND type='FUTURE'""" % tab_name
write_message(query % (group[0], group[1]), verbose=9)
run_sql(query, (group[0], group[1]))
query = """DELETE FROM %s WHERE id_bibrec
BETWEEN %%s AND %%s AND type='TEMPORARY'""" % tab_name
write_message(query % (group[0], group[1]), verbose=9)
run_sql(query, (group[0], group[1]))
write_message('End of updating wordTable into %s' % \
tab_name, verbose=9)
elif mode == "emergency":
for group in self.recIDs_in_mem:
query = """UPDATE %s SET type='CURRENT' WHERE id_bibrec
BETWEEN %%s AND %%s AND type='TEMPORARY'""" % tab_name
write_message(query % (group[0], group[1]), verbose=9)
run_sql(query, (group[0], group[1]))
query = """DELETE FROM %s WHERE id_bibrec
BETWEEN %%s AND %%s AND type='FUTURE'""" % tab_name
write_message(query % (group[0], group[1]), verbose=9)
run_sql(query, (group[0], group[1]))
write_message('End of emergency flushing wordTable into %s' % \
tab_name, verbose=9)
write_message('...updating reverse table %s ended' % tab_name)
self.clean()
self.recIDs_in_mem = []
write_message("%s %s wordtable flush ended" % \
(self.table_name, mode))
task_update_progress("(%s:%s) flush ended" % \
(self.table_name, self.index_name))
def put_word_into_db(self, word):
"""Flush a single word to the database and delete it from memory"""
set = self.load_old_recIDs(word)
if set is not None: # merge the word recIDs found in memory:
hitlist_was_changed = self.merge_with_old_recIDs(word, set)
if not hitlist_was_changed:
# nothing to update:
write_message("......... unchanged hitlist for ``%s''" % \
word, verbose=9)
else:
# yes there were some new words:
write_message("......... updating hitlist for ``%s''" % \
word, verbose=9)
run_sql("UPDATE %s SET hitlist=%%s WHERE term=%%s" % wash_table_column_name(self.table_name), (set.fastdump(), word)) # kwalitee: disable=sql
else: # the word is new, will create new set:
write_message("......... inserting hitlist for ``%s''" % \
word, verbose=9)
set = intbitset(self.value[word].keys())
try:
run_sql("INSERT INTO %s (term, hitlist) VALUES (%%s, %%s)" % wash_table_column_name(self.table_name), (word, set.fastdump())) # kwalitee: disable=sql
except Exception, e:
## We send this exception to the admin only when is not
## already reparing the problem.
register_exception(prefix="Error when putting the term '%s' into db (hitlist=%s): %s\n" % (repr(word), set, e), alert_admin=(task_get_option('cmd') != 'repair'))
if not set: # never store empty words
run_sql("DELETE FROM %s WHERE term=%%s" % wash_table_column_name(self.table_name), (word,)) # kwalitee: disable=sql
def put(self, recID, word, sign):
"""Keeps track of changes done during indexing
and stores these changes in memory for further use.
Indexing process needs this information later while
filling in the database.
@param recID: recID of the record we want to update in memory
@param word: word we want to update
        @param sign: sign of the word, 1 means keep this word in database,
-1 remove word from database
"""
value = self.value
try:
if self.wash_index_terms:
word = wash_index_term(word, self.wash_index_terms)
if value.has_key(word):
# the word 'word' exist already: update sign
value[word][recID] = sign
else:
value[word] = {recID: sign}
except Exception as e:
write_message(
"Error: Cannot put word %s with sign %d for recID %s (%s)."
% (word, sign, recID, e)
)
def load_old_recIDs(self, word):
"""Load existing hitlist for the word from the database index files."""
query = "SELECT hitlist FROM %s WHERE term=%%s" % self.table_name
res = run_sql(query, (word, ))
if res:
return intbitset(res[0][0])
else:
return None
def merge_with_old_recIDs(self, word, set):
"""Merge the system numbers stored in memory
(hash of recIDs with value +1 or -1 according
to whether to add/delete them) with those stored
in the database index and received in set universe
of recIDs for the given word.
Return False in case no change was done to SET, return True in case SET
was changed.
"""
oldset = intbitset(set)
set.update_with_signs(self.value[word])
return set != oldset
def clean(self):
"Cleans the cache."
self.value = {}
class VirtualIndexTable(AbstractIndexTable):
"""
There are two types of indexes: virtual and regular/normal.
Check WordTable class for more on normal indexes.
This class represents a single index table for virtual index
(see also: AbstractIndexTable).
Virtual index doesn't store its own terms,
it accumulates terms from other indexes.
Good example of virtual index is the global index which stores
terms from title, abstract, keyword, author and so on.
This class contains methods for indexing virtual indexes.
See also: run_update()
"""
def __init__(self, index_name, table_type, table_prefix="", wash_index_terms=50):
"""
Creates VirtualIndexTable instance.
@param index_name: name of the index we want to reindex
@param table_type: words, pairs or phrases
@param table_prefix: add "tmp_" if you want to
reindex to temporary table
"""
AbstractIndexTable.__init__(self, index_name,
table_type,
table_prefix,
wash_index_terms)
self.mode = "normal"
self.dependent_indexes = dict(get_virtual_index_building_blocks(self.index_id))
def set_reindex_mode(self):
"""
Sets reindex mode. VirtualIndexTable will
remove all its content from database and
use insert_index function to repopulate it.
"""
self.mode = "reindex"
def run_update(self, flush=10000):
"""
Function starts all updating processes for virtual index.
It will take all information about pending changes from database
from queue tables (idxWORD/PAIR/PHRASExxQ), process them
and trigger appropriate indexing functions.
@param flush: how many records we will put in one go
into database (at most);
see also: opt_flush in WordTable class
"""
global chunksize
if self.mode == "reindex":
self.clean_database()
for index_id, index_name in self.dependent_indexes.iteritems():
rng = get_records_range_for_index(index_id)
flush_count = 0
if not rng:
continue
write_message('Virtual index: %s is being reindexed for %s index' % \
(self.index_name, index_name))
chunks = chunk_generator(rng)
try:
while True:
task_sleep_now_if_required()
chunk = chunks.next()
self.insert_index(index_id, chunk[0], chunk[1])
flush_count = flush_count + chunk[1] - chunk[0] + 1
self.recIDs_in_mem.append(list(chunk))
if flush_count >= flush:
flush_count = 0
self.put_into_db()
except StopIteration:
if flush_count > 0:
self.put_into_db()
self.clean_queue_table(index_name)
else:
for index_id, index_name in self.dependent_indexes.iteritems():
query = """SELECT id_bibrec_low, id_bibrec_high, mode FROM %s
WHERE index_name=%%s
ORDER BY runtime ASC""" % \
(self.table_name[:-1] + "Q")
entries = self.remove_duplicates(run_sql(query, (index_name, )))
if entries:
write_message('Virtual index: %s is being updated for %s index' % \
(self.index_name, index_name))
for entry in entries:
operation = None
recID_low, recID_high, mode = entry
if mode == CFG_BIBINDEX_UPDATE_MODE["Update"]:
operation = self.update_index
elif mode == CFG_BIBINDEX_UPDATE_MODE["Remove"]:
operation = self.remove_index
elif mode == CFG_BIBINDEX_UPDATE_MODE["Insert"]:
operation = self.insert_index
flush_count = 0
chunks = chunk_generator([recID_low, recID_high])
try:
while True:
task_sleep_now_if_required()
chunk = chunks.next()
operation(index_id, chunk[0], chunk[1])
flush_count = flush_count + chunk[1] - chunk[0] + 1
self.recIDs_in_mem.append(list(chunk))
if flush_count >= flush:
flush_count = 0
self.put_into_db()
except StopIteration:
if flush_count > 0:
self.put_into_db()
self.clean_queue_table(index_name)
def retrieve_new_values_from_index(self, index_id, records_range):
"""
Retrieves new values from dependent index
for specific range of records.
@param index_id: id of the dependent index
@param records_range: the smallest and the biggest id
in the range: [id_low, id_high]
"""
tab_name = "idx" + self.table_type + ("%02d" % index_id) + "R"
query = """SELECT id_bibrec, termlist FROM %s WHERE id_bibrec
BETWEEN %%s AND %%s""" % tab_name
new_regular_values = run_sql(query, (records_range[0], records_range[1]))
if new_regular_values:
zipped = zip(*new_regular_values)
new_regular_values = dict(zip(zipped[0], map(deserialize_via_marshal, zipped[1])))
else:
new_regular_values = dict()
return new_regular_values
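    # Illustrative note (not part of the original code): the zip-based
    # transform above turns rows such as
    #   ((10, <marshalled termlist>), (11, <marshalled termlist>))
    # into a dict of the form {10: ['ellis', 'cern'], 11: ['smith']},
    # i.e. recID -> deserialized term list. The recIDs and terms shown
    # here are made-up examples.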
def retrieve_old_values(self, records_range):
"""
Retrieves old values from database for this virtual index
for specific records range.
@param records_range: the smallest and the biggest id
in the range: [id_low, id_high]
"""
virtual_tab_name = self.table_name[:-1] + "R"
query = """SELECT id_bibrec, termlist FROM %s
WHERE type='CURRENT' AND
id_bibrec BETWEEN %%s AND %%s""" % virtual_tab_name
old_virtual_values = run_sql(query, (records_range[0], records_range[1]))
if old_virtual_values:
zipped = zip(*old_virtual_values)
old_virtual_values = dict(zip(zipped[0], map(deserialize_via_marshal, zipped[1])))
else:
old_virtual_values = dict()
return old_virtual_values
def update_index(self, index_id, recID_low, recID_high):
"""
Updates the state of virtual index for records in range:
recID_low, recID_high for index specified by index_id.
Function stores terms in idxWORD/PAIR/PHRASExxR tables with
prefixes for specific index, for example term 'ellis'
from author index will be stored in reversed table as:
'__author__ellis'. This allows fast operations on only a subset of the terms.
@param index_id: id of the dependent index we take the new terms from
@param recID_low: first recID from the range of considered recIDs
@param recID_high: last recID from the range of considered recIDs
"""
index_name = self.dependent_indexes[index_id]
update_cache_for_record = self.update_cache_for_record
virtual_tab_name = self.table_name[:-1] + "R"
# take new values
new_regular_values = self.retrieve_new_values_from_index(index_id, [recID_low, recID_high])
# take old values
old_virtual_values = self.retrieve_old_values([recID_low, recID_high])
# update reversed table
for recID in xrange(recID_low, recID_high + 1):
new_values = new_regular_values.get(recID) or []
old_values = old_virtual_values.get(recID) or []
to_serialize = update_cache_for_record(index_name, recID, old_values, new_values)
if len(to_serialize) == 0:
continue
run_sql("""INSERT INTO %s (id_bibrec,termlist,type)
VALUES (%%s,%%s,'FUTURE')""" % \
wash_table_column_name(virtual_tab_name),
(recID, serialize_via_marshal(to_serialize))) # kwalitee: disable=sql
try:
run_sql("INSERT INTO %s (id_bibrec,termlist,type) VALUES (%%s,%%s,'CURRENT')" % wash_table_column_name(virtual_tab_name), (recID, serialize_via_marshal([]))) # kwalitee: disable=sql
except DatabaseError:
pass
def insert_index(self, index_id, recID_low, recID_high):
"""
Inserts terms from the dependent index into the virtual table
without checking what is already inside the virtual table or
which terms are being added. It is faster than 'updating',
but it can only be used when the virtual table is free of
terms from this dependent index.
@param index_id: id of the dependent index we take the terms from
@param recID_low: first recID from the range of considered recIDs
@param recID_high: last recID from the range of considered recIDs
"""
index_name = self.dependent_indexes[index_id]
insert_to_cache_for_record = self.insert_to_cache_for_record
virtual_tab_name = self.table_name[:-1] + "R"
# take new values
new_regular_values = self.retrieve_new_values_from_index(index_id, [recID_low, recID_high])
# take old values
old_virtual_values = self.retrieve_old_values([recID_low, recID_high])
# update reversed table
for recID in xrange(recID_low, recID_high + 1):
new_values = new_regular_values.get(recID) or []
old_values = old_virtual_values.get(recID) or []
to_serialize = insert_to_cache_for_record(index_name, recID, old_values, new_values)
if len(to_serialize) == 0:
continue
run_sql("INSERT INTO %s (id_bibrec,termlist,type) VALUES (%%s,%%s,'FUTURE')" % wash_table_column_name(virtual_tab_name), (recID, serialize_via_marshal(to_serialize))) # kwalitee: disable=sql
try:
run_sql("INSERT INTO %s (id_bibrec,termlist,type) VALUES (%%s,%%s,'CURRENT')" % wash_table_column_name(virtual_tab_name), (recID, serialize_via_marshal([]))) # kwalitee: disable=sql
except DatabaseError:
pass
def remove_index(self, index_id, recID_low, recID_high):
"""
Removes words found in dependent index from reversed
table of virtual index. Updates the state of the memory
(for future removal from forward table).
Takes into account that a given word can be found in more
than one dependent index, in which case it won't mark that word
for the removal process.
@param index_id: id of the dependent index we want to remove
@param recID_low: first recID from the range of considered recIDs
@param recID_high: last recID from the range of considered recIDs
"""
index_name = self.dependent_indexes[index_id]
remove_from_cache_for_record = self.remove_from_cache_for_record
virtual_tab_name = self.table_name[:-1] + "R"
# take old values
old_virtual_values = self.retrieve_old_values([recID_low, recID_high])
# update reversed table
for recID in xrange(recID_low, recID_high + 1):
old_values = old_virtual_values.get(recID) or []
to_serialize = remove_from_cache_for_record(index_name, recID, old_values)
if len(to_serialize) == 0:
continue
run_sql("INSERT INTO %s (id_bibrec,termlist,type) VALUES (%%s,%%s,'FUTURE')" % wash_table_column_name(virtual_tab_name), (recID, serialize_via_marshal(to_serialize))) # kwalitee: disable=sql
try:
run_sql("INSERT INTO %s (id_bibrec,termlist,type) VALUES (%%s,%%s,'CURRENT')" % wash_table_column_name(virtual_tab_name), (recID, serialize_via_marshal([]))) # kwalitee: disable=sql
except DatabaseError:
pass
def update_cache_for_record(self, index_name, recID, old_values, new_values):
"""
Updates memory (cache) with information on what to
remove/add/modify in forward table for specified record.
It also returns new terms which should be indexed for given record.
@param index_name: index name of dependent index
@param recID: considered record
@param old_values: all old values from all dependent indexes
for this virtual index for recID
@param new_values: new values from some dependent index
which should be added
"""
prefix = make_prefix(index_name)
put = self.put
new_values_prefix = [prefix + term for term in new_values]
part_values = []
tmp_old_values_prefix = []
# split old values from v.index into those with 'prefix' and those without
for term in old_values:
if term.startswith(prefix):
term_without_prefix = re.sub(re_prefix, '', term)
part_values.append(term_without_prefix)
put(recID, term_without_prefix, -1)
else:
tmp_old_values_prefix.append(term)
# remember not to remove words that occur more than once
part_values = set(part_values)
for value in tmp_old_values_prefix:
term_without_prefix = re.sub(re_prefix, '', value)
if term_without_prefix in part_values:
put(recID, term_without_prefix, 1)
for term_without_prefix in new_values:
put(recID, term_without_prefix, 1)
tmp_new_values_prefix = list(tmp_old_values_prefix)
tmp_new_values_prefix.extend(new_values_prefix)
return tmp_new_values_prefix
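    # Illustrative sketch (assumption: make_prefix('author') yields
    # '__author__', as suggested by the update_index docstring). For
    #   old_values = ['__author__ellis', '__title__higgs']
    #   new_values = ['smith']   # coming from the 'author' index
    # the method above marks 'ellis' with sign -1, marks 'smith' with
    # sign +1, leaves the title-backed term alone, and returns
    # ['__title__higgs', '__author__smith'] for serialization.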
def insert_to_cache_for_record(self, index_name, recID, old_values, new_values):
"""
Updates cache with terms which should be inserted to database.
Used in insert_index function. See also: update_cache_for_record
which is analogous for update_index function.
"""
prefix = make_prefix(index_name)
append = old_values.append
put = self.put
for term in new_values:
append(prefix + term)
put(recID, term, 1)
return old_values
def remove_from_cache_for_record(self, index_name, recID, old_values):
"""
Updates information in cache with terms which should be removed
from virtual table. Used in remove_index function.
"""
prefix = make_prefix(index_name)
tmp_rest = []
tmp_removed = []
tmp_new_values = []
append_to_new = tmp_new_values.append
append_to_rest = tmp_rest.append
append_to_removed = tmp_removed.append
put = self.put
for term in old_values:
if term.startswith(prefix):
term_without_prefix = re.sub(re_prefix, '', term)
append_to_removed(term_without_prefix)
put(recID, term_without_prefix, -1)
else:
append_to_rest(re.sub(re_prefix, '', term))
append_to_new(term)
to_remember = set(tmp_rest) & set(tmp_removed)
for term_without_prefix in to_remember:
put(recID, term_without_prefix, 1)
return tmp_new_values
def clean_database(self):
"""Removes all entries from corresponding tables in database"""
query = """DELETE FROM %s""" % self.table_name
run_sql(query)
query = """DELETE FROM %s""" % self.table_name[:-1] + "R"
run_sql(query)
def clean_queue_table(self, index_name):
"""
Cleans the queue table (i.e. idxWORD/PAIR/PHRASExxQ)
for a specific index, i.e. removes all entries for this
index from the queue table in the database.
"""
query = "DELETE FROM %s WHERE index_name='%s'" % \
(self.table_name[:-1].lstrip(self.table_prefix) + "Q",
index_name)
run_sql(query)
def remove_duplicates(self, entries):
"""
Removes duplicates from a list of entries (taken from Queue table)
in order to process a single command only once.
Queue table may look like this:
id (..) id_bibrec_low id_bibrec_high index_name mode
...
12 1 100 title update
13 1 100 title update
We don't want to perform the same operation twice. First we want to
squash the same commands into one.
@param entries: list of entries taken from the database
"""
unique = set()
return [entry for entry in entries if entry not in unique and not unique.add(entry)]
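    # Illustrative example (made-up queue rows): for
    #   entries = [(1, 100, 'update'), (1, 100, 'update'), (1, 50, 'insert')]
    # the comprehension above keeps only the first occurrence of each
    # tuple while preserving order, returning
    #   [(1, 100, 'update'), (1, 50, 'insert')]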
def remove_dependent_index(self, index_name):
"""
Removes dependent index from this virtual index.
It means removing all words from all records with prefix:
__index_name__ from reversed table, and removing some of
them from forward table if they don't appear in another
dependent index.
@param index_name: name of the dependent index to remove
"""
flush = 10000
dependent = self.dependent_indexes.values()
if len(dependent) == 0:
write_message("Specified index is not virtual...")
return
if index_name not in dependent:
write_message("Dependent index already removed...")
return
index_id = get_index_id_from_index_name(index_name)
records_range = get_records_range_for_index(index_id)
write_message("Removing an index: %s" % index_name)
if records_range:
flush_count = 0
chunks = chunk_generator([records_range[0], records_range[1]])
try:
while True:
task_sleep_now_if_required()
chunk = chunks.next()
self.remove_index(index_id, chunk[0], chunk[1])
flush_count = flush_count + chunk[1] - chunk[0] + 1
self.recIDs_in_mem.append(chunk)
if flush_count >= flush:
flush_count = 0
self.put_into_db()
except StopIteration:
if flush_count > 0:
self.put_into_db()
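# Illustrative usage sketch (the index names are placeholders, not part of
# the original module flow): the module-level helpers further down drive
# VirtualIndexTable roughly like this when detaching a dependent index:
#
#     vit = VirtualIndexTable('global', CFG_BIBINDEX_INDEX_TABLE_TYPE['Words'])
#     vit.remove_dependent_index('title')
#
# See remove_dependent_index() and update_virtual_indexes() below for the
# real call sites.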
class WordTable(AbstractIndexTable):
"""
This class represents a single index table of a regular index
(regular means it doesn't accumulate data from other indexes,
but takes data directly from the metadata of the records which
are being indexed; for the other type of index see: VirtualIndexTable).
To start the indexing process one needs to invoke the add_recIDs() method.
For further reading see the description of that method.
"""
def __init__(self, index_name, table_type, table_prefix="", wash_index_terms=50):
"""Creates words table instance.
@param index_name: the index name
@param index_id: the index integer identificator
@param fields_to_index: a list of fields to index
@param table_type: type of the wordtable: Words, Pairs, Phrases
@param table_prefix: prefix for table name, indexing will be performed
on table: <<table_prefix>>idx<<wordtable_type>>XXF
@param wash_index_terms: do we wash index terms, and if yes (when >0),
how many characters do we keep in the index terms; see
max_char_length parameter of wash_index_term()
"""
AbstractIndexTable.__init__(self, index_name, table_type, table_prefix, wash_index_terms)
self.tags = get_index_tags(index_name, virtual=False)
self.nonmarc_tags = get_index_tags(index_name,
virtual=False,
tagtype="nonmarc")
self.timestamp = datetime.now()
self.virtual_indexes = get_index_virtual_indexes(self.index_id)
self.virtual_index_update_mode = CFG_BIBINDEX_UPDATE_MODE["Update"]
try:
self.stemming_language = get_index_stemming_language(self.index_id)
except KeyError:
self.stemming_language = ''
self.remove_stopwords = get_index_remove_stopwords(self.index_id)
self.remove_html_markup = get_index_remove_html_markup(self.index_id)
self.remove_latex_markup = get_index_remove_latex_markup(self.index_id)
self.tokenizer = get_index_tokenizer(self.index_id)(self.stemming_language,
self.remove_stopwords,
self.remove_html_markup,
self.remove_latex_markup)
self.tokenizer_type = detect_tokenizer_type(self.tokenizer)
self.default_tokenizer_function = self.tokenizer.get_tokenizing_function(table_type)
self.special_tags = self._handle_special_tags()
if self.stemming_language and self.table_name.startswith('idxWORD'):
write_message('%s has stemming enabled, language %s' % (self.table_name, self.stemming_language))
def _handle_special_tags(self):
"""
Fills in a dict with special tags which
always use the same tokenizer, independently
of the index they belong to.
"""
special_tags = {}
fields = self.tags + self.nonmarc_tags
for tag in fields:
if tag in CFG_BIBINDEX_SPECIAL_TAGS:
for t in CFG_BIBINDEX_INDEX_TABLE_TYPE:
if self.table_type == CFG_BIBINDEX_INDEX_TABLE_TYPE[t]:
tokenizer_name = CFG_BIBINDEX_SPECIAL_TAGS[tag][t]
tokenizer = _TOKENIZERS[tokenizer_name]
instance = tokenizer(self.stemming_language,
self.remove_stopwords,
self.remove_html_markup,
self.remove_latex_markup)
special_tags[tag] = instance.get_tokenizing_function(self.table_type)
break
return special_tags
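    # Illustrative note: the lookups above assume that
    # CFG_BIBINDEX_SPECIAL_TAGS maps a tag to a dict keyed by table-type
    # name, e.g. (the tag and tokenizer names here are made up):
    #   {'8564_u': {'Words': 'BibIndexDefaultTokenizer', ...}}
    # so each special tag gets its own tokenizing function regardless of
    # the index's default tokenizer.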
def turn_off_virtual_indexes(self):
"""
Prevents related virtual indexes from being reindexed.
"""
self.virtual_indexes = []
def turn_on_virtual_indexes(self):
"""
Turns indexing of related virtual indexes back on.
"""
self.virtual_indexes = get_index_virtual_indexes(self.index_id)
def get_field(self, recID, tag):
"""Returns list of values of the MARC-21 'tag' fields for the
record 'recID'."""
out = []
bibXXx = "bib" + tag[0] + tag[1] + "x"
bibrec_bibXXx = "bibrec_" + bibXXx
query = """SELECT value FROM %s AS b, %s AS bb
WHERE bb.id_bibrec=%%s AND bb.id_bibxxx=b.id
AND tag LIKE %%s""" % (bibXXx, bibrec_bibXXx)
res = run_sql(query, (recID, tag))
for row in res:
out.append(row[0])
return out
def notify_virtual_indexes(self, recID_ranges):
"""
Informs all related virtual indexes about the index change.
The function records the change for each virtual index
in the proper queue table in the database (idxSOMETHINGxxQ).
@param recID_ranges: low and high recIDs of ranges
@type recID_ranges: list [[low_id1, high_id1], [low_id2, high_id2]...]
"""
query = """INSERT INTO %s (runtime, id_bibrec_low, id_bibrec_high, index_name, mode)
VALUES (%%s, %%s, %%s, %%s, %%s)"""
for index_id, index_name in self.virtual_indexes:
tab_name = "idx%s%02dQ" % (self.table_type, index_id)
full_query = query % tab_name
for recID_range in recID_ranges:
run_sql(full_query, (self.timestamp,
recID_range[0],
recID_range[1],
self.index_name,
self.virtual_index_update_mode))
def display(self):
"Displays the word table."
keys = self.value.keys()
keys.sort()
for k in keys:
write_message("%s: %s" % (k, self.value[k]))
def count(self):
"Returns the number of words in the table."
return len(self.value)
def info(self):
"Prints some information on the words table."
write_message("The words table contains %d words." % self.count())
def lookup_words(self, word=""):
"Lookup word from the words table."
if not word:
done = 0
while not done:
try:
word = raw_input("Enter word: ")
done = 1
except (EOFError, KeyboardInterrupt):
return
if self.value.has_key(word):
write_message("The word '%s' is found %d times." \
% (word, len(self.value[word])))
else:
write_message("The word '%s' does not exist in the word file."\
% word)
def add_recIDs(self, recIDs, opt_flush):
"""Fetches records which id in the recIDs range list and adds
them to the wordTable. The recIDs range list is of the form:
[[i1_low,i1_high],[i2_low,i2_high], ..., [iN_low,iN_high]].
"""
global chunksize, _last_word_table
flush_count = 0
records_done = 0
records_to_go = 0
for arange in recIDs:
records_to_go = records_to_go + arange[1] - arange[0] + 1
time_started = time.time() # will measure profile time
for arange in recIDs:
i_low = arange[0]
chunksize_count = 0
while i_low <= arange[1]:
task_sleep_now_if_required()
# calculate chunk group of recIDs and treat it:
i_high = min(i_low + opt_flush - flush_count - 1, arange[1])
i_high = min(i_low + chunksize - chunksize_count - 1, i_high)
try:
self.chk_recID_range(i_low, i_high)
except StandardError:
if self.index_name == 'fulltext' and CFG_SOLR_URL:
solr_commit()
raise
write_message(CFG_BIBINDEX_ADDING_RECORDS_STARTED_STR % \
(self.table_name, i_low, i_high))
if CFG_CHECK_MYSQL_THREADS:
kill_sleepy_mysql_threads()
percentage_display = get_percentage_completed(records_done, records_to_go)
task_update_progress("(%s:%s) adding recs %d-%d %s" % (self.table_name, self.index_name, i_low, i_high, percentage_display))
self.del_recID_range(i_low, i_high)
just_processed = self.add_recID_range(i_low, i_high)
flush_count = flush_count + i_high - i_low + 1
chunksize_count = chunksize_count + i_high - i_low + 1
records_done = records_done + just_processed
write_message(CFG_BIBINDEX_ADDING_RECORDS_STARTED_STR % \
(self.table_name, i_low, i_high))
if chunksize_count >= chunksize:
chunksize_count = 0
# flush if necessary:
if flush_count >= opt_flush:
self.put_into_db()
self.clean()
if self.index_name == 'fulltext' and CFG_SOLR_URL:
solr_commit()
write_message("%s backing up" % (self.table_name))
flush_count = 0
self.log_progress(time_started, records_done, records_to_go)
# iterate:
i_low = i_high + 1
if flush_count > 0:
self.put_into_db()
if self.index_name == 'fulltext' and CFG_SOLR_URL:
solr_commit()
self.log_progress(time_started, records_done, records_to_go)
self.notify_virtual_indexes(recIDs)
def add_recID_range(self, recID1, recID2):
"""Add records from RECID1 to RECID2."""
wlist = {}
self.recIDs_in_mem.append([recID1, recID2])
# special case of author indexes where we also add author
# canonical IDs:
if self.index_name in ('author', 'firstauthor', 'exactauthor', 'exactfirstauthor'):
for recID in range(recID1, recID2 + 1):
if not wlist.has_key(recID):
wlist[recID] = []
wlist[recID] = list_union(get_author_canonical_ids_for_recid(recID),
wlist[recID])
marc, nonmarc = self.find_nonmarc_records(recID1, recID2)
if marc:
collector = TermCollector(self.tokenizer,
self.tokenizer_type,
self.table_type,
self.tags,
[recID1, recID2])
collector.set_special_tags(self.special_tags)
wlist = collector.collect(marc, wlist)
if nonmarc:
collector = NonmarcTermCollector(self.tokenizer,
self.tokenizer_type,
self.table_type,
self.nonmarc_tags,
[recID1, recID2])
collector.set_special_tags(self.special_tags)
wlist = collector.collect(nonmarc, wlist)
# lookup index-time synonyms:
synonym_kbrs = get_all_synonym_knowledge_bases()
if synonym_kbrs.has_key(self.index_name):
if len(wlist) == 0: return 0
recIDs = wlist.keys()
for recID in recIDs:
for word in wlist[recID]:
word_synonyms = get_synonym_terms(word,
synonym_kbrs[self.index_name][0],
synonym_kbrs[self.index_name][1],
use_memoise=True)
if word_synonyms:
wlist[recID] = list_union(word_synonyms, wlist[recID])
# were there some words for these recIDs found?
recIDs = wlist.keys()
for recID in recIDs:
# was this record marked as deleted?
if "DELETED" in self.get_field(recID, "980__c"):
wlist[recID] = []
write_message("... record %d was declared deleted, removing its word list" % recID, verbose=9)
write_message("... record %d, termlist: %s" % (recID, wlist[recID]), verbose=9)
if len(wlist) == 0: return 0
# put words into reverse index table with FUTURE status:
for recID in recIDs:
run_sql("INSERT INTO %sR (id_bibrec,termlist,type) VALUES (%%s,%%s,'FUTURE')" % wash_table_column_name(self.table_name[:-1]), (recID, serialize_via_marshal(wlist[recID]))) # kwalitee: disable=sql
# ... and, for new records, enter the CURRENT status as empty:
try:
run_sql("INSERT INTO %sR (id_bibrec,termlist,type) VALUES (%%s,%%s,'CURRENT')" % wash_table_column_name(self.table_name[:-1]), (recID, serialize_via_marshal([]))) # kwalitee: disable=sql
except DatabaseError:
# okay, it's an already existing record, no problem
pass
# put words into memory word list:
put = self.put
for recID in recIDs:
for w in wlist[recID]:
put(recID, w, 1)
return len(recIDs)
def find_nonmarc_records(self, recID1, recID2):
"""Divides recID range into two different tables,
first one contains only recIDs of the records that
are Marc type and the second one contains records
of nonMarc type"""
marc = range(recID1, recID2 + 1)
nonmarc = []
query = """SELECT id FROM %s WHERE master_format <> 'marc'
AND id BETWEEN %%s AND %%s""" % "bibrec"
res = run_sql(query, (recID1, recID2))
if res:
nonmarc = list(zip(*res)[0])
if len(nonmarc) == (recID2 - recID1 + 1):
nonmarc = xrange(recID1, recID2 + 1)
marc = []
else:
for recID in nonmarc:
marc.remove(recID)
else:
marc = xrange(recID1, recID2 + 1)
return [marc, nonmarc]
def log_progress(self, start, done, todo):
"""Calculate progress and store it.
start: start time,
done: records processed,
todo: total number of records"""
time_elapsed = time.time() - start
# consistency check
if time_elapsed == 0 or done > todo:
return
time_recs_per_min = done / (time_elapsed / 60.0)
write_message("%d records took %.1f seconds to complete.(%1.f recs/min)"\
% (done, time_elapsed, time_recs_per_min))
if time_recs_per_min:
write_message("Estimated runtime: %.1f minutes" % \
((todo - done) / time_recs_per_min))
def put(self, recID, word, sign):
"""Keeps track of changes done during indexing
and stores these changes in memory for further use.
Indexing process needs this information later while
filling in the database.
@param recID: recID of the record we want to update in memory
@param word: word we want to update
@param sign: sign of the word: 1 means keep this word in the database,
    -1 means remove the word from the database
"""
value = self.value
try:
if self.wash_index_terms:
word = wash_index_term(word, self.wash_index_terms)
if value.has_key(word):
# the word 'word' exist already: update sign
value[word][recID] = sign
else:
value[word] = {recID: sign}
except Exception as e:
write_message(
"Error: Cannot put word %s with sign %d for recID %s (%s)."
% (word, sign, recID, e)
)
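    # Illustrative note: after a few put() calls self.value is shaped like
    #   {'ellis': {10: 1, 11: -1}, 'cern': {10: 1}}
    # i.e. term -> {recID: sign}, where +1 schedules the term for addition
    # and -1 for removal when put_into_db() flushes the cache (the terms
    # and recIDs above are made up).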
def del_recIDs(self, recIDs):
"""Fetches records which id in the recIDs range list and adds
them to the wordTable. The recIDs range list is of the form:
[[i1_low,i1_high],[i2_low,i2_high], ..., [iN_low,iN_high]].
"""
count = 0
for arange in recIDs:
task_sleep_now_if_required()
self.del_recID_range(arange[0], arange[1])
count = count + arange[1] - arange[0]
self.virtual_index_update_mode = CFG_BIBINDEX_UPDATE_MODE["Remove"]
self.put_into_db()
self.notify_virtual_indexes(recIDs)
if self.index_name == 'fulltext' and CFG_SOLR_URL:
solr_commit()
def del_recID_range(self, low, high):
"""Deletes records with 'recID' system number between low
and high from memory words index table."""
write_message("%s fetching existing words for records #%d-#%d started" % \
(self.table_name, low, high), verbose=3)
self.recIDs_in_mem.append([low, high])
query = """SELECT id_bibrec,termlist FROM %sR as bb WHERE bb.id_bibrec
BETWEEN %%s AND %%s""" % (self.table_name[:-1])
recID_rows = run_sql(query, (low, high))
for recID_row in recID_rows:
recID = recID_row[0]
wlist = deserialize_via_marshal(recID_row[1])
for word in wlist:
self.put(recID, word, -1)
write_message("%s fetching existing words for records #%d-#%d ended" % \
(self.table_name, low, high), verbose=3)
def check_bad_words(self):
"""
Finds bad words in reverse tables. Returns True in case of bad words.
"""
query = """SELECT 1 FROM %sR WHERE type IN ('TEMPORARY','FUTURE') LIMIT 1""" \
% (self.table_name[:-1],)
res = run_sql(query)
return bool(res)
def report_on_table_consistency(self):
"""Check reverse words index tables (e.g. idxWORD01R) for
interesting states such as 'TEMPORARY' state.
Prints small report (no of words, no of bad words).
"""
# find number of words:
query = """SELECT COUNT(1) FROM %s""" % (self.table_name)
res = run_sql(query, None, 1)
if res:
nb_words = res[0][0]
else:
nb_words = 0
# report stats:
write_message("%s contains %d words" % (self.table_name, nb_words))
# find possible bad states in reverse tables:
if self.check_bad_words():
write_message("EMERGENCY: %s needs to be repaired" %
(self.table_name, ))
else:
write_message("%s is in consistent state" % (self.table_name))
def repair(self, opt_flush):
"""Repair the whole table"""
# find possible bad states in reverse tables:
if not self.check_bad_words():
return
query = """SELECT id_bibrec FROM %sR WHERE type IN ('TEMPORARY','FUTURE')""" \
% (self.table_name[:-1])
res = intbitset(run_sql(query))
recIDs = create_range_list(list(res))
flush_count = 0
records_done = 0
records_to_go = 0
for arange in recIDs:
records_to_go = records_to_go + arange[1] - arange[0] + 1
time_started = time.time() # will measure profile time
for arange in recIDs:
i_low = arange[0]
chunksize_count = 0
while i_low <= arange[1]:
task_sleep_now_if_required()
# calculate chunk group of recIDs and treat it:
i_high = min(i_low + opt_flush - flush_count - 1, arange[1])
i_high = min(i_low + chunksize - chunksize_count - 1, i_high)
self.fix_recID_range(i_low, i_high)
flush_count = flush_count + i_high - i_low + 1
chunksize_count = chunksize_count + i_high - i_low + 1
records_done = records_done + i_high - i_low + 1
if chunksize_count >= chunksize:
chunksize_count = 0
# flush if necessary:
if flush_count >= opt_flush:
self.put_into_db("emergency")
self.clean()
flush_count = 0
self.log_progress(time_started, records_done, records_to_go)
# iterate:
i_low = i_high + 1
if flush_count > 0:
self.put_into_db("emergency")
self.log_progress(time_started, records_done, records_to_go)
write_message("%s inconsistencies repaired." % self.table_name)
def chk_recID_range(self, low, high):
"""Check if the reverse index table is in proper state"""
## check db
query = """SELECT 1 FROM %sR WHERE type IN ('TEMPORARY','FUTURE')
AND id_bibrec BETWEEN %%s AND %%s LIMIT 1""" % self.table_name[:-1]
res = run_sql(query, (low, high), 1)
if not res:
write_message("%s for %d-%d is in consistent state" % (self.table_name, low, high))
return # okay, words table is consistent
## inconsistency detected!
write_message("EMERGENCY: %s inconsistencies detected..." % self.table_name)
error_message = "Errors found. You should check consistency of the " \
"%s - %sR tables.\nRunning 'bibindex --repair' is " \
"recommended." % (self.table_name, self.table_name[:-1])
write_message("EMERGENCY: " + error_message, stream=sys.stderr)
raise StandardError(error_message)
def fix_recID_range(self, low, high):
"""Try to fix reverse index database consistency
(e.g. table idxWORD01R) in the low,high doc-id range.
Possible states for a recID follow:
CUR TMP FUT: very bad things have happened: warn!
CUR TMP : very bad things have happened: warn!
CUR FUT: delete FUT (crash before flushing)
CUR : database is ok
TMP FUT: add TMP to memory and del FUT from memory
flush (revert to old state)
TMP : very bad things have happened: warn!
FUT: very bad things have happened: warn!
"""
state = {}
query = "SELECT id_bibrec,type FROM %sR WHERE id_bibrec BETWEEN %%s AND %%s"\
% self.table_name[:-1]
res = run_sql(query, (low, high))
for row in res:
if not state.has_key(row[0]):
state[row[0]] = []
state[row[0]].append(row[1])
ok = 1 # will hold info on whether we will be able to repair
for recID in state.keys():
if not 'TEMPORARY' in state[recID]:
if 'FUTURE' in state[recID]:
if 'CURRENT' not in state[recID]:
write_message("EMERGENCY: Index record %d is in inconsistent state. Can't repair it." % recID)
ok = 0
else:
write_message("EMERGENCY: Inconsistency in index record %d detected" % recID)
query = """DELETE FROM %sR
WHERE id_bibrec=%%s""" % self.table_name[:-1]
run_sql(query, (recID,))
write_message("EMERGENCY: Inconsistency in record %d repaired." % recID)
else:
if 'FUTURE' in state[recID] and not 'CURRENT' in state[recID]:
self.recIDs_in_mem.append([recID, recID])
# Get the words file
query = """SELECT type,termlist FROM %sR
WHERE id_bibrec=%%s""" % self.table_name[:-1]
write_message(query, verbose=9)
res = run_sql(query, (recID,))
for row in res:
wlist = deserialize_via_marshal(row[1])
write_message("Words are %s " % wlist, verbose=9)
if row[0] == 'TEMPORARY':
sign = 1
else:
sign = -1
for word in wlist:
self.put(recID, word, sign)
else:
write_message("EMERGENCY: %s for %d is in inconsistent "
"state. Couldn't repair it." % (self.table_name,
recID), stream=sys.stderr)
ok = 0
if not ok:
error_message = "Unrepairable errors found. You should check " \
"consistency of the %s - %sR tables. Deleting affected " \
"TEMPORARY and FUTURE entries from these tables is " \
"recommended; see the BibIndex Admin Guide." % \
(self.table_name, self.table_name[:-1])
write_message("EMERGENCY: " + error_message, stream=sys.stderr)
raise StandardError(error_message)
def main():
"""Main that construct all the bibtask."""
task_init(authorization_action='runbibindex',
authorization_msg="BibIndex Task Submission",
description="""Examples:
\t%s -a -i 234-250,293,300-500 -u admin@localhost
\t%s -a -w author,fulltext -M 8192 -v3
\t%s -d -m +4d -A on --flush=10000\n""" % ((sys.argv[0],) * 3), help_specific_usage=""" Indexing options:
-a, --add\t\tadd or update words for selected records
-d, --del\t\tdelete words for selected records
-i, --id=low[-high]\t\tselect according to doc recID
-m, --modified=from[,to]\tselect according to modification date
-c, --collection=c1[,c2]\tselect according to collection
-R, --reindex\treindex the selected indexes from scratch
Repairing options:
-k, --check\t\tcheck consistency for all records in the table(s)
-r, --repair\t\ttry to repair all records in the table(s)
Specific options:
-w, --windex=w1[,w2]\tword/phrase indexes to consider (all)
-M, --maxmem=XXX\tmaximum memory usage in kB (no limit)
-f, --flush=NNN\t\tfull consistent table flush after NNN records (10000)
--force\t\tforce indexing of all records for provided indexes
-Z, --remove-dependent-index=w name of an index for removing from virtual index
-l --all-virtual\t\t set of all virtual indexes; the same as: -w virtual_ind1, virtual_ind2, ...
""",
version=__revision__,
specific_params=("adi:m:c:w:krRM:f:oZ:l", [
"add",
"del",
"id=",
"modified=",
"collection=",
"windex=",
"check",
"repair",
"reindex",
"maxmem=",
"flush=",
"force",
"remove-dependent-index=",
"all-virtual"
]),
task_stop_helper_fnc=task_stop_table_close_fnc,
task_submit_elaborate_specific_parameter_fnc=task_submit_elaborate_specific_parameter,
task_run_fnc=task_run_core,
task_submit_check_options_fnc=task_submit_check_options)
def task_submit_check_options():
"""Check for options compatibility."""
if task_get_option("reindex"):
if task_get_option("cmd") != "add" or task_get_option('id') or task_get_option('collection'):
print >> sys.stderr, "ERROR: You can use --reindex only when adding modified record."
return False
return True
def task_submit_elaborate_specific_parameter(key, value, opts, args):
""" Given the string key it checks it's meaning, eventually using the
value. Usually it fills some key in the options dict.
It must return True if it has elaborated the key, False, if it doesn't
know that key.
eg:
if key in ['-n', '--number']:
self.options['number'] = value
return True
return False
"""
if key in ("-a", "--add"):
task_set_option("cmd", "add")
if ("-x", "") in opts or ("--del", "") in opts:
raise StandardError("Can not have --add and --del at the same time!")
elif key in ("-k", "--check"):
task_set_option("cmd", "check")
elif key in ("-r", "--repair"):
task_set_option("cmd", "repair")
elif key in ("-d", "--del"):
task_set_option("cmd", "del")
elif key in ("-i", "--id"):
task_set_option('id', task_get_option('id') + split_ranges(value))
elif key in ("-m", "--modified"):
task_set_option("modified", get_date_range(value))
elif key in ("-c", "--collection"):
task_set_option("collection", value)
elif key in ("-R", "--reindex"):
task_set_option("reindex", True)
elif key in ("-w", "--windex"):
task_set_option("windex", value)
elif key in ("-M", "--maxmem"):
task_set_option("maxmem", int(value))
if task_get_option("maxmem") < base_process_size + 1000:
raise StandardError("Memory usage should be higher than %d kB" % \
(base_process_size + 1000))
elif key in ("-f", "--flush"):
task_set_option("flush", int(value))
elif key in ("-o", "--force"):
task_set_option("force", True)
elif key in ("-Z", "--remove-dependent-index",):
task_set_option("remove-dependent-index", value)
elif key in ("-l", "--all-virtual",):
task_set_option("all-virtual", True)
else:
return False
return True
def task_stop_table_close_fnc():
""" Close tables to STOP. """
global _last_word_table
if _last_word_table:
_last_word_table.put_into_db()
def get_recIDs_by_date_bibliographic(dates, index_name, force_all=False):
""" Finds records that were modified between DATES[0] and DATES[1]
for given index.
If DATES is not set, then finds records that were modified since
the last update of the index.
@param wordtable_type: can be 'Words', 'Pairs' or 'Phrases'
"""
index_id = get_index_id_from_index_name(index_name)
if not dates:
query = """SELECT last_updated FROM idxINDEX WHERE id=%s"""
res = run_sql(query, (index_id,))
if not res:
return set([])
if not res[0][0] or force_all:
dates = ("0000-00-00", None)
else:
dates = (res[0][0], None)
if dates[1] is None:
res = intbitset(run_sql("""SELECT b.id FROM bibrec AS b WHERE b.modification_date >= %s""",
(dates[0],)))
if index_name == 'fulltext':
res |= intbitset(run_sql("""SELECT id_bibrec FROM bibrec_bibdoc JOIN bibdoc ON id_bibdoc=id
WHERE text_extraction_date <= modification_date AND
modification_date >= %s
AND status<>'DELETED'""",
(dates[0],)))
elif dates[0] is None:
res = intbitset(run_sql("""SELECT b.id FROM bibrec AS b WHERE b.modification_date <= %s""",
(dates[1],)))
if index_name == 'fulltext':
res |= intbitset(run_sql("""SELECT id_bibrec FROM bibrec_bibdoc JOIN bibdoc ON id_bibdoc=id
WHERE text_extraction_date <= modification_date
AND modification_date <= %s
AND status<>'DELETED'""",
(dates[1],)))
else:
res = intbitset(run_sql("""SELECT b.id FROM bibrec AS b
WHERE b.modification_date >= %s AND
b.modification_date <= %s""",
(dates[0], dates[1])))
if index_name == 'fulltext':
res |= intbitset(run_sql("""SELECT id_bibrec FROM bibrec_bibdoc JOIN bibdoc ON id_bibdoc=id
WHERE text_extraction_date <= modification_date AND
modification_date >= %s AND
modification_date <= %s AND
status<>'DELETED'""",
(dates[0], dates[1],)))
# special case of author indexes where we need to re-index
# those records that were affected by changed BibAuthorID attributions:
if index_name in ('author', 'firstauthor', 'exactauthor', 'exactfirstauthor'):
from invenio.bibauthorid_personid_maintenance import get_recids_affected_since
# dates[1] is ignored, since BibAuthorID API does not offer upper limit search
rec_list_author = get_recids_affected_since(dates[0], dates[1])
res = res | rec_list_author
return set(res)
def get_recIDs_by_date_authority(dates, index_name, force_all=False):
""" Finds records that were modified between DATES[0] and DATES[1]
for given index.
If DATES is not set, then finds records that were modified since
the last update of the index.
Searches for bibliographic records connected to authority records
that have been changed.
"""
index_id = get_index_id_from_index_name(index_name)
index_tags = get_index_tags(index_name)
if not dates:
query = """SELECT last_updated FROM idxINDEX WHERE id=%s"""
res = run_sql(query, (index_id,))
if not res:
return set([])
if not res[0][0] or force_all:
dates = ("0000-00-00", None)
else:
dates = (res[0][0], None)
res = intbitset()
for tag in index_tags:
pattern = tag.replace('%', '*')
matches = fnmatch.filter(CFG_BIBAUTHORITY_CONTROLLED_FIELDS_BIBLIOGRAPHIC.keys(), pattern)
if not len(matches):
continue
for tag_match in matches:
# get the type of authority record associated with this field
auth_type = CFG_BIBAUTHORITY_CONTROLLED_FIELDS_BIBLIOGRAPHIC.get(tag_match)
# find updated authority records of this type
# dates[1] is ignored, needs dates[0] to find res
now = datetime.now()
auth_recIDs = search_pattern(p='980__a:' + auth_type) \
& search_unit_in_bibrec(str(dates[0]), str(now), search_type='m')
# now find dependent bibliographic records
for auth_recID in auth_recIDs:
# get the fix authority identifier of this authority record
control_nos = get_control_nos_from_recID(auth_recID)
# there may be multiple control number entries! (the '035' field is repeatable!)
for control_no in control_nos:
# get the bibrec IDs that refer to AUTHORITY_ID in TAG
tag_0 = tag_match[:5] + '0' # possibly do the same for '4' subfields ?
fieldvalue = '"' + control_no + '"'
res |= search_pattern(p=tag_0 + ':' + fieldvalue)
return set(res)
def get_not_updated_recIDs(modified_dates, indexes, force_all=False):
"""Finds not updated recIDs in database for indexes.
@param modified_dates: between this dates we should look for modified records
@type modified_dates: [date_old, date_new]
@param indexes: list of indexes
@type indexes: string separated by coma
@param force_all: if True all records will be taken
"""
found_recIDs = set()
write_message(CFG_BIBINDEX_UPDATE_MESSAGE)
for index in indexes:
found_recIDs |= get_recIDs_by_date_bibliographic(modified_dates, index, force_all)
found_recIDs |= get_recIDs_by_date_authority(modified_dates, index, force_all)
return list(sorted(found_recIDs))
def get_recIDs_from_cli(indexes=[]):
"""
Gets recID ranges for indexing from the CLI when the
user specified the 'id' or 'collection' option, or
searches for modified recIDs for the provided indexes
when recIDs are not specified.
@param indexes: it's a list of specified indexes, which
can be obtained from CLI with use of:
get_indexes_from_cli() function.
@type indexes: list of strings
"""
# need to first update idxINDEX table to find proper recIDs for reindexing
if task_get_option("reindex"):
for index_name in indexes:
run_sql("""UPDATE idxINDEX SET last_updated='0000-00-00 00:00:00'
WHERE name=%s""", (index_name,))
if task_get_option("id"):
return task_get_option("id")
elif task_get_option("collection"):
l_of_colls = task_get_option("collection").split(",")
recIDs = perform_request_search(c=l_of_colls)
recIDs_range = []
for recID in recIDs:
recIDs_range.append([recID, recID])
return recIDs_range
elif task_get_option("cmd") == "add":
recs = get_not_updated_recIDs(task_get_option("modified"),
indexes,
task_get_option("force"))
recIDs_range = beautify_range_list(create_range_list(recs))
return recIDs_range
return []
def get_indexes_from_cli():
"""
Gets indexes from the CLI and checks if they are
valid. If no indexes were specified, the function
will return all known indexes.
"""
indexes = task_get_option("windex")
all_virtual = task_get_option("all-virtual")
if all_virtual:
indexes = filter_for_virtual_indexes(get_all_indexes())
elif not indexes:
indexes = get_all_indexes()
else:
indexes = indexes.split(",")
indexes = remove_inexistent_indexes(indexes, leave_virtual=True)
return indexes
def remove_dependent_index(virtual_indexes, dependent_index):
"""
Removes dependent index from virtual indexes.
@param virtual_indexes: names of virtual_indexes
@type virtual_indexes: list of strings
@param dependent_index: name of dependent index
@type dependent_index: string
"""
if not virtual_indexes:
write_message("You should specify a name of a virtual index...")
return
id_dependent = get_index_id_from_index_name(dependent_index)
for index_name in virtual_indexes:
index_id = get_index_id_from_index_name(index_name)
for type_ in CFG_BIBINDEX_INDEX_TABLE_TYPE.itervalues():
vit = VirtualIndexTable(index_name, type_)
vit.remove_dependent_index(dependent_index)
task_sleep_now_if_required()
query = """DELETE FROM idxINDEX_idxINDEX WHERE id_virtual=%s AND id_normal=%s"""
run_sql(query, (index_id, id_dependent))
def should_update_virtual_indexes():
"""
Decides if any virtual indexes should be updated.
Decision is made based on arguments obtained
from CLI.
"""
return task_get_option("all-virtual") or task_get_option("windex")
def update_virtual_indexes(virtual_indexes, reindex=False):
"""
Function will update all specified virtual_indexes.
@param virtual_indexes: list of index names
@param reindex: shall we reindex given v.indexes from scratch?
"""
kwargs = {}
if reindex:
kwargs.update({'table_prefix': 'tmp_'})
for index_name in virtual_indexes:
if reindex:
index_id = get_index_id_from_index_name(index_name)
init_temporary_reindex_tables(index_id)
for key, type_ in CFG_BIBINDEX_INDEX_TABLE_TYPE.iteritems():
kwargs.update({'wash_index_terms': CFG_BIBINDEX_WASH_INDEX_TERMS[key]})
vit = VirtualIndexTable(index_name, type_, **kwargs)
vit.set_reindex_mode()
vit.run_update()
swap_temporary_reindex_tables(index_id)
update_index_last_updated([index_name], task_get_task_param('task_starting_time'))
task_sleep_now_if_required(can_stop_too=True)
else:
for key, type_ in CFG_BIBINDEX_INDEX_TABLE_TYPE.iteritems():
kwargs.update({'wash_index_terms': CFG_BIBINDEX_WASH_INDEX_TERMS[key]})
vit = VirtualIndexTable(index_name, type_, **kwargs)
vit.run_update()
task_sleep_now_if_required(can_stop_too=True)
def task_run_core():
"""Runs the task by fetching arguments from the BibSched task queue.
This is what BibSched will be invoking via daemon call.
"""
global _last_word_table
indexes = get_indexes_from_cli()
if len(indexes) == 0:
write_message("Specified indexes can't be found.")
return True
virtual_indexes = filter_for_virtual_indexes(indexes)
regular_indexes = list(set(indexes) - set(virtual_indexes))
# check tables consistency
if task_get_option("cmd") == "check":
for index_name in indexes:
wordTable = WordTable(index_name=index_name,
table_type=CFG_BIBINDEX_INDEX_TABLE_TYPE["Words"],
wash_index_terms=50)
_last_word_table = wordTable
wordTable.report_on_table_consistency()
task_sleep_now_if_required(can_stop_too=True)
wordTable = WordTable(index_name=index_name,
table_type=CFG_BIBINDEX_INDEX_TABLE_TYPE["Pairs"],
wash_index_terms=100)
_last_word_table = wordTable
wordTable.report_on_table_consistency()
task_sleep_now_if_required(can_stop_too=True)
wordTable = WordTable(index_name=index_name,
table_type=CFG_BIBINDEX_INDEX_TABLE_TYPE["Phrases"],
wash_index_terms=0)
_last_word_table = wordTable
wordTable.report_on_table_consistency()
task_sleep_now_if_required(can_stop_too=True)
_last_word_table = None
return True
# virtual index: remove dependent index
if task_get_option("remove-dependent-index"):
remove_dependent_index(indexes,
task_get_option("remove-dependent-index"))
return True
# virtual index: update
if should_update_virtual_indexes():
update_virtual_indexes(virtual_indexes, task_get_option("reindex"))
if len(regular_indexes) == 0:
return True
# regular index: initialization for Words,Pairs,Phrases
recIDs_range = get_recIDs_from_cli(regular_indexes)
# FIXME: restore when the hstRECORD table race condition between
# bibupload and bibindex is solved
# recIDs_for_index = find_affected_records_for_index(regular_indexes,
# recIDs_range,
# (task_get_option("force") or \
# task_get_option("reindex") or \
# task_get_option("cmd") == "del"))
recIDs_for_index = find_affected_records_for_index(regular_indexes,
recIDs_range,
True)
if len(recIDs_for_index.keys()) == 0:
write_message("Selected indexes/recIDs are up to date.")
# Let's work on single words!
for index_name in recIDs_for_index.keys():
index_id = get_index_id_from_index_name(index_name)
reindex_prefix = ""
if task_get_option("reindex"):
reindex_prefix = "tmp_"
init_temporary_reindex_tables(index_id, reindex_prefix)
wordTable = WordTable(index_name=index_name,
table_type=CFG_BIBINDEX_INDEX_TABLE_TYPE["Words"],
table_prefix=reindex_prefix,
wash_index_terms=50)
_last_word_table = wordTable
wordTable.report_on_table_consistency()
try:
if task_get_option("cmd") == "del":
if task_get_option("id") or task_get_option("collection"):
wordTable.del_recIDs(recIDs_range)
task_sleep_now_if_required(can_stop_too=True)
else:
error_message = "Missing IDs of records to delete from " \
"index %s." % wordTable.table_name
write_message(error_message, stream=sys.stderr)
raise StandardError(error_message)
elif task_get_option("cmd") == "add":
final_recIDs = beautify_range_list(create_range_list(recIDs_for_index[index_name]))
wordTable.add_recIDs(final_recIDs, task_get_option("flush"))
task_sleep_now_if_required(can_stop_too=True)
elif task_get_option("cmd") == "repair":
wordTable.repair(task_get_option("flush"))
task_sleep_now_if_required(can_stop_too=True)
else:
error_message = "Invalid command found processing %s" % \
wordTable.table_name
write_message(error_message, stream=sys.stderr)
raise StandardError(error_message)
except StandardError, e:
write_message("Exception caught: %s" % e, sys.stderr)
register_exception(alert_admin=True)
if _last_word_table:
_last_word_table.put_into_db()
raise
wordTable.report_on_table_consistency()
task_sleep_now_if_required(can_stop_too=True)
# Let's work on pairs now
wordTable = WordTable(index_name=index_name,
table_type=CFG_BIBINDEX_INDEX_TABLE_TYPE["Pairs"],
table_prefix=reindex_prefix,
wash_index_terms=100)
_last_word_table = wordTable
wordTable.report_on_table_consistency()
try:
if task_get_option("cmd") == "del":
if task_get_option("id") or task_get_option("collection"):
wordTable.del_recIDs(recIDs_range)
task_sleep_now_if_required(can_stop_too=True)
else:
error_message = "Missing IDs of records to delete from " \
"index %s." % wordTable.table_name
write_message(error_message, stream=sys.stderr)
raise StandardError(error_message)
elif task_get_option("cmd") == "add":
final_recIDs = beautify_range_list(create_range_list(recIDs_for_index[index_name]))
wordTable.add_recIDs(final_recIDs, task_get_option("flush"))
task_sleep_now_if_required(can_stop_too=True)
elif task_get_option("cmd") == "repair":
wordTable.repair(task_get_option("flush"))
task_sleep_now_if_required(can_stop_too=True)
else:
error_message = "Invalid command found processing %s" % \
wordTable.table_name
write_message(error_message, stream=sys.stderr)
raise StandardError(error_message)
except StandardError, e:
write_message("Exception caught: %s" % e, sys.stderr)
register_exception()
if _last_word_table:
_last_word_table.put_into_db()
raise
wordTable.report_on_table_consistency()
task_sleep_now_if_required(can_stop_too=True)
# Let's work on phrases now
wordTable = WordTable(index_name=index_name,
table_type=CFG_BIBINDEX_INDEX_TABLE_TYPE["Phrases"],
table_prefix=reindex_prefix,
wash_index_terms=0)
_last_word_table = wordTable
wordTable.report_on_table_consistency()
try:
if task_get_option("cmd") == "del":
if task_get_option("id") or task_get_option("collection"):
wordTable.del_recIDs(recIDs_range)
task_sleep_now_if_required(can_stop_too=True)
else:
error_message = "Missing IDs of records to delete from " \
"index %s." % wordTable.table_name
write_message(error_message, stream=sys.stderr)
raise StandardError(error_message)
elif task_get_option("cmd") == "add":
final_recIDs = beautify_range_list(create_range_list(recIDs_for_index[index_name]))
wordTable.add_recIDs(final_recIDs, task_get_option("flush"))
if not task_get_option("id") and not task_get_option("collection"):
update_index_last_updated([index_name], task_get_task_param('task_starting_time'))
task_sleep_now_if_required(can_stop_too=True)
elif task_get_option("cmd") == "repair":
wordTable.repair(task_get_option("flush"))
task_sleep_now_if_required(can_stop_too=True)
else:
error_message = "Invalid command found processing %s" % \
wordTable.table_name
write_message(error_message, stream=sys.stderr)
raise StandardError(error_message)
except StandardError, e:
write_message("Exception caught: %s" % e, sys.stderr)
register_exception()
if _last_word_table:
_last_word_table.put_into_db()
raise
wordTable.report_on_table_consistency()
task_sleep_now_if_required(can_stop_too=True)
if task_get_option("reindex"):
swap_temporary_reindex_tables(index_id, reindex_prefix)
update_index_last_updated([index_name], task_get_task_param('task_starting_time'))
task_sleep_now_if_required(can_stop_too=True)
# update modification date also for indexes that were up to date
if not task_get_option("id") and not task_get_option("collection") and \
task_get_option("cmd") == "add":
up_to_date = set(indexes) - set(recIDs_for_index.keys())
update_index_last_updated(list(up_to_date), task_get_task_param('task_starting_time'))
_last_word_table = None
return True
### okay, here we go:
if __name__ == '__main__':
main()
| gpl-2.0 |
google/mobly | mobly/controllers/android_device.py | 1 | 38734 | # Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import contextlib
import logging
import os
import re
import shutil
import time
from mobly import logger as mobly_logger
from mobly import runtime_test_info
from mobly import utils
from mobly.controllers.android_device_lib import adb
from mobly.controllers.android_device_lib import errors
from mobly.controllers.android_device_lib import fastboot
from mobly.controllers.android_device_lib import service_manager
from mobly.controllers.android_device_lib.services import logcat
from mobly.controllers.android_device_lib.services import snippet_management_service
# Convenience constant for the package of Mobly Bundled Snippets
# (http://github.com/google/mobly-bundled-snippets).
MBS_PACKAGE = 'com.google.android.mobly.snippet.bundled'
MOBLY_CONTROLLER_CONFIG_NAME = 'AndroidDevice'
ANDROID_DEVICE_PICK_ALL_TOKEN = '*'
_DEBUG_PREFIX_TEMPLATE = '[AndroidDevice|%s] %s'
# Key name for adb logcat extra params in config file.
ANDROID_DEVICE_ADB_LOGCAT_PARAM_KEY = 'adb_logcat_param'
ANDROID_DEVICE_EMPTY_CONFIG_MSG = 'Configuration is empty, abort!'
ANDROID_DEVICE_NOT_LIST_CONFIG_MSG = 'Configuration should be a list, abort!'
# System properties that are cached by the `AndroidDevice.build_info` property.
# The only properties on this list should be read-only system properties.
CACHED_SYSTEM_PROPS = [
'ro.build.id',
'ro.build.type',
'ro.build.version.codename',
'ro.build.version.sdk',
'ro.build.product',
'ro.build.characteristics',
'ro.debuggable',
'ro.product.name',
'ro.hardware',
]
# Keys for attributes in configs that alternate the controller module behavior.
# If this is False for a device, errors from that device will be ignored
# during `create`. Default is True.
KEY_DEVICE_REQUIRED = 'required'
DEFAULT_VALUE_DEVICE_REQUIRED = True
# If True, logcat collection will not be started during `create`.
# Default is False.
KEY_SKIP_LOGCAT = 'skip_logcat'
DEFAULT_VALUE_SKIP_LOGCAT = False
SERVICE_NAME_LOGCAT = 'logcat'
# Default name for bug reports taken without a specified test name.
DEFAULT_BUG_REPORT_NAME = 'bugreport'
# Default Timeout to wait for boot completion
DEFAULT_TIMEOUT_BOOT_COMPLETION_SECOND = 15 * 60
# Timeout for the adb command for taking a screenshot
TAKE_SCREENSHOT_TIMEOUT_SECOND = 10
# Aliases of error types for backward compatibility.
Error = errors.Error
DeviceError = errors.DeviceError
SnippetError = snippet_management_service.Error
# Regex to heuristically determine if the device is an emulator.
EMULATOR_SERIAL_REGEX = re.compile(r'emulator-\d+')
def create(configs):
"""Creates AndroidDevice controller objects.
Args:
configs: A list of dicts, each representing a configuration for an
Android device.
Returns:
A list of AndroidDevice objects.
"""
if not configs:
raise Error(ANDROID_DEVICE_EMPTY_CONFIG_MSG)
elif configs == ANDROID_DEVICE_PICK_ALL_TOKEN:
ads = get_all_instances()
elif not isinstance(configs, list):
raise Error(ANDROID_DEVICE_NOT_LIST_CONFIG_MSG)
elif isinstance(configs[0], dict):
# Configs is a list of dicts.
ads = get_instances_with_configs(configs)
elif isinstance(configs[0], str):
# Configs is a list of strings representing serials.
ads = get_instances(configs)
else:
raise Error('No valid config found in: %s' % configs)
_start_services_on_ads(ads)
return ads
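# Example (illustrative only; the serials below are placeholders): `configs`
# normally comes from the Mobly testbed config and may take any of the
# accepted forms handled above, e.g.
#
#   ads = create([{'serial': 'ABC0123456', 'label': 'dut'}])
#   ads = create(['ABC0123456', 'DEF9876543'])
#   ads = create('*')  # pick all adb-visible devices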
def destroy(ads):
"""Cleans up AndroidDevice objects.
Args:
ads: A list of AndroidDevice objects.
"""
for ad in ads:
try:
ad.services.stop_all()
except Exception:
ad.log.exception('Failed to clean up properly.')
def get_info(ads):
"""Get information on a list of AndroidDevice objects.
Args:
ads: A list of AndroidDevice objects.
Returns:
A list of dict, each representing info for an AndroidDevice objects.
"""
return [ad.device_info for ad in ads]
def _validate_device_existence(serials):
"""Validate that all the devices specified by the configs can be reached.
Args:
serials: list of strings, the serials of all the devices that are expected
to exist.
"""
valid_ad_identifiers = list_adb_devices() + list_adb_devices_by_usb_id()
for serial in serials:
if serial not in valid_ad_identifiers:
raise Error(f'Android device serial "{serial}" is specified in '
'config but is not reachable.')
def _start_services_on_ads(ads):
"""Starts long running services on multiple AndroidDevice objects.
If any one AndroidDevice object fails to start services, cleans up all
AndroidDevice objects and their services.
Args:
ads: A list of AndroidDevice objects whose services to start.
"""
for ad in ads:
start_logcat = not getattr(ad, KEY_SKIP_LOGCAT, DEFAULT_VALUE_SKIP_LOGCAT)
try:
if start_logcat:
ad.services.logcat.start()
except Exception:
is_required = getattr(ad, KEY_DEVICE_REQUIRED,
DEFAULT_VALUE_DEVICE_REQUIRED)
if is_required:
ad.log.exception('Failed to start some services, abort!')
destroy(ads)
raise
else:
ad.log.exception('Skipping this optional device because some '
'services failed to start.')
def parse_device_list(device_list_str, key):
"""Parses a byte string representing a list of devices.
The string is generated by calling either adb or fastboot. The tokens in
each string is tab-separated.
Args:
device_list_str: Output of adb or fastboot.
key: The token that signifies a device in device_list_str.
Returns:
A list of android device serial numbers.
"""
try:
clean_lines = str(device_list_str, 'utf-8').strip().split('\n')
except UnicodeDecodeError:
logging.warning("unicode decode error, origin str: %s", device_list_str)
raise
results = []
for line in clean_lines:
tokens = line.strip().split('\t')
if len(tokens) == 2 and tokens[1] == key:
results.append(tokens[0])
return results
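# Illustrative example (serial made up): for raw `adb devices` output such as
#   b'List of devices attached\nABC0123456\tdevice\n\n'
# parse_device_list(out, 'device') returns ['ABC0123456']. Lines whose second
# token is not the key (e.g. 'offline' or 'unauthorized') are skipped, as is
# the header line.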
def list_adb_devices():
"""List all android devices connected to the computer that are detected by
adb.
Returns:
A list of android device serials. Empty if there's none.
"""
out = adb.AdbProxy().devices()
return parse_device_list(out, 'device')
def list_adb_devices_by_usb_id():
"""List the usb id of all android devices connected to the computer that
are detected by adb.
Returns:
A list of strings that are android device usb ids. Empty if there's
none.
"""
out = adb.AdbProxy().devices(['-l'])
clean_lines = str(out, 'utf-8').strip().split('\n')
results = []
for line in clean_lines:
tokens = line.strip().split()
if len(tokens) > 2 and tokens[1] == 'device':
results.append(tokens[2])
return results
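# Illustrative note (the exact line format may vary between adb versions):
# `adb devices -l` prints lines roughly like
#   ABC0123456    device usb:1-4 product:foo model:bar device:baz
# so tokens[2] above is the 'usb:...' identifier, which can also be used in
# device configs in place of a serial.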
def list_fastboot_devices():
"""List all android devices connected to the computer that are in in
fastboot mode. These are detected by fastboot.
Returns:
A list of android device serials. Empty if there's none.
"""
out = fastboot.FastbootProxy().devices()
return parse_device_list(out, 'fastboot')
def get_instances(serials):
"""Create AndroidDevice instances from a list of serials.
Args:
serials: A list of android device serials.
Returns:
A list of AndroidDevice objects.
"""
_validate_device_existence(serials)
results = []
for s in serials:
results.append(AndroidDevice(s))
return results
def get_instances_with_configs(configs):
"""Create AndroidDevice instances from a list of dict configs.
Each config should have the required key-value pair 'serial'.
Args:
configs: A list of dicts each representing the configuration of one
android device.
Returns:
A list of AndroidDevice objects.
"""
# First make sure each config contains a serial, and all the serials'
# corresponding devices exist.
serials = []
for c in configs:
try:
serials.append(c['serial'])
except KeyError:
raise Error(
'Required value "serial" is missing in AndroidDevice config %s.' % c)
_validate_device_existence(serials)
results = []
for c in configs:
serial = c.pop('serial')
is_required = c.get(KEY_DEVICE_REQUIRED, True)
try:
ad = AndroidDevice(serial)
ad.load_config(c)
except Exception:
if is_required:
raise
ad.log.exception('Skipping this optional device due to error.')
continue
results.append(ad)
return results
def get_all_instances(include_fastboot=False):
"""Create AndroidDevice instances for all attached android devices.
Args:
include_fastboot: Whether to include devices in bootloader mode or not.
Returns:
A list of AndroidDevice objects each representing an android device
attached to the computer.
"""
if include_fastboot:
serial_list = list_adb_devices() + list_fastboot_devices()
return get_instances(serial_list)
return get_instances(list_adb_devices())
def filter_devices(ads, func):
"""Finds the AndroidDevice instances from a list that match certain
conditions.
Args:
ads: A list of AndroidDevice instances.
func: A function that takes an AndroidDevice object and returns True
if the device satisfies the filter condition.
Returns:
A list of AndroidDevice instances that satisfy the filter condition.
"""
results = []
for ad in ads:
if func(ad):
results.append(ad)
return results
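# Illustrative sketch (not part of the original Mobly source):
# `filter_devices` accepts any predicate; this one selects the devices whose
# adb is already running as root.
def _example_filter_rooted(ads):
  return filter_devices(ads, lambda ad: ad.is_adb_root)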
def get_devices(ads, **kwargs):
"""Finds a list of AndroidDevice instance from a list that has specific
attributes of certain values.
Example:
get_devices(android_devices, label='foo', phone_number='1234567890')
get_devices(android_devices, model='angler')
Args:
ads: A list of AndroidDevice instances.
kwargs: keyword arguments used to filter AndroidDevice instances.
Returns:
A list of target AndroidDevice instances.
Raises:
Error: No devices are matched.
"""
def _get_device_filter(ad):
for k, v in kwargs.items():
if not hasattr(ad, k):
return False
elif getattr(ad, k) != v:
return False
return True
filtered = filter_devices(ads, _get_device_filter)
if not filtered:
raise Error('Could not find a target device that matches condition: %s.' %
kwargs)
else:
return filtered
def get_device(ads, **kwargs):
"""Finds a unique AndroidDevice instance from a list that has specific
attributes of certain values.
Example:
get_device(android_devices, label='foo', phone_number='1234567890')
get_device(android_devices, model='angler')
Args:
ads: A list of AndroidDevice instances.
kwargs: keyword arguments used to filter AndroidDevice instances.
Returns:
The target AndroidDevice instance.
Raises:
    Error: No device or more than one device matched.
"""
filtered = get_devices(ads, **kwargs)
if len(filtered) == 1:
return filtered[0]
else:
serials = [ad.serial for ad in filtered]
raise Error('More than one device matched: %s' % serials)
def take_bug_reports(ads, test_name=None, begin_time=None, destination=None):
"""Takes bug reports on a list of android devices.
If you want to take a bug report, call this function with a list of
  android_device objects in on_fail. Bug reports will be taken on all the
  devices in the list concurrently. Taking a bug report is relatively slow,
  so use this cautiously.
Args:
ads: A list of AndroidDevice instances.
test_name: Name of the test method that triggered this bug report.
If None, the default name "bugreport" will be used.
begin_time: timestamp taken when the test started, can be either
string or int. If None, the current time will be used.
destination: string, path to the directory where the bugreport
should be saved.
"""
if begin_time is None:
begin_time = mobly_logger.get_log_file_timestamp()
else:
begin_time = mobly_logger.sanitize_filename(str(begin_time))
def take_br(test_name, begin_time, ad, destination):
ad.take_bug_report(test_name=test_name,
begin_time=begin_time,
destination=destination)
args = [(test_name, begin_time, ad, destination) for ad in ads]
utils.concurrent_exec(take_br, args)
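# Illustrative sketch (not part of the original Mobly source): typical use is
# inside a test class's `on_fail` hook, where `record` is assumed to expose
# `test_name` and `begin_time`.
def _example_take_bug_reports_on_fail(ads, record):
  take_bug_reports(ads,
                   test_name=record.test_name,
                   begin_time=record.begin_time)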
class AndroidDevice:
"""Class representing an android device.
Each object of this class represents one Android device in Mobly. This class
provides various ways, like adb, fastboot, and Mobly snippets, to control
an Android device, whether it's a real device or an emulator instance.
You can also register your own services to the device's service manager.
See the docs of `service_manager` and `base_service` for details.
Attributes:
    serial: A string that's the serial number of the Android device.
log_path: A string that is the path where all logs collected on this
android device should be stored.
log: A logger adapted from root logger with an added prefix specific
to an AndroidDevice instance. The default prefix is
[AndroidDevice|<serial>]. Use self.debug_tag = 'tag' to use a
different tag in the prefix.
adb_logcat_file_path: A string that's the full path to the adb logcat
file collected, if any.
adb: An AdbProxy object used for interacting with the device via adb.
fastboot: A FastbootProxy object used for interacting with the device
via fastboot.
services: ServiceManager, the manager of long-running services on the
device.
"""
def __init__(self, serial=''):
self._serial = str(serial)
    # logging.log_path only exists when this is used in a Mobly test run.
self._log_path_base = getattr(logging, 'log_path', '/tmp/logs')
self._log_path = os.path.join(self._log_path_base,
'AndroidDevice%s' % self._normalized_serial)
self._debug_tag = self._serial
self.log = AndroidDeviceLoggerAdapter(logging.getLogger(),
{'tag': self.debug_tag})
self._build_info = None
self._is_rebooting = False
self.adb = adb.AdbProxy(serial)
self.fastboot = fastboot.FastbootProxy(serial)
if self.is_rootable:
self.root_adb()
self.services = service_manager.ServiceManager(self)
self.services.register(SERVICE_NAME_LOGCAT,
logcat.Logcat,
start_service=False)
self.services.register('snippets',
snippet_management_service.SnippetManagementService)
# Device info cache.
self._user_added_device_info = {}
def __repr__(self):
return '<AndroidDevice|%s>' % self.debug_tag
@property
def adb_logcat_file_path(self):
if self.services.has_service_by_name(SERVICE_NAME_LOGCAT):
return self.services.logcat.adb_logcat_file_path
@property
def _normalized_serial(self):
"""Normalized serial name for usage in log filename.
    Some Android emulators use ip:port as their serial names. Since `:` is
    not a valid filename character on Windows, the serial is sanitized first.
"""
if self._serial is None:
return None
return mobly_logger.sanitize_filename(self._serial)
@property
def device_info(self):
"""Information to be pulled into controller info.
The latest serial, model, and build_info are included. Additional info
can be added via `add_device_info`.
"""
info = {
'serial': self.serial,
'model': self.model,
'build_info': self.build_info,
'user_added_info': self._user_added_device_info
}
return info
def add_device_info(self, name, info):
"""Add information of the device to be pulled into controller info.
Adding the same info name the second time will override existing info.
Args:
name: string, name of this info.
info: serializable, content of the info.
"""
self._user_added_device_info.update({name: info})
@property
def sl4a(self):
"""Attribute for direct access of sl4a client.
Not recommended. This is here for backward compatibility reasons.
Preferred: directly access `ad.services.sl4a`.
"""
if self.services.has_service_by_name('sl4a'):
return self.services.sl4a
@property
def ed(self):
"""Attribute for direct access of sl4a's event dispatcher.
Not recommended. This is here for backward compatibility reasons.
Preferred: directly access `ad.services.sl4a.ed`.
"""
if self.services.has_service_by_name('sl4a'):
return self.services.sl4a.ed
@property
def debug_tag(self):
"""A string that represents a device object in debug info. Default value
is the device serial.
This will be used as part of the prefix of debugging messages emitted by
this device object, like log lines and the message of DeviceError.
"""
return self._debug_tag
@debug_tag.setter
def debug_tag(self, tag):
"""Setter for the debug tag.
By default, the tag is the serial of the device, but sometimes it may
be more descriptive to use a different tag of the user's choice.
Changing debug tag changes part of the prefix of debug info emitted by
this object, like log lines and the message of DeviceError.
Example:
By default, the device's serial number is used:
'INFO [AndroidDevice|abcdefg12345] One pending call ringing.'
The tag can be customized with `ad.debug_tag = 'Caller'`:
'INFO [AndroidDevice|Caller] One pending call ringing.'
"""
self.log.info('Logging debug tag set to "%s"', tag)
self._debug_tag = tag
self.log.extra['tag'] = tag
@property
def has_active_service(self):
"""True if any service is running on the device.
A service can be a snippet or logcat collection.
"""
return self.services.is_any_alive
@property
def log_path(self):
"""A string that is the path for all logs collected from this device.
"""
return self._log_path
@log_path.setter
def log_path(self, new_path):
"""Setter for `log_path`, use with caution."""
if self.has_active_service:
raise DeviceError(
self, 'Cannot change `log_path` when there is service running.')
old_path = self._log_path
if new_path == old_path:
return
    if os.path.exists(new_path) and os.listdir(new_path):
raise DeviceError(self,
'Logs already exist at %s, cannot override.' % new_path)
if os.path.exists(old_path):
# Remove new path so copytree doesn't complain.
shutil.rmtree(new_path, ignore_errors=True)
shutil.copytree(old_path, new_path)
shutil.rmtree(old_path, ignore_errors=True)
self._log_path = new_path
@property
def serial(self):
"""The serial number used to identify a device.
This is essentially the value used for adb's `-s` arg, which means it
can be a network address or USB bus number.
"""
return self._serial
def update_serial(self, new_serial):
"""Updates the serial number of a device.
The "serial number" used with adb's `-s` arg is not necessarily the
actual serial number. For remote devices, it could be a combination of
host names and port numbers.
This is used for when such identifier of remote devices changes during
a test. For example, when a remote device reboots, it may come back
with a different serial number.
This is NOT meant for switching the object to represent another device.
We intentionally did not make it a regular setter of the serial
property so people don't accidentally call this without understanding
the consequences.
Args:
new_serial: string, the new serial number for the same device.
Raises:
DeviceError: tries to update serial when any service is running.
"""
new_serial = str(new_serial)
if self.has_active_service:
raise DeviceError(
self,
'Cannot change device serial number when there is service running.')
if self._debug_tag == self.serial:
self._debug_tag = new_serial
self._serial = new_serial
self.adb.serial = new_serial
self.fastboot.serial = new_serial
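  # Illustrative sketch (not from the original Mobly source): a typical use of
  # `update_serial` is a remote device whose host:port identifier changed
  # after it re-registered, e.g. (values hypothetical):
  #
  #   ad.update_serial('remote-host:5556')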
@contextlib.contextmanager
def handle_reboot(self):
"""Properly manage the service life cycle when the device needs to
temporarily disconnect.
The device can temporarily lose adb connection due to user-triggered
reboot. Use this function to make sure the services
started by Mobly are properly stopped and restored afterwards.
For sample usage, see self.reboot().
"""
live_service_names = self.services.list_live_services()
self.services.stop_all()
# On rooted devices, system properties may change on reboot, so disable
# the `build_info` cache by setting `_is_rebooting` to True and
# repopulate it after reboot.
# Note, this logic assumes that instance variable assignment in Python
# is atomic; otherwise, `threading` data structures would be necessary.
# Additionally, nesting calls to `handle_reboot` while changing the
# read-only property values during reboot will result in stale values.
self._is_rebooting = True
try:
yield
finally:
self.wait_for_boot_completion()
# On boot completion, invalidate the `build_info` cache since any
# value it had from before boot completion is potentially invalid.
      # If the value gets set after the final invalidation and before
      # setting `_is_rebooting` to False, then that's okay because the
      # device has finished rebooting at that point, and values at that
      # point should be valid.
# If the reboot fails for some reason, then `_is_rebooting` is never
# set to False, which means the `build_info` cache remains disabled
      # until the next reboot. This is relatively okay because the
      # `build_info` cache only serves to minimize adb commands.
self._build_info = None
self._is_rebooting = False
if self.is_rootable:
self.root_adb()
self.services.start_services(live_service_names)
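  # Illustrative sketch (not from the original Mobly source): the canonical
  # usage pattern, mirroring what `reboot()` below does:
  #
  #   with ad.handle_reboot():
  #     ad.adb.reboot()
  #   # services started through `ad.services` are restored at this point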
@contextlib.contextmanager
def handle_usb_disconnect(self):
"""Properly manage the service life cycle when USB is disconnected.
The device can temporarily lose adb connection due to user-triggered
USB disconnection, e.g. the following cases can be handled by this
method:
* Power measurement: Using Monsoon device to measure battery consumption
would potentially disconnect USB.
* Unplug USB so device loses connection.
* ADB connection over WiFi and WiFi got disconnected.
    * Any other type of USB disconnection, as long as the snippet session
      can be kept alive while USB is disconnected (USB disconnection caused
      by a reboot is not one of these cases, because the snippet session
      cannot survive a reboot; use handle_reboot() instead).
Use this function to make sure the services started by Mobly are
properly reconnected afterwards.
Just like the usage of self.handle_reboot(), this method does not
automatically detect if the disconnection is because of a reboot or USB
disconnect. Users of this function should make sure the right handle_*
function is used to handle the correct type of disconnection.
This method also reconnects snippet event client. Therefore, the
callback objects created (by calling Async RPC methods) before
disconnection would still be valid and can be used to retrieve RPC
execution result after device got reconnected.
Example Usage:
.. code-block:: python
with ad.handle_usb_disconnect():
try:
# User action that triggers USB disconnect, could throw
# exceptions.
do_something()
finally:
# User action that triggers USB reconnect
action_that_reconnects_usb()
# Make sure device is reconnected before returning from this
# context
ad.adb.wait_for_device(timeout=SOME_TIMEOUT)
"""
live_service_names = self.services.list_live_services()
self.services.pause_all()
try:
yield
finally:
self.services.resume_services(live_service_names)
@property
def build_info(self):
"""Get the build info of this Android device, including build id and
build type.
This is not available if the device is in bootloader mode.
Returns:
A dict with the build info of this Android device, or None if the
device is in bootloader mode.
"""
if self.is_bootloader:
      self.log.error('Device is in fastboot mode, could not get build info.')
return
if self._build_info is None or self._is_rebooting:
info = {}
build_info = self.adb.getprops(CACHED_SYSTEM_PROPS)
info['build_id'] = build_info['ro.build.id']
info['build_type'] = build_info['ro.build.type']
info['build_version_codename'] = build_info.get(
'ro.build.version.codename', '')
info['build_version_sdk'] = build_info.get('ro.build.version.sdk', '')
info['build_product'] = build_info.get('ro.build.product', '')
info['build_characteristics'] = build_info.get('ro.build.characteristics',
'')
info['debuggable'] = build_info.get('ro.debuggable', '')
info['product_name'] = build_info.get('ro.product.name', '')
info['hardware'] = build_info.get('ro.hardware', '')
self._build_info = info
return info
return self._build_info
@property
def is_bootloader(self):
"""True if the device is in bootloader mode.
"""
return self.serial in list_fastboot_devices()
@property
def is_adb_root(self):
"""True if adb is running as root for this device.
"""
try:
return '0' == self.adb.shell('id -u').decode('utf-8').strip()
except adb.AdbError:
# Wait a bit and retry to work around adb flakiness for this cmd.
time.sleep(0.2)
return '0' == self.adb.shell('id -u').decode('utf-8').strip()
@property
def is_rootable(self):
return not self.is_bootloader and self.build_info['debuggable'] == '1'
@property
def model(self):
"""The Android code name for the device.
"""
    # If device is in bootloader mode, get model name from fastboot.
if self.is_bootloader:
out = self.fastboot.getvar('product').strip()
# 'out' is never empty because of the 'total time' message fastboot
# writes to stderr.
lines = out.decode('utf-8').split('\n', 1)
if lines:
tokens = lines[0].split(' ')
if len(tokens) > 1:
return tokens[1].lower()
return None
model = self.build_info['build_product'].lower()
if model == 'sprout':
return model
return self.build_info['product_name'].lower()
@property
def is_emulator(self):
"""Whether this device is probably an emulator.
Returns:
True if this is probably an emulator.
"""
if EMULATOR_SERIAL_REGEX.match(self.serial):
# If the device's serial follows 'emulator-dddd', then it's almost
# certainly an emulator.
return True
elif self.build_info['build_characteristics'] == 'emulator':
# If the device says that it's an emulator, then it's probably an
# emulator although some real devices apparently report themselves
# as emulators in addition to other things, so only return True on
# an exact match.
return True
elif self.build_info['hardware'] in ['ranchu', 'goldfish']:
# Ranchu and Goldfish are the hardware properties that the AOSP
# emulators report, so if the device says it's an AOSP emulator, it
# probably is one.
return True
else:
return False
def load_config(self, config):
"""Add attributes to the AndroidDevice object based on config.
Args:
config: A dictionary representing the configs.
Raises:
Error: The config is trying to overwrite an existing attribute.
"""
for k, v in config.items():
if hasattr(self, k) and k not in _ANDROID_DEVICE_SETTABLE_PROPS:
raise DeviceError(
self, ('Attribute %s already exists with value %s, cannot set '
'again.') % (k, getattr(self, k)))
setattr(self, k, v)
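  # Illustrative sketch (not from the original Mobly source): a config simply
  # becomes extra attributes on the object; the keys below are hypothetical:
  #
  #   ad.load_config({'label': 'caller', 'phone_number': '1234567890'})
  #   ad.label         # -> 'caller'
  #   ad.phone_number  # -> '1234567890'
  #
  # Any key clashing with an existing, non-settable attribute raises
  # DeviceError.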
def root_adb(self):
"""Change adb to root mode for this device if allowed.
If executed on a production build, adb will not be switched to root
mode per security restrictions.
"""
self.adb.root()
# `root` causes the device to temporarily disappear from adb.
# So we need to wait for the device to come back before proceeding.
self.adb.wait_for_device(timeout=DEFAULT_TIMEOUT_BOOT_COMPLETION_SECOND)
def load_snippet(self, name, package):
"""Starts the snippet apk with the given package name and connects.
Examples:
.. code-block:: python
ad.load_snippet(
name='maps', package='com.google.maps.snippets')
ad.maps.activateZoom('3')
Args:
name: string, the attribute name to which to attach the snippet
client. E.g. `name='maps'` attaches the snippet client to
`ad.maps`.
package: string, the package name of the snippet apk to connect to.
Raises:
SnippetError: Illegal load operations are attempted.
"""
# Should not load snippet with an existing attribute.
if hasattr(self, name):
raise SnippetError(
self,
'Attribute "%s" already exists, please use a different name.' % name)
self.services.snippets.add_snippet_client(name, package)
def unload_snippet(self, name):
"""Stops a snippet apk.
Args:
name: The attribute name the snippet server is attached with.
Raises:
SnippetError: The given snippet name is not registered.
"""
self.services.snippets.remove_snippet_client(name)
def generate_filename(self,
file_type,
time_identifier=None,
extension_name=None):
"""Generates a name for an output file related to this device.
The name follows the pattern:
{file type},{debug_tag},{serial},{model},{time identifier}.{ext}
"debug_tag" is only added if it's different from the serial. "ext" is
added if specified by user.
Args:
file_type: string, type of this file, like "logcat" etc.
time_identifier: string or RuntimeTestInfo. If a `RuntimeTestInfo`
is passed in, the `signature` of the test case will be used. If
a string is passed in, the string itself will be used.
Otherwise the current timestamp will be used.
extension_name: string, the extension name of the file.
Returns:
String, the filename generated.
"""
time_str = time_identifier
if time_identifier is None:
time_str = mobly_logger.get_log_file_timestamp()
elif isinstance(time_identifier, runtime_test_info.RuntimeTestInfo):
time_str = time_identifier.signature
filename_tokens = [file_type]
if self.debug_tag != self.serial:
filename_tokens.append(self.debug_tag)
filename_tokens.extend([self.serial, self.model, time_str])
filename_str = ','.join(filename_tokens)
if extension_name is not None:
filename_str = '%s.%s' % (filename_str, extension_name)
filename_str = mobly_logger.sanitize_filename(filename_str)
self.log.debug('Generated filename: %s', filename_str)
return filename_str
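  # Illustrative sketch (not from the original Mobly source): with
  # hypothetical values serial='abc123', model='angler' and the default
  # debug_tag (== serial), a call like
  #
  #   ad.generate_filename('logcat', extension_name='txt')
  #
  # yields something of the form 'logcat,abc123,angler,<timestamp>.txt'.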
def take_bug_report(self,
test_name=None,
begin_time=None,
timeout=300,
destination=None):
"""Takes a bug report on the device and stores it in a file.
Args:
test_name: Name of the test method that triggered this bug report.
begin_time: Timestamp of when the test started. If not set, then
this will default to the current time.
timeout: float, the number of seconds to wait for bugreport to
complete, default is 5min.
destination: string, path to the directory where the bugreport
should be saved.
Returns:
A string that is the absolute path to the bug report on the host.
"""
prefix = DEFAULT_BUG_REPORT_NAME
if test_name:
prefix = '%s,%s' % (DEFAULT_BUG_REPORT_NAME, test_name)
if begin_time is None:
begin_time = mobly_logger.get_log_file_timestamp()
new_br = True
try:
stdout = self.adb.shell('bugreportz -v').decode('utf-8')
# This check is necessary for builds before N, where adb shell's ret
# code and stderr are not propagated properly.
if 'not found' in stdout:
new_br = False
except adb.AdbError:
new_br = False
if destination is None:
destination = os.path.join(self.log_path, 'BugReports')
br_path = utils.abs_path(destination)
utils.create_dir(br_path)
filename = self.generate_filename(prefix, str(begin_time), 'txt')
if new_br:
filename = filename.replace('.txt', '.zip')
full_out_path = os.path.join(br_path, filename)
# in case device restarted, wait for adb interface to return
self.wait_for_boot_completion()
self.log.debug('Start taking bugreport.')
if new_br:
out = self.adb.shell('bugreportz', timeout=timeout).decode('utf-8')
if not out.startswith('OK'):
raise DeviceError(self, 'Failed to take bugreport: %s' % out)
br_out_path = out.split(':')[1].strip()
self.adb.pull([br_out_path, full_out_path])
self.adb.shell(['rm', br_out_path])
else:
# shell=True as this command redirects the stdout to a local file
# using shell redirection.
self.adb.bugreport(' > "%s"' % full_out_path, shell=True, timeout=timeout)
self.log.debug('Bugreport taken at %s.', full_out_path)
return full_out_path
def take_screenshot(self, destination, prefix='screenshot'):
"""Takes a screenshot of the device.
Args:
destination: string, full path to the directory to save in.
prefix: string, prefix file name of the screenshot.
Returns:
string, full path to the screenshot file on the host.
"""
filename = self.generate_filename(prefix, extension_name='png')
device_path = os.path.join('/storage/emulated/0/', filename)
self.adb.shell(['screencap', '-p', device_path],
timeout=TAKE_SCREENSHOT_TIMEOUT_SECOND)
utils.create_dir(destination)
self.adb.pull([device_path, destination])
pic_path = os.path.join(destination, filename)
self.log.debug('Screenshot taken, saved on the host: %s', pic_path)
self.adb.shell(['rm', device_path])
return pic_path
def run_iperf_client(self, server_host, extra_args=''):
"""Start iperf client on the device.
    Returns status as True if the iperf client started successfully, along
    with the data flow information as results.
Args:
server_host: Address of the iperf server.
extra_args: A string representing extra arguments for iperf client,
e.g. '-i 1 -t 30'.
Returns:
      status: True if the iperf client started successfully.
      results: A list of strings with the data flow information.
"""
out = self.adb.shell('iperf3 -c %s %s' % (server_host, extra_args))
clean_out = str(out, 'utf-8').strip().split('\n')
if 'error' in clean_out[0].lower():
return False, clean_out
return True, clean_out
def wait_for_boot_completion(self,
timeout=DEFAULT_TIMEOUT_BOOT_COMPLETION_SECOND):
"""Waits for Android framework to broadcast ACTION_BOOT_COMPLETED.
This function times out after 15 minutes.
Args:
      timeout: float, the number of seconds to wait before timing out. If
        not specified, defaults to DEFAULT_TIMEOUT_BOOT_COMPLETION_SECOND
        (15 minutes).
"""
deadline = time.perf_counter() + timeout
self.adb.wait_for_device(timeout=timeout)
while time.perf_counter() < deadline:
try:
if self.is_boot_completed():
return
except (adb.AdbError, adb.AdbTimeoutError):
# adb shell calls may fail during certain period of booting
# process, which is normal. Ignoring these errors.
pass
time.sleep(5)
raise DeviceError(self, 'Booting process timed out')
def is_boot_completed(self):
"""Checks if device boot is completed by verifying system property."""
completed = self.adb.getprop('sys.boot_completed')
if completed == '1':
self.log.debug('Device boot completed.')
return True
return False
def is_adb_detectable(self):
"""Checks if USB is on and device is ready by verifying adb devices."""
serials = list_adb_devices()
if self.serial in serials:
self.log.debug('Is now adb detectable.')
return True
return False
def reboot(self):
"""Reboots the device.
    Generally one should use this method to reboot the device instead of
    directly calling `adb.reboot`, because this method gracefully handles
    the teardown and restoration of running services.
This method is blocking and only returns when the reboot has completed
and the services restored.
Raises:
Error: Waiting for completion timed out.
"""
if self.is_bootloader:
self.fastboot.reboot()
return
with self.handle_reboot():
self.adb.reboot()
def __getattr__(self, name):
"""Tries to return a snippet client registered with `name`.
This is for backward compatibility of direct accessing snippet clients.
"""
client = self.services.snippets.get_snippet_client(name)
if client:
return client
return self.__getattribute__(name)
# Properties in AndroidDevice that have setters.
# This line has to live below the AndroidDevice code.
_ANDROID_DEVICE_SETTABLE_PROPS = utils.get_settable_properties(AndroidDevice)
class AndroidDeviceLoggerAdapter(logging.LoggerAdapter):
"""A wrapper class that adds a prefix to each log line.
Usage:
.. code-block:: python
my_log = AndroidDeviceLoggerAdapter(logging.getLogger(), {
'tag': <custom tag>
})
Then each log line added by my_log will have a prefix
'[AndroidDevice|<tag>]'
"""
def process(self, msg, kwargs):
msg = _DEBUG_PREFIX_TEMPLATE % (self.extra['tag'], msg)
return (msg, kwargs)
| apache-2.0 |
benjaminbrinkman/open-ad-platform | .venv/lib/python3.4/site-packages/_markerlib/markers.py | 1769 | 3979 | # -*- coding: utf-8 -*-
"""Interpret PEP 345 environment markers.
EXPR [in|==|!=|not in] EXPR [or|and] ...
where EXPR belongs to any of those:
python_version = '%s.%s' % (sys.version_info[0], sys.version_info[1])
python_full_version = sys.version.split()[0]
os.name = os.name
sys.platform = sys.platform
platform.version = platform.version()
platform.machine = platform.machine()
platform.python_implementation = platform.python_implementation()
a free string, like '2.6', or 'win32'
"""
__all__ = ['default_environment', 'compile', 'interpret']
import ast
import os
import platform
import sys
import weakref
_builtin_compile = compile
try:
from platform import python_implementation
except ImportError:
if os.name == "java":
# Jython 2.5 has ast module, but not platform.python_implementation() function.
def python_implementation():
return "Jython"
else:
raise
# restricted set of variables
_VARS = {'sys.platform': sys.platform,
'python_version': '%s.%s' % sys.version_info[:2],
# FIXME parsing sys.platform is not reliable, but there is no other
# way to get e.g. 2.7.2+, and the PEP is defined with sys.version
'python_full_version': sys.version.split(' ', 1)[0],
'os.name': os.name,
'platform.version': platform.version(),
'platform.machine': platform.machine(),
'platform.python_implementation': python_implementation(),
'extra': None # wheel extension
}
for var in list(_VARS.keys()):
if '.' in var:
_VARS[var.replace('.', '_')] = _VARS[var]
def default_environment():
"""Return copy of default PEP 385 globals dictionary."""
return dict(_VARS)
class ASTWhitelist(ast.NodeTransformer):
def __init__(self, statement):
self.statement = statement # for error messages
ALLOWED = (ast.Compare, ast.BoolOp, ast.Attribute, ast.Name, ast.Load, ast.Str)
# Bool operations
ALLOWED += (ast.And, ast.Or)
# Comparison operations
ALLOWED += (ast.Eq, ast.Gt, ast.GtE, ast.In, ast.Is, ast.IsNot, ast.Lt, ast.LtE, ast.NotEq, ast.NotIn)
def visit(self, node):
"""Ensure statement only contains allowed nodes."""
if not isinstance(node, self.ALLOWED):
raise SyntaxError('Not allowed in environment markers.\n%s\n%s' %
(self.statement,
(' ' * node.col_offset) + '^'))
return ast.NodeTransformer.visit(self, node)
def visit_Attribute(self, node):
"""Flatten one level of attribute access."""
new_node = ast.Name("%s.%s" % (node.value.id, node.attr), node.ctx)
return ast.copy_location(new_node, node)
def parse_marker(marker):
tree = ast.parse(marker, mode='eval')
new_tree = ASTWhitelist(marker).generic_visit(tree)
return new_tree
def compile_marker(parsed_marker):
return _builtin_compile(parsed_marker, '<environment marker>', 'eval',
dont_inherit=True)
_cache = weakref.WeakValueDictionary()
def compile(marker):
"""Return compiled marker as a function accepting an environment dict."""
try:
return _cache[marker]
except KeyError:
pass
if not marker.strip():
def marker_fn(environment=None, override=None):
""""""
return True
else:
compiled_marker = compile_marker(parse_marker(marker))
def marker_fn(environment=None, override=None):
"""override updates environment"""
if override is None:
override = {}
if environment is None:
environment = default_environment()
environment.update(override)
return eval(compiled_marker, environment)
marker_fn.__doc__ = marker
_cache[marker] = marker_fn
return _cache[marker]
def interpret(marker, environment=None):
return compile(marker)(environment)
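# Illustrative sketch (not part of the original module): evaluates one marker
# against the running interpreter and forces another via `override` (override
# keys mirror the names in `_VARS`).
def _example_interpret():
    posix = interpret("os.name == 'posix'")
    forced = compile("sys.platform == 'win32'")(override={'sys.platform': 'win32'})
    return posix, forced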
| mit |
neilhan/tensorflow | tensorflow/python/ops/histogram_ops.py | 12 | 3799 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=g-short-docstring-punctuation
"""## Histograms
@@histogram_fixed_width
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import clip_ops
from tensorflow.python.ops import math_ops
def histogram_fixed_width(values,
value_range,
nbins=100,
dtype=dtypes.int32,
name=None):
"""Return histogram of values.
Given the tensor `values`, this operation returns a rank 1 histogram counting
the number of entries in `values` that fell into every bin. The bins are
equal width and determined by the arguments `value_range` and `nbins`.
Args:
values: Numeric `Tensor`.
value_range: Shape [2] `Tensor`. new_values <= value_range[0] will be
mapped to hist[0], values >= value_range[1] will be mapped to hist[-1].
Must be same dtype as new_values.
nbins: Scalar `int32 Tensor`. Number of histogram bins.
dtype: dtype for returned histogram.
name: A name for this operation (defaults to 'histogram_fixed_width').
Returns:
A 1-D `Tensor` holding histogram of values.
Examples:
```python
# Bins will be: (-inf, 1), [1, 2), [2, 3), [3, 4), [4, inf)
nbins = 5
value_range = [0.0, 5.0]
new_values = [-1.0, 0.0, 1.5, 2.0, 5.0, 15]
  with tf.Session() as sess:
hist = tf.histogram_fixed_width(new_values, value_range, nbins=5)
variables.initialize_all_variables().run()
sess.run(hist) => [2, 1, 1, 0, 2]
```
"""
with ops.name_scope(name, 'histogram_fixed_width',
[values, value_range, nbins]) as scope:
values = ops.convert_to_tensor(values, name='values')
values = array_ops.reshape(values, [-1])
value_range = ops.convert_to_tensor(value_range, name='value_range')
nbins = ops.convert_to_tensor(nbins, dtype=dtypes.int32, name='nbins')
nbins_float = math_ops.to_float(nbins)
# Map tensor values that fall within value_range to [0, 1].
scaled_values = math_ops.truediv(values - value_range[0],
value_range[1] - value_range[0],
name='scaled_values')
# map tensor values within the open interval value_range to {0,.., nbins-1},
# values outside the open interval will be zero or less, or nbins or more.
indices = math_ops.floor(nbins_float * scaled_values, name='indices')
# Clip edge cases (e.g. value = value_range[1]) or "outliers."
indices = math_ops.cast(
clip_ops.clip_by_value(indices, 0, nbins_float - 1), dtypes.int32)
# TODO(langmore) This creates an array of ones to add up and place in the
# bins. This is inefficient, so replace when a better Op is available.
return math_ops.unsorted_segment_sum(
array_ops.ones_like(indices, dtype=dtype),
indices,
nbins,
name=scope)
| apache-2.0 |
ccpgames/eve-metrics | web2py/applications/admin/languages/ru-ru.py | 1 | 23612 | # coding: utf8
{
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"Update" ist ein optionaler Ausdruck wie "Feld1 = \'newvalue". JOIN Ergebnisse können nicht aktualisiert oder gelöscht werden',
'%Y-%m-%d': '%Y-%m-%d',
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
'%s rows deleted': '%s строк удалено',
'%s rows updated': '%s строк обновлено',
'(requires internet access)': '(требует подключения к интернету)',
'(something like "it-it")': '(наподобие "it-it")',
'A new version of web2py is available': 'Доступна новая версия web2py',
'A new version of web2py is available: %s': 'Доступна новая версия web2py: %s',
'A new version of web2py is available: Version 1.85.3 (2010-09-18 07:07:46)\n': 'Доступна новая версия web2py: Версия 1.85.3 (2010-09-18 07:07:46)\n',
'ATTENTION: Login requires a secure (HTTPS) connection or running on localhost.': 'ВНИМАНИЕ: Для входа требуется безопасное (HTTPS) соединение либо запуск на localhost.',
'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.': 'ВНИМАНИЕ: Тестирование не потокобезопасно, поэтому не запускайте несколько тестов параллельно.',
'ATTENTION: This is an experimental feature and it needs more testing.': 'ВНИМАНИЕ: Это экспериментальная возможность и требует тестирования.',
'ATTENTION: you cannot edit the running application!': 'ВНИМАНИЕ: Вы не можете редактировать работающее приложение!',
'Abort': 'Отмена',
'About': 'О',
'About application': 'О приложении',
'Additional code for your application': 'Допольнительный код для вашего приложения',
'Admin is disabled because insecure channel': 'Админпанель выключена из-за небезопасного соединения',
'Admin is disabled because unsecure channel': 'Админпанель выключена из-за небезопасного соединения',
'Admin language': 'Язык админпанели',
'Administrator Password:': 'Пароль администратора:',
'Application name:': 'Название приложения:',
'Are you sure you want to delete file "%s"?': 'Вы действительно хотите удалить файл "%s"?',
'Are you sure you want to uninstall application "%s"': 'Вы действительно хотите удалить приложение "%s"',
'Are you sure you want to uninstall application "%s"?': 'Вы действительно хотите удалить приложение "%s"?',
'Are you sure you want to upgrade web2py now?': 'Вы действительно хотите обновить web2py сейчас?',
'Authentication': 'Аутентификация',
'Available databases and tables': 'Доступные базы данных и таблицы',
'Cannot be empty': 'Не может быть пустым',
'Cannot compile: there are errors in your app. Debug it, correct errors and try again.': 'Невозможно компилировать: в приложении присутствуют ошибки. Отладьте его, исправьте ошибки и попробуйте заново.',
'Change Password': 'Изменить пароль',
'Check to delete': 'Поставьте для удаления',
'Checking for upgrades...': 'Проверка обновлений...',
'Client IP': 'IP клиента',
'Controller': 'Контроллер',
'Controllers': 'Контроллеры',
'Copyright': 'Copyright',
'Create new simple application': 'Создать новое простое приложение',
'Current request': 'Текущий запрос',
'Current response': 'Текущий ответ',
'Current session': 'Текущая сессия',
'DB Model': 'Модель БД',
'DESIGN': 'ДИЗАЙН',
'Database': 'База данных',
'Date and Time': 'Дата и время',
'Delete': 'Удалить',
'Delete:': 'Удалить:',
'Deploy on Google App Engine': 'Развернуть на Google App Engine',
'Description': 'Описание',
'Design for': 'Дизайн для',
'E-mail': 'E-mail',
'EDIT': 'ПРАВКА',
'Edit': 'Правка',
'Edit Profile': 'Правка профиля',
'Edit This App': 'Правка данного приложения',
'Edit application': 'Правка приложения',
'Edit current record': 'Правка текущей записи',
'Editing Language file': 'Правка языкового файла',
'Editing file': 'Правка файла',
'Editing file "%s"': 'Правка файла "%s"',
'Enterprise Web Framework': 'Enterprise Web Framework',
'Error logs for "%(app)s"': 'Журнал ошибок для "%(app)s"',
'Exception instance attributes': 'Атрибуты объекта исключения',
'Expand Abbreviation': 'Раскрыть аббревиатуру',
'First name': 'Имя',
'Functions with no doctests will result in [passed] tests.': 'Функции без doctest будут давать [прошел] в тестах.',
'Go to Matching Pair': 'К подходящей паре',
'Group ID': 'ID группы',
'Hello World': 'Привет, Мир',
'If the report above contains a ticket number it indicates a failure in executing the controller, before any attempt to execute the doctests. This is usually due to an indentation error or an error outside function code.\nA green title indicates that all tests (if defined) passed. In this case test results are not shown.': 'Если отчет выше содержит номер ошибки, это указывает на ошибку при работе контроллера, до попытки выполнить doctest. Причиной чаще является неверные отступы или ошибки в коде вне функции. \nЗеленый заголовок указывает на успешное выполнение всех тестов. В этом случае результаты тестов не показываются.',
'If you answer "yes", be patient, it may take a while to download': 'Если вы ответили "Да", потерпите, загрузка может потребовать времени',
'If you answer yes, be patient, it may take a while to download': 'Если вы ответили "Да", потерпите, загрузка может потребовать времени',
'Import/Export': 'Импорт/Экспорт',
'Index': 'Индекс',
'Installed applications': 'Установленные приложения',
'Internal State': 'Внутренний статус',
'Invalid Query': 'Неверный запрос',
'Invalid action': 'Неверное действие',
'Invalid email': 'Неверный email',
'Key bindings': 'Связывание клавиш',
'Key bindings for ZenConding Plugin': 'Связывание клавиш для плагина ZenConding',
'Language files (static strings) updated': 'Языковые файлы (статичные строки) обновлены',
'Languages': 'Языки',
'Last name': 'Фамилия',
'Last saved on:': 'Последнее сохранение:',
'Layout': 'Верстка',
'License for': 'Лицензия для',
'Login': 'Логин',
'Login to the Administrative Interface': 'Вход в интерфейс администратора',
'Logout': 'Выход',
'Lost Password': 'Забыли пароль',
'Main Menu': 'Главное меню',
'Match Pair': 'Найти пару',
'Menu Model': 'Модель меню',
'Merge Lines': 'Объединить линии',
'Models': 'Модели',
'Modules': 'Модули',
'NO': 'НЕТ',
'Name': 'Название',
'New Record': 'Новая запись',
'New application wizard': 'Мастер нового приложения',
'New simple application': 'Новое простое приложение',
'Next Edit Point': 'Следующее место правки',
'No databases in this application': 'В приложении нет базы данных',
'Origin': 'Оригинал',
'Original/Translation': 'Оригинал/Перевод',
'Password': 'Пароль',
'Peeking at file': 'Просмотр',
'Plugin "%s" in application': 'Плагин "%s" в приложении',
'Plugins': 'Плагины',
'Powered by': 'Обеспечен',
'Previous Edit Point': 'Предыдущее место правки',
'Query:': 'Запрос:',
'Record ID': 'ID записи',
'Register': 'Зарегистрироваться',
'Registration key': 'Ключ регистрации',
'Reset Password key': 'Сброс пароля',
'Resolve Conflict file': 'Решить конфликт в файле',
'Role': 'Роль',
'Rows in table': 'Строк в таблице',
'Rows selected': 'Выбрано строк',
'Save via Ajax': 'Сохранить через Ajax',
'Saved file hash:': 'Хэш сохраненного файла:',
'Searching:': 'Поиск:',
'Static files': 'Статические файлы',
'Stylesheet': 'Таблицы стилей',
'Sure you want to delete this object?': 'Действительно хотите удалить данный объект?',
'TM': 'TM',
'Table name': 'Название таблицы',
'Testing application': 'Тест приложения',
'Testing controller': 'Тест контроллера',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': '"query" является условием вида "db.table1.field1 == \'значение\'". Что-либо типа "db.table1.field1 db.table2.field2 ==" ведет к SQL JOIN.',
'The application logic, each URL path is mapped in one exposed function in the controller': 'Логика приложения, каждый URL отображается к одной функции в контроллере',
'The data representation, define database tables and sets': 'Представление данных, определите таблицы базы данных и наборы',
'The output of the file is a dictionary that was rendered by the view': 'Выводом файла является словарь, который создан в виде',
'The presentations layer, views are also known as templates': 'Слой презентации, виды так же известны, как шаблоны',
'There are no controllers': 'Отсутствуют контроллеры',
'There are no models': 'Отсутствуют модели',
'There are no modules': 'Отсутствуют модули',
'There are no plugins': 'Отсутствуют плагины',
'There are no static files': 'Отсутствуют статичные файлы',
'There are no translators, only default language is supported': 'Отсутствуют переводчики, поддерживается только стандартный язык',
'There are no views': 'Отсутствуют виды',
'These files are served without processing, your images go here': 'Эти файлы обслуживаются без обработки, ваши изображения попадут сюда',
'This is a copy of the scaffolding application': 'Это копия сгенерированного приложения',
'This is the %(filename)s template': 'Это шаблон %(filename)s',
'Ticket': 'Тикет',
'Timestamp': 'Время',
'To create a plugin, name a file/folder plugin_[name]': 'Для создания плагина назовите файл/папку plugin_[название]',
'Translation strings for the application': 'Строки перевода для приложения',
'Unable to check for upgrades': 'Невозможно проверить обновления',
'Unable to download': 'Невозможно загрузить',
'Unable to download app': 'Невозможно загрузить',
'Update:': 'Обновить:',
'Upload & install packed application': 'Загрузить и установить приложение в архиве',
'Upload a package:': 'Загрузить пакет:',
'Upload existing application': 'Загрузить существующее приложение',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Используйте (...)&(...) для AND, (...)|(...) для OR, и ~(...) для NOT при создании сложных запросов.',
'Use an url:': 'Используйте url:',
'User ID': 'ID пользователя',
'Version': 'Версия',
'View': 'Вид',
'Views': 'Виды',
'Welcome %s': 'Добро пожаловать, %s',
'Welcome to web2py': 'Добро пожаловать в web2py',
'Which called the function': 'Который вызвал функцию',
'Wrap with Abbreviation': 'Заключить в аббревиатуру',
'YES': 'ДА',
'You are successfully running web2py': 'Вы успешно запустили web2py',
'You can modify this application and adapt it to your needs': 'Вы можете изменить это приложение и подогнать под свои нужды',
'You visited the url': 'Вы посетили URL',
'About': 'О',
'additional code for your application': 'добавочный код для вашего приложения',
'admin disabled because no admin password': 'админка отключена, потому что отсутствует пароль администратора',
'admin disabled because not supported on google apps engine': 'админка отключена, т.к. не поддерживается на google app engine',
'admin disabled because unable to access password file': 'админка отключена, т.к. невозможно получить доступ к файлу с паролями ',
'administrative interface': 'интерфейс администратора',
'and rename it (required):': 'и переименуйте его (необходимо):',
'and rename it:': ' и переименуйте его:',
'appadmin': 'appadmin',
'appadmin is disabled because insecure channel': 'Appadmin отключен, т.к. соединение не безопасно',
'application "%s" uninstalled': 'Приложение "%s" удалено',
'application compiled': 'Приложение скомпилировано',
'application is compiled and cannot be designed': 'Приложение скомпилировано и дизайн не может быть изменен',
'arguments': 'аргументы',
'back': 'назад',
'beautify': 'Раскрасить',
'cache': 'кэш',
'cache, errors and sessions cleaned': 'Кэш, ошибки и сессии очищены',
'call': 'вызов',
'cannot create file': 'Невозможно создать файл',
'cannot upload file "%(filename)s"': 'Невозможно загрузить файл "%(filename)s"',
'Change admin password': 'Изменить пароль администратора',
'change password': 'изменить пароль',
'check all': 'проверить все',
'Check for upgrades': 'проверить обновления',
'Clean': 'Очистить',
'click here for online examples': 'нажмите здесь для онлайн примеров',
'click here for the administrative interface': 'нажмите здесь для интерфейса администратора',
'click to check for upgrades': 'нажмите для проверки обновления',
'code': 'код',
'collapse/expand all': 'свернуть/развернуть все',
'Compile': 'Компилировать',
'compiled application removed': 'скомпилированное приложение удалено',
'controllers': 'контроллеры',
'Create': 'Создать',
'create file with filename:': 'Создать файл с названием:',
'create new application:': 'создать новое приложение:',
'created by': 'создано',
'crontab': 'crontab',
'currently running': 'сейчас работает',
'currently saved or': 'сейчас сохранено или',
'customize me!': 'настрой меня!',
'data uploaded': 'данные загружены',
'database': 'база данных',
'database %s select': 'Выбор базы данных %s ',
'database administration': 'администраторирование базы данных',
'db': 'бд',
'defines tables': 'определить таблицы',
'delete': 'удалить',
'delete all checked': 'удалить выбранные',
'delete plugin': 'удалить плагин',
'Deploy': 'Развернуть',
'design': 'дизайн',
'direction: ltr': 'направление: ltr',
'documentation': 'документация',
'done!': 'выполнено!',
'download layouts': 'загрузить шаблоны',
'download plugins': 'загрузить плагины',
'Edit': 'Правка',
'edit controller': 'правка контроллера',
'edit profile': 'правка профиля',
'edit views:': 'правка видов:',
'Errors': 'Ошибка',
'escape': 'escape',
'export as csv file': 'Экспорт в CSV',
'exposes': 'открывает',
'extends': 'расширяет',
'failed to reload module': 'невозможно загрузить модуль',
'file "%(filename)s" created': 'файл "%(filename)s" создан',
'file "%(filename)s" deleted': 'файл "%(filename)s" удален',
'file "%(filename)s" uploaded': 'файл "%(filename)s" загружен',
'file "%(filename)s" was not deleted': 'файл "%(filename)s" не был удален',
'file "%s" of %s restored': 'файл "%s" из %s восстановлен',
'file changed on disk': 'файл изменился на диске',
'file does not exist': 'файл не существует',
'file saved on %(time)s': 'файл сохранен %(time)s',
'file saved on %s': 'файл сохранен %s',
'files': 'файлы',
'filter': 'фильтр',
'Help': 'Помощь',
'htmledit': 'htmledit',
'includes': 'включает',
'index': 'index',
'insert new': 'вставить новый',
'insert new %s': 'вставить новый %s',
'Install': 'Установить',
'internal error': 'внутренняя ошибка',
'invalid password': 'неверный пароль',
'invalid request': 'неверный запрос',
'invalid ticket': 'неверный тикет',
'language file "%(filename)s" created/updated': 'Языковой файл "%(filename)s" создан/обновлен',
'languages': 'языки',
'languages updated': 'языки обновлены',
'loading...': 'загрузка...',
'located in the file': 'расположенный в файле',
'login': 'логин',
'Logout': 'выход',
'lost password?': 'Пароль утерян?',
'merge': 'объединить',
'models': 'модели',
'modules': 'модули',
'new application "%s" created': 'новое приложение "%s" создано',
'new record inserted': 'новая запись вставлена',
'next 100 rows': 'следующие 100 строк',
'or import from csv file': 'или импорт из csv файла',
'or provide app url:': 'или URL приложения:',
'or provide application url:': 'или URL приложения:',
'Overwrite installed app': 'Переписать на установленное приложение',
'Pack all': 'упаковать все',
'Pack compiled': 'Архив скомпилирован',
'pack plugin': 'Упаковать плагин',
'please wait!': 'подождите, пожалуйста!',
'plugins': 'плагины',
'previous 100 rows': 'предыдущие 100 строк',
'record': 'запись',
'record does not exist': 'запись не существует',
'record id': 'id записи',
'register': 'зарегистрироваться',
'Remove compiled': 'Удалить скомпилированное',
'restore': 'восстановить',
'revert': 'откатиться',
'save': 'сохранить',
'selected': 'выбрано',
'session expired': 'сессия истекла',
'shell': 'shell',
'Site': 'сайт',
'some files could not be removed': 'некоторые файлы нельзя удалить',
'Start wizard': 'запустить мастер',
'state': 'статус',
'static': 'статичные файлы',
'submit': 'Отправить',
'table': 'таблица',
'test': 'тест',
'test_def': 'test_def',
'test_for': 'test_for',
'test_if': 'test_if',
'test_try': 'test_try',
'the application logic, each URL path is mapped in one exposed function in the controller': 'Логика приложения, каждый URL отображается в открытую функцию в контроллере',
'the data representation, define database tables and sets': 'представление данных, определить таблицы и наборы',
'the presentations layer, views are also known as templates': 'слой представления, виды известные так же как шаблоны',
'these files are served without processing, your images go here': 'Эти файлы обслуживаются без обработки, ваши изображения попадут сюда',
'to previous version.': 'на предыдущую версию.',
'translation strings for the application': 'строки перевода для приложения',
'try': 'try',
'try something like': 'попробовать что-либо вида',
'unable to create application "%s"': 'невозможно создать приложение "%s"',
'unable to delete file "%(filename)s"': 'невозможно удалить файл "%(filename)s"',
'unable to parse csv file': 'невозможно разобрать файл csv',
'unable to uninstall "%s"': 'невозможно удалить "%s"',
'uncheck all': 'снять выбор всего',
'Uninstall': 'Удалить',
'update': 'обновить',
'update all languages': 'обновить все языки',
'upgrade web2py now': 'обновить web2py сейчас',
'upload': 'загрузить',
'upload application:': 'загрузить файл:',
'upload file:': 'загрузить файл:',
'upload plugin file:': 'загрузить файл плагина:',
'user': 'пользователь',
'variables': 'переменные',
'versioning': 'версии',
'view': 'вид',
'views': 'виды',
'web2py Recent Tweets': 'последние твиты по web2py',
'web2py is up to date': 'web2py обновлен',
'xml': 'xml',
}
| mit |
egabancho/invenio | invenio/modules/oauth2server/provider.py | 1 | 3287 | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2014 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Configuration of flask-oauthlib provider."""
from datetime import datetime, timedelta
from flask import current_app
from flask.ext.login import current_user
from flask_oauthlib.provider import OAuth2Provider
from invenio.ext.sqlalchemy import db
from invenio.modules.accounts.models import User
from .models import Token, Client
oauth2 = OAuth2Provider()
@oauth2.usergetter
def get_user(username, password, *args, **kwargs):
"""Get user for grant type password.
Needed for grant type 'password'. Note, grant type password is by default
disabled.
"""
user = User.query.filter_by(username=username).first()
    if user and user.check_password(password):
return user
@oauth2.tokengetter
def get_token(access_token=None, refresh_token=None):
"""Load an access token.
Add support for personal access tokens compared to flask-oauthlib
"""
if access_token:
t = Token.query.filter_by(access_token=access_token).first()
if t and t.is_personal:
t.expires = datetime.utcnow() + timedelta(
seconds=int(current_app.config.get(
'OAUTH2_PROVIDER_TOKEN_EXPIRES_IN'
))
)
return t
elif refresh_token:
return Token.query.join(Token.client).filter(
Token.refresh_token == refresh_token,
Token.is_personal == False,
Client.is_confidential == True,
).first()
else:
return None
@oauth2.tokensetter
def save_token(token, request, *args, **kwargs):
"""Token persistence."""
    # Exclude the personal access tokens which don't expire.
uid = request.user.id if request.user else current_user.get_id()
tokens = Token.query.filter_by(
client_id=request.client.client_id,
user_id=uid,
is_personal=False,
)
# make sure that every client has only one token connected to a user
if tokens:
for tk in tokens:
db.session.delete(tk)
db.session.commit()
expires_in = token.get('expires_in')
expires = datetime.utcnow() + timedelta(seconds=int(expires_in))
tok = Token(
access_token=token['access_token'],
refresh_token=token.get('refresh_token'),
token_type=token['token_type'],
_scopes=token['scope'],
expires=expires,
client_id=request.client.client_id,
user_id=uid,
is_personal=False,
)
db.session.add(tok)
db.session.commit()
return tok
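# Illustrative sketch (not part of the original module): the provider defined
# above is expected to be bound to the Flask app elsewhere in Invenio,
# roughly along the lines of:
#
#   from invenio.modules.oauth2server.provider import oauth2
#   oauth2.init_app(app)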
| gpl-2.0 |
dsquareindia/scikit-learn | sklearn/decomposition/tests/test_fastica.py | 70 | 7808 | """
Test the fastica algorithm.
"""
import itertools
import warnings
import numpy as np
from scipy import stats
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_less
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_warns
from sklearn.utils.testing import assert_raises
from sklearn.decomposition import FastICA, fastica, PCA
from sklearn.decomposition.fastica_ import _gs_decorrelation
from sklearn.externals.six import moves
def center_and_norm(x, axis=-1):
""" Centers and norms x **in place**
Parameters
-----------
x: ndarray
Array with an axis of observations (statistical units) measured on
random variables.
axis: int, optional
Axis along which the mean and variance are calculated.
"""
x = np.rollaxis(x, axis)
x -= x.mean(axis=0)
x /= x.std(axis=0)
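# Illustrative sketch (not part of the original test module):
# `center_and_norm` modifies its input in place; afterwards each variable
# (each row here) has ~zero mean and ~unit variance.
def _example_center_and_norm():
    rng = np.random.RandomState(0)
    x = rng.randn(2, 1000)
    center_and_norm(x)
    return x.mean(axis=1), x.std(axis=1)  # approximately 0 and 1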
def test_gs():
# Test gram schmidt orthonormalization
# generate a random orthogonal matrix
rng = np.random.RandomState(0)
W, _, _ = np.linalg.svd(rng.randn(10, 10))
w = rng.randn(10)
_gs_decorrelation(w, W, 10)
assert_less((w ** 2).sum(), 1.e-10)
w = rng.randn(10)
u = _gs_decorrelation(w, W, 5)
tmp = np.dot(u, W.T)
assert_less((tmp[:5] ** 2).sum(), 1.e-10)
def test_fastica_simple(add_noise=False):
# Test the FastICA algorithm on very simple data.
rng = np.random.RandomState(0)
# scipy.stats uses the global RNG:
np.random.seed(0)
n_samples = 1000
# Generate two sources:
s1 = (2 * np.sin(np.linspace(0, 100, n_samples)) > 0) - 1
s2 = stats.t.rvs(1, size=n_samples)
s = np.c_[s1, s2].T
center_and_norm(s)
s1, s2 = s
# Mixing angle
phi = 0.6
mixing = np.array([[np.cos(phi), np.sin(phi)],
[np.sin(phi), -np.cos(phi)]])
m = np.dot(mixing, s)
if add_noise:
m += 0.1 * rng.randn(2, 1000)
center_and_norm(m)
# function as fun arg
def g_test(x):
return x ** 3, (3 * x ** 2).mean(axis=-1)
algos = ['parallel', 'deflation']
nls = ['logcosh', 'exp', 'cube', g_test]
whitening = [True, False]
for algo, nl, whiten in itertools.product(algos, nls, whitening):
if whiten:
k_, mixing_, s_ = fastica(m.T, fun=nl, algorithm=algo)
assert_raises(ValueError, fastica, m.T, fun=np.tanh,
algorithm=algo)
else:
X = PCA(n_components=2, whiten=True).fit_transform(m.T)
k_, mixing_, s_ = fastica(X, fun=nl, algorithm=algo, whiten=False)
assert_raises(ValueError, fastica, X, fun=np.tanh,
algorithm=algo)
s_ = s_.T
# Check that the mixing model described in the docstring holds:
if whiten:
assert_almost_equal(s_, np.dot(np.dot(mixing_, k_), m))
center_and_norm(s_)
s1_, s2_ = s_
# Check to see if the sources have been estimated
# in the wrong order
if abs(np.dot(s1_, s2)) > abs(np.dot(s1_, s1)):
s2_, s1_ = s_
s1_ *= np.sign(np.dot(s1_, s1))
s2_ *= np.sign(np.dot(s2_, s2))
# Check that we have estimated the original sources
if not add_noise:
assert_almost_equal(np.dot(s1_, s1) / n_samples, 1, decimal=2)
assert_almost_equal(np.dot(s2_, s2) / n_samples, 1, decimal=2)
else:
assert_almost_equal(np.dot(s1_, s1) / n_samples, 1, decimal=1)
assert_almost_equal(np.dot(s2_, s2) / n_samples, 1, decimal=1)
# Test FastICA class
_, _, sources_fun = fastica(m.T, fun=nl, algorithm=algo, random_state=0)
ica = FastICA(fun=nl, algorithm=algo, random_state=0)
sources = ica.fit_transform(m.T)
assert_equal(ica.components_.shape, (2, 2))
assert_equal(sources.shape, (1000, 2))
assert_array_almost_equal(sources_fun, sources)
assert_array_almost_equal(sources, ica.transform(m.T))
assert_equal(ica.mixing_.shape, (2, 2))
for fn in [np.tanh, "exp(-.5(x^2))"]:
ica = FastICA(fun=fn, algorithm=algo, random_state=0)
assert_raises(ValueError, ica.fit, m.T)
assert_raises(TypeError, FastICA(fun=moves.xrange(10)).fit, m.T)
def test_fastica_nowhiten():
m = [[0, 1], [1, 0]]
# test for issue #697
ica = FastICA(n_components=1, whiten=False, random_state=0)
assert_warns(UserWarning, ica.fit, m)
assert_true(hasattr(ica, 'mixing_'))
def test_non_square_fastica(add_noise=False):
# Test the FastICA algorithm on very simple data.
rng = np.random.RandomState(0)
n_samples = 1000
# Generate two sources:
t = np.linspace(0, 100, n_samples)
s1 = np.sin(t)
s2 = np.ceil(np.sin(np.pi * t))
s = np.c_[s1, s2].T
center_and_norm(s)
s1, s2 = s
# Mixing matrix
mixing = rng.randn(6, 2)
m = np.dot(mixing, s)
if add_noise:
m += 0.1 * rng.randn(6, n_samples)
center_and_norm(m)
k_, mixing_, s_ = fastica(m.T, n_components=2, random_state=rng)
s_ = s_.T
# Check that the mixing model described in the docstring holds:
assert_almost_equal(s_, np.dot(np.dot(mixing_, k_), m))
center_and_norm(s_)
s1_, s2_ = s_
# Check to see if the sources have been estimated
# in the wrong order
if abs(np.dot(s1_, s2)) > abs(np.dot(s1_, s1)):
s2_, s1_ = s_
s1_ *= np.sign(np.dot(s1_, s1))
s2_ *= np.sign(np.dot(s2_, s2))
# Check that we have estimated the original sources
if not add_noise:
assert_almost_equal(np.dot(s1_, s1) / n_samples, 1, decimal=3)
assert_almost_equal(np.dot(s2_, s2) / n_samples, 1, decimal=3)
def test_fit_transform():
# Test FastICA.fit_transform
rng = np.random.RandomState(0)
X = rng.random_sample((100, 10))
for whiten, n_components in [[True, 5], [False, None]]:
n_components_ = (n_components if n_components is not None else
X.shape[1])
ica = FastICA(n_components=n_components, whiten=whiten, random_state=0)
Xt = ica.fit_transform(X)
assert_equal(ica.components_.shape, (n_components_, 10))
assert_equal(Xt.shape, (100, n_components_))
ica = FastICA(n_components=n_components, whiten=whiten, random_state=0)
ica.fit(X)
assert_equal(ica.components_.shape, (n_components_, 10))
Xt2 = ica.transform(X)
assert_array_almost_equal(Xt, Xt2)
def test_inverse_transform():
# Test FastICA.inverse_transform
n_features = 10
n_samples = 100
n1, n2 = 5, 10
rng = np.random.RandomState(0)
X = rng.random_sample((n_samples, n_features))
expected = {(True, n1): (n_features, n1),
(True, n2): (n_features, n2),
(False, n1): (n_features, n2),
(False, n2): (n_features, n2)}
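    # With whiten=False this FastICA version ignores n_components (the
    # "n_components ignored" warning is caught below), so mixing_ stays
    # (n_features, n_features); with whitening its second dimension equals
    # the requested n_components.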
for whiten in [True, False]:
for n_components in [n1, n2]:
n_components_ = (n_components if n_components is not None else
X.shape[1])
ica = FastICA(n_components=n_components, random_state=rng,
whiten=whiten)
with warnings.catch_warnings(record=True):
# catch "n_components ignored" warning
Xt = ica.fit_transform(X)
expected_shape = expected[(whiten, n_components_)]
assert_equal(ica.mixing_.shape, expected_shape)
X2 = ica.inverse_transform(Xt)
assert_equal(X.shape, X2.shape)
# reversibility test in non-reduction case
if n_components == X.shape[1]:
assert_array_almost_equal(X, X2)
| bsd-3-clause |
msebire/intellij-community | python/helpers/pydev/pydevconsole.py | 1 | 12520 | '''
Entry point module to start the interactive console.
'''
from _pydev_bundle._pydev_getopt import gnu_getopt
from _pydev_comm.rpc import make_rpc_client, start_rpc_server, start_rpc_server_and_make_client
from _pydev_imps._pydev_saved_modules import thread
start_new_thread = thread.start_new_thread
try:
from code import InteractiveConsole
except ImportError:
from _pydevd_bundle.pydevconsole_code_for_ironpython import InteractiveConsole
import os
import sys
from _pydev_imps._pydev_saved_modules import threading
from _pydevd_bundle.pydevd_constants import INTERACTIVE_MODE_AVAILABLE, dict_keys
from _pydevd_bundle.pydevd_utils import save_main_module
from _pydev_bundle import fix_getpass
fix_getpass.fix_getpass()
from _pydev_bundle.pydev_imports import _queue
try:
import __builtin__
except:
import builtins as __builtin__ # @UnresolvedImport
from _pydev_bundle.pydev_stdin import BaseStdIn
from _pydev_bundle.pydev_console_utils import BaseInterpreterInterface
from _pydev_bundle.pydev_console_types import Command
IS_PYTHON_3_ONWARDS = sys.version_info[0] >= 3
IS_PY24 = sys.version_info[0] == 2 and sys.version_info[1] == 4
try:
try:
execfile #Not in Py3k
except NameError:
from _pydev_bundle.pydev_imports import execfile
__builtin__.execfile = execfile
except:
pass
# Pull in runfile, the interface to UMD that wraps execfile
from _pydev_bundle.pydev_umd import runfile, _set_globals_function
if sys.version_info[0] >= 3:
import builtins # @UnresolvedImport
builtins.runfile = runfile
else:
import __builtin__
__builtin__.runfile = runfile
#=======================================================================================================================
# InterpreterInterface
#=======================================================================================================================
class InterpreterInterface(BaseInterpreterInterface):
'''
The methods in this class should be registered in the xml-rpc server.
'''
def __init__(self, mainThread, connect_status_queue=None, rpc_client=None):
BaseInterpreterInterface.__init__(self, mainThread, connect_status_queue, rpc_client)
self.namespace = {}
self.save_main()
self.interpreter = InteractiveConsole(self.namespace)
self._input_error_printed = False
def save_main(self):
m = save_main_module('<input>', 'pydevconsole')
self.namespace = m.__dict__
try:
self.namespace['__builtins__'] = __builtins__
except NameError:
pass # Not there on Jython...
def do_add_exec(self, codeFragment):
command = Command(self.interpreter, codeFragment)
command.run()
return command.more
def get_namespace(self):
return self.namespace
def close(self):
sys.exit(0)
class _ProcessExecQueueHelper:
_debug_hook = None
_return_control_osc = False
def set_debug_hook(debug_hook):
_ProcessExecQueueHelper._debug_hook = debug_hook
def activate_mpl_if_already_imported(interpreter):
if interpreter.mpl_modules_for_patching:
for module in dict_keys(interpreter.mpl_modules_for_patching):
if module in sys.modules:
activate_function = interpreter.mpl_modules_for_patching.pop(module)
activate_function()
def init_set_return_control_back(interpreter):
from pydev_ipython.inputhook import set_return_control_callback
def return_control():
''' A function that the inputhooks can call (via inputhook.stdin_ready()) to find
out if they should cede control and return '''
if _ProcessExecQueueHelper._debug_hook:
# Some of the input hooks check return control without doing
# a single operation, so we don't return True on every
# call when the debug hook is in place to allow the GUI to run
# XXX: Eventually the inputhook code will have diverged enough
# from the IPython source that it will be worthwhile rewriting
# it rather than pretending to maintain the old API
_ProcessExecQueueHelper._return_control_osc = not _ProcessExecQueueHelper._return_control_osc
if _ProcessExecQueueHelper._return_control_osc:
return True
if not interpreter.exec_queue.empty():
return True
return False
set_return_control_callback(return_control)
def init_mpl_in_console(interpreter):
init_set_return_control_back(interpreter)
if not INTERACTIVE_MODE_AVAILABLE:
return
activate_mpl_if_already_imported(interpreter)
from _pydev_bundle.pydev_import_hook import import_hook_manager
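    # Modules that have not been imported yet are patched lazily: the import
    # hook runs the matching activation function the first time the module is
    # imported.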
for mod in dict_keys(interpreter.mpl_modules_for_patching):
import_hook_manager.add_module_name(mod, interpreter.mpl_modules_for_patching.pop(mod))
def process_exec_queue(interpreter):
init_mpl_in_console(interpreter)
from pydev_ipython.inputhook import get_inputhook
while 1:
# Running the request may have changed the inputhook in use
inputhook = get_inputhook()
if _ProcessExecQueueHelper._debug_hook:
_ProcessExecQueueHelper._debug_hook()
if inputhook:
try:
# Note: it'll block here until return_control returns True.
inputhook()
except:
import traceback;traceback.print_exc()
try:
try:
code_fragment = interpreter.exec_queue.get(block=True, timeout=1/20.) # 20 calls/second
except _queue.Empty:
continue
if hasattr(code_fragment, '__call__'):
# It can be a callable (i.e.: something that must run in the main
# thread can be put in the queue for later execution).
code_fragment()
else:
more = interpreter.add_exec(code_fragment)
except KeyboardInterrupt:
interpreter.buffer = None
continue
except SystemExit:
raise
        except:
            import traceback  # may not have been imported by the inputhook error path above
            type, value, tb = sys.exc_info()
            traceback.print_exception(type, value, tb, file=sys.__stderr__)
exit()
if 'IPYTHONENABLE' in os.environ:
IPYTHON = os.environ['IPYTHONENABLE'] == 'True'
else:
IPYTHON = True
try:
try:
exitfunc = sys.exitfunc
except AttributeError:
exitfunc = None
if IPYTHON:
from _pydev_bundle.pydev_ipython_console import InterpreterInterface
if exitfunc is not None:
sys.exitfunc = exitfunc
else:
try:
delattr(sys, 'exitfunc')
except:
pass
except:
IPYTHON = False
pass
#=======================================================================================================================
# _DoExit
#=======================================================================================================================
def do_exit(*args):
'''
We have to override the exit because calling sys.exit will only actually exit the main thread,
        and as we're in an XML-RPC server, that won't work.
'''
try:
import java.lang.System
java.lang.System.exit(1)
except ImportError:
if len(args) == 1:
os._exit(args[0])
else:
os._exit(0)
def enable_thrift_logging():
"""Sets up `thriftpy` logger
The logger is used in `thriftpy/server.py` for logging exceptions.
"""
import logging
# create logger
logger = logging.getLogger('_shaded_thriftpy')
logger.setLevel(logging.DEBUG)
# create console handler and set level to debug
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
# add formatter to ch
ch.setFormatter(formatter)
# add ch to logger
logger.addHandler(ch)
def create_server_handler_factory(interpreter):
def server_handler_factory(rpc_client):
interpreter.rpc_client = rpc_client
return interpreter
return server_handler_factory
def start_server(port):
if port is None:
port = 0
# 0. General stuff
#replace exit (see comments on method)
#note that this does not work in jython!!! (sys method can't be replaced).
sys.exit = do_exit
from pydev_console.protocol import PythonConsoleBackendService, PythonConsoleFrontendService
enable_thrift_logging()
server_service = PythonConsoleBackendService
client_service = PythonConsoleFrontendService
# 1. Start Python console server
# `InterpreterInterface` implements all methods required for `server_handler`
interpreter = InterpreterInterface(threading.currentThread())
# Tell UMD the proper default namespace
_set_globals_function(interpreter.get_namespace)
server_socket = start_rpc_server_and_make_client('', port, server_service, client_service, create_server_handler_factory(interpreter))
# 2. Print server port for the IDE
_, server_port = server_socket.getsockname()
print(server_port)
# 3. Wait for IDE to connect to the server
process_exec_queue(interpreter)
def start_client(host, port):
#replace exit (see comments on method)
#note that this does not work in jython!!! (sys method can't be replaced).
sys.exit = do_exit
from pydev_console.protocol import PythonConsoleBackendService, PythonConsoleFrontendService
enable_thrift_logging()
client_service = PythonConsoleFrontendService
client, server_transport = make_rpc_client(client_service, host, port)
interpreter = InterpreterInterface(threading.currentThread(), rpc_client=client)
# we do not need to start the server in a new thread because it does not need to accept a client connection, it already has it
# Tell UMD the proper default namespace
_set_globals_function(interpreter.get_namespace)
server_service = PythonConsoleBackendService
# `InterpreterInterface` implements all methods required for the handler
server_handler = interpreter
start_rpc_server(server_transport, server_service, server_handler)
process_exec_queue(interpreter)
def get_interpreter():
try:
interpreterInterface = getattr(__builtin__, 'interpreter')
except AttributeError:
interpreterInterface = InterpreterInterface(None, None, threading.currentThread())
__builtin__.interpreter = interpreterInterface
print(interpreterInterface.get_greeting_msg())
return interpreterInterface
def get_completions(text, token, globals, locals):
interpreterInterface = get_interpreter()
interpreterInterface.interpreter.update(globals, locals)
return interpreterInterface.getCompletions(text, token)
#=======================================================================================================================
# main
#=======================================================================================================================
if __name__ == '__main__':
#Important: don't use this module directly as the __main__ module, rather, import itself as pydevconsole
#so that we don't get multiple pydevconsole modules if it's executed directly (otherwise we'd have multiple
#representations of its classes).
#See: https://sw-brainwy.rhcloud.com/tracker/PyDev/446:
#'Variables' and 'Expressions' views stopped working when debugging interactive console
import pydevconsole
sys.stdin = pydevconsole.BaseStdIn(sys.stdin)
# parse command-line arguments
optlist, _ = gnu_getopt(sys.argv, 'm:h:p', ['mode=', 'host=', 'port='])
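    # Typical invocations (illustrative): --mode=server --port=0 lets the
    # backend pick a free port and print it, while --mode=client
    # --host=127.0.0.1 --port=<N> connects back to an IDE that is already
    # listening.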
mode = None
host = None
port = None
for opt, arg in optlist:
if opt in ('-m', '--mode'):
mode = arg
elif opt in ('-h', '--host'):
host = arg
elif opt in ('-p', '--port'):
port = int(arg)
if mode not in ('client', 'server'):
sys.exit(-1)
if mode == 'client':
if not port:
# port must be set for client
sys.exit(-1)
if not host:
from _pydev_bundle import pydev_localhost
host = client_host = pydev_localhost.get_localhost()
pydevconsole.start_client(host, port)
elif mode == 'server':
pydevconsole.start_server(port)
| apache-2.0 |
MarauderXtreme/sipa | sipa/model/finance.py | 1 | 2369 | from abc import ABCMeta, abstractmethod
from flask_babel import gettext
from sipa.model.fancy_property import ActiveProperty, Capabilities
from sipa.units import format_money
from sipa.utils import compare_all_attributes
class BaseFinanceInformation(metaclass=ABCMeta):
"""A Class providing finance information about a user.
This class bundles informations such as whether the current user
is obliged to pay, the current balance, history, and the time of
the last update. The balance is being provided as a
FancyProperty, so it can be used as a row.
For subclassing, implement the respective abstract
methods/properties.
"""
@property
def balance(self):
"""The current balance as a
:py:class:`~sipa.model.fancy_property.ActiveProperty`
If :attr:`has_to_pay` is False, return an empty property with
the note that the user doesn't have to pay. Else, return the
balance formatted and styled (red/green) as money and mark
this property editable.
"""
if not self.has_to_pay:
return ActiveProperty('finance_balance',
value=gettext("Beitrag in Miete inbegriffen"),
raw_value=0, empty=True)
return ActiveProperty('finance_balance',
value=format_money(self.raw_balance),
raw_value=self.raw_balance)
@property
@abstractmethod
def raw_balance(self) -> float:
"""**[Abstract]** The current balance
If :py:meth:`has_to_pay` is False, this method will not be
used implicitly.
"""
pass
@property
@abstractmethod
def has_to_pay(self) -> bool:
"""**[Abstract]** Whether the user is obliged to pay."""
pass
@property
@abstractmethod
def history(self):
"""**[Abstract]** History of payments
        This method should return an iterable of (datetime, int)
        tuples.
"""
pass
@property
@abstractmethod
def last_update(self):
"""**[Abstract]** The time of the last update."""
pass
def __eq__(self, other):
return compare_all_attributes(self, other, ['raw_balance', 'has_to_pay',
'history', 'last_update'])
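# Illustrative sketch only (not part of sipa): a concrete backend would
# subclass BaseFinanceInformation and fill in the abstract members, roughly:
#
#     class DummyFinanceInformation(BaseFinanceInformation):
#         has_to_pay = True
#         raw_balance = 3.50
#         history = [(datetime.utcnow(), 3.50)]
#         last_update = datetime.utcnow()
#
# The inherited `balance` property then renders the formatted amount via
# format_money automatically.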
| mit |
untom/scikit-learn | sklearn/decomposition/base.py | 313 | 5647 | """Principal Component Analysis Base Classes"""
# Author: Alexandre Gramfort <[email protected]>
# Olivier Grisel <[email protected]>
# Mathieu Blondel <[email protected]>
# Denis A. Engemann <[email protected]>
# Kyle Kastner <[email protected]>
#
# License: BSD 3 clause
import numpy as np
from scipy import linalg
from ..base import BaseEstimator, TransformerMixin
from ..utils import check_array
from ..utils.extmath import fast_dot
from ..utils.validation import check_is_fitted
from ..externals import six
from abc import ABCMeta, abstractmethod
class _BasePCA(six.with_metaclass(ABCMeta, BaseEstimator, TransformerMixin)):
"""Base class for PCA methods.
Warning: This class should not be used directly.
Use derived classes instead.
"""
def get_covariance(self):
"""Compute data covariance with the generative model.
``cov = components_.T * S**2 * components_ + sigma2 * eye(n_features)``
where S**2 contains the explained variances, and sigma2 contains the
noise variances.
Returns
-------
cov : array, shape=(n_features, n_features)
Estimated covariance of data.
"""
components_ = self.components_
exp_var = self.explained_variance_
if self.whiten:
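            # Whitened components_ have unit variance; rescale them by the
            # square root of the explained variance to express them on the
            # original scale before rebuilding the covariance.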
components_ = components_ * np.sqrt(exp_var[:, np.newaxis])
exp_var_diff = np.maximum(exp_var - self.noise_variance_, 0.)
cov = np.dot(components_.T * exp_var_diff, components_)
cov.flat[::len(cov) + 1] += self.noise_variance_ # modify diag inplace
return cov
def get_precision(self):
"""Compute data precision matrix with the generative model.
Equals the inverse of the covariance but computed with
the matrix inversion lemma for efficiency.
Returns
-------
precision : array, shape=(n_features, n_features)
Estimated precision of data.
"""
n_features = self.components_.shape[1]
# handle corner cases first
if self.n_components_ == 0:
return np.eye(n_features) / self.noise_variance_
if self.n_components_ == n_features:
return linalg.inv(self.get_covariance())
# Get precision using matrix inversion lemma
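        # Woodbury identity: invert a small (n_components x n_components)
        # matrix instead of the full (n_features x n_features) covariance.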
components_ = self.components_
exp_var = self.explained_variance_
if self.whiten:
components_ = components_ * np.sqrt(exp_var[:, np.newaxis])
exp_var_diff = np.maximum(exp_var - self.noise_variance_, 0.)
precision = np.dot(components_, components_.T) / self.noise_variance_
precision.flat[::len(precision) + 1] += 1. / exp_var_diff
precision = np.dot(components_.T,
np.dot(linalg.inv(precision), components_))
precision /= -(self.noise_variance_ ** 2)
precision.flat[::len(precision) + 1] += 1. / self.noise_variance_
return precision
@abstractmethod
    def fit(self, X, y=None):
"""Placeholder for fit. Subclasses should implement this method!
Fit the model with X.
Parameters
----------
X : array-like, shape (n_samples, n_features)
Training data, where n_samples is the number of samples and
n_features is the number of features.
Returns
-------
self : object
Returns the instance itself.
"""
def transform(self, X, y=None):
"""Apply dimensionality reduction to X.
X is projected on the first principal components previously extracted
from a training set.
Parameters
----------
X : array-like, shape (n_samples, n_features)
New data, where n_samples is the number of samples
and n_features is the number of features.
Returns
-------
X_new : array-like, shape (n_samples, n_components)
Examples
--------
>>> import numpy as np
>>> from sklearn.decomposition import IncrementalPCA
>>> X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]])
>>> ipca = IncrementalPCA(n_components=2, batch_size=3)
>>> ipca.fit(X)
IncrementalPCA(batch_size=3, copy=True, n_components=2, whiten=False)
>>> ipca.transform(X) # doctest: +SKIP
"""
check_is_fitted(self, ['mean_', 'components_'], all_or_any=all)
X = check_array(X)
if self.mean_ is not None:
X = X - self.mean_
X_transformed = fast_dot(X, self.components_.T)
if self.whiten:
X_transformed /= np.sqrt(self.explained_variance_)
return X_transformed
def inverse_transform(self, X, y=None):
"""Transform data back to its original space.
In other words, return an input X_original whose transform would be X.
Parameters
----------
X : array-like, shape (n_samples, n_components)
New data, where n_samples is the number of samples
and n_components is the number of components.
Returns
-------
        X_original : array-like, shape (n_samples, n_features)
Notes
-----
If whitening is enabled, inverse_transform will compute the
exact inverse operation, which includes reversing whitening.
"""
if self.whiten:
return fast_dot(X, np.sqrt(self.explained_variance_[:, np.newaxis]) *
self.components_) + self.mean_
else:
return fast_dot(X, self.components_) + self.mean_
| bsd-3-clause |
rogerthat-platform/rogerthat-backend | src-test/rogerthat_tests/mobicage/bizz/test_system.py | 1 | 1331 | # -*- coding: utf-8 -*-
# Copyright 2017 GIG Technology NV
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @@license_version:1.3@@
from rogerthat.bizz.system import unregister_mobile
from rogerthat.bizz.user import archiveUserDataAfterDisconnect
from rogerthat.dal.friend import get_friends_map
from rogerthat.dal.profile import get_user_profile
from rogerthat.rpc import users
import mc_unittest
class Test(mc_unittest.TestCase):
def setUp(self):
mc_unittest.TestCase.setUp(self, datastore_hr_probability=1)
def test_unregister_deactivated_account(self):
user = users.get_current_user()
mobile = users.get_current_mobile()
# deactivate account
archiveUserDataAfterDisconnect(user, get_friends_map(user), get_user_profile(user), False)
unregister_mobile(user, mobile)
| apache-2.0 |
oceane10712/software_testing_stage2 | test/gtest_shuffle_test.py | 3023 | 12549 | #!/usr/bin/env python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Verifies that test shuffling works."""
__author__ = '[email protected] (Zhanyong Wan)'
import os
import gtest_test_utils
# Command to run the gtest_shuffle_test_ program.
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_shuffle_test_')
# The environment variables for test sharding.
TOTAL_SHARDS_ENV_VAR = 'GTEST_TOTAL_SHARDS'
SHARD_INDEX_ENV_VAR = 'GTEST_SHARD_INDEX'
TEST_FILTER = 'A*.A:A*.B:C*'
ALL_TESTS = []
ACTIVE_TESTS = []
FILTERED_TESTS = []
SHARDED_TESTS = []
SHUFFLED_ALL_TESTS = []
SHUFFLED_ACTIVE_TESTS = []
SHUFFLED_FILTERED_TESTS = []
SHUFFLED_SHARDED_TESTS = []
def AlsoRunDisabledTestsFlag():
return '--gtest_also_run_disabled_tests'
def FilterFlag(test_filter):
return '--gtest_filter=%s' % (test_filter,)
def RepeatFlag(n):
return '--gtest_repeat=%s' % (n,)
def ShuffleFlag():
return '--gtest_shuffle'
def RandomSeedFlag(n):
return '--gtest_random_seed=%s' % (n,)
def RunAndReturnOutput(extra_env, args):
"""Runs the test program and returns its output."""
environ_copy = os.environ.copy()
environ_copy.update(extra_env)
return gtest_test_utils.Subprocess([COMMAND] + args, env=environ_copy).output
def GetTestsForAllIterations(extra_env, args):
"""Runs the test program and returns a list of test lists.
Args:
extra_env: a map from environment variables to their values
args: command line flags to pass to gtest_shuffle_test_
Returns:
A list where the i-th element is the list of tests run in the i-th
test iteration.
"""
test_iterations = []
for line in RunAndReturnOutput(extra_env, args).split('\n'):
if line.startswith('----'):
tests = []
test_iterations.append(tests)
elif line.strip():
tests.append(line.strip()) # 'TestCaseName.TestName'
return test_iterations
def GetTestCases(tests):
"""Returns a list of test cases in the given full test names.
Args:
tests: a list of full test names
Returns:
A list of test cases from 'tests', in their original order.
    Duplicates are removed (each test case is kept at its first occurrence).
"""
test_cases = []
for test in tests:
test_case = test.split('.')[0]
if not test_case in test_cases:
test_cases.append(test_case)
return test_cases
def CalculateTestLists():
"""Calculates the list of tests run under different flags."""
if not ALL_TESTS:
ALL_TESTS.extend(
GetTestsForAllIterations({}, [AlsoRunDisabledTestsFlag()])[0])
if not ACTIVE_TESTS:
ACTIVE_TESTS.extend(GetTestsForAllIterations({}, [])[0])
if not FILTERED_TESTS:
FILTERED_TESTS.extend(
GetTestsForAllIterations({}, [FilterFlag(TEST_FILTER)])[0])
if not SHARDED_TESTS:
SHARDED_TESTS.extend(
GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
SHARD_INDEX_ENV_VAR: '1'},
[])[0])
if not SHUFFLED_ALL_TESTS:
SHUFFLED_ALL_TESTS.extend(GetTestsForAllIterations(
{}, [AlsoRunDisabledTestsFlag(), ShuffleFlag(), RandomSeedFlag(1)])[0])
if not SHUFFLED_ACTIVE_TESTS:
SHUFFLED_ACTIVE_TESTS.extend(GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(1)])[0])
if not SHUFFLED_FILTERED_TESTS:
SHUFFLED_FILTERED_TESTS.extend(GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(1), FilterFlag(TEST_FILTER)])[0])
if not SHUFFLED_SHARDED_TESTS:
SHUFFLED_SHARDED_TESTS.extend(
GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
SHARD_INDEX_ENV_VAR: '1'},
[ShuffleFlag(), RandomSeedFlag(1)])[0])
class GTestShuffleUnitTest(gtest_test_utils.TestCase):
"""Tests test shuffling."""
def setUp(self):
CalculateTestLists()
def testShufflePreservesNumberOfTests(self):
self.assertEqual(len(ALL_TESTS), len(SHUFFLED_ALL_TESTS))
self.assertEqual(len(ACTIVE_TESTS), len(SHUFFLED_ACTIVE_TESTS))
self.assertEqual(len(FILTERED_TESTS), len(SHUFFLED_FILTERED_TESTS))
self.assertEqual(len(SHARDED_TESTS), len(SHUFFLED_SHARDED_TESTS))
def testShuffleChangesTestOrder(self):
self.assert_(SHUFFLED_ALL_TESTS != ALL_TESTS, SHUFFLED_ALL_TESTS)
self.assert_(SHUFFLED_ACTIVE_TESTS != ACTIVE_TESTS, SHUFFLED_ACTIVE_TESTS)
self.assert_(SHUFFLED_FILTERED_TESTS != FILTERED_TESTS,
SHUFFLED_FILTERED_TESTS)
self.assert_(SHUFFLED_SHARDED_TESTS != SHARDED_TESTS,
SHUFFLED_SHARDED_TESTS)
def testShuffleChangesTestCaseOrder(self):
self.assert_(GetTestCases(SHUFFLED_ALL_TESTS) != GetTestCases(ALL_TESTS),
GetTestCases(SHUFFLED_ALL_TESTS))
self.assert_(
GetTestCases(SHUFFLED_ACTIVE_TESTS) != GetTestCases(ACTIVE_TESTS),
GetTestCases(SHUFFLED_ACTIVE_TESTS))
self.assert_(
GetTestCases(SHUFFLED_FILTERED_TESTS) != GetTestCases(FILTERED_TESTS),
GetTestCases(SHUFFLED_FILTERED_TESTS))
self.assert_(
GetTestCases(SHUFFLED_SHARDED_TESTS) != GetTestCases(SHARDED_TESTS),
GetTestCases(SHUFFLED_SHARDED_TESTS))
def testShuffleDoesNotRepeatTest(self):
for test in SHUFFLED_ALL_TESTS:
self.assertEqual(1, SHUFFLED_ALL_TESTS.count(test),
'%s appears more than once' % (test,))
for test in SHUFFLED_ACTIVE_TESTS:
self.assertEqual(1, SHUFFLED_ACTIVE_TESTS.count(test),
'%s appears more than once' % (test,))
for test in SHUFFLED_FILTERED_TESTS:
self.assertEqual(1, SHUFFLED_FILTERED_TESTS.count(test),
'%s appears more than once' % (test,))
for test in SHUFFLED_SHARDED_TESTS:
self.assertEqual(1, SHUFFLED_SHARDED_TESTS.count(test),
'%s appears more than once' % (test,))
def testShuffleDoesNotCreateNewTest(self):
for test in SHUFFLED_ALL_TESTS:
self.assert_(test in ALL_TESTS, '%s is an invalid test' % (test,))
for test in SHUFFLED_ACTIVE_TESTS:
self.assert_(test in ACTIVE_TESTS, '%s is an invalid test' % (test,))
for test in SHUFFLED_FILTERED_TESTS:
self.assert_(test in FILTERED_TESTS, '%s is an invalid test' % (test,))
for test in SHUFFLED_SHARDED_TESTS:
self.assert_(test in SHARDED_TESTS, '%s is an invalid test' % (test,))
def testShuffleIncludesAllTests(self):
for test in ALL_TESTS:
self.assert_(test in SHUFFLED_ALL_TESTS, '%s is missing' % (test,))
for test in ACTIVE_TESTS:
self.assert_(test in SHUFFLED_ACTIVE_TESTS, '%s is missing' % (test,))
for test in FILTERED_TESTS:
self.assert_(test in SHUFFLED_FILTERED_TESTS, '%s is missing' % (test,))
for test in SHARDED_TESTS:
self.assert_(test in SHUFFLED_SHARDED_TESTS, '%s is missing' % (test,))
def testShuffleLeavesDeathTestsAtFront(self):
non_death_test_found = False
for test in SHUFFLED_ACTIVE_TESTS:
if 'DeathTest.' in test:
self.assert_(not non_death_test_found,
'%s appears after a non-death test' % (test,))
else:
non_death_test_found = True
def _VerifyTestCasesDoNotInterleave(self, tests):
test_cases = []
for test in tests:
[test_case, _] = test.split('.')
if test_cases and test_cases[-1] != test_case:
test_cases.append(test_case)
self.assertEqual(1, test_cases.count(test_case),
'Test case %s is not grouped together in %s' %
(test_case, tests))
def testShuffleDoesNotInterleaveTestCases(self):
self._VerifyTestCasesDoNotInterleave(SHUFFLED_ALL_TESTS)
self._VerifyTestCasesDoNotInterleave(SHUFFLED_ACTIVE_TESTS)
self._VerifyTestCasesDoNotInterleave(SHUFFLED_FILTERED_TESTS)
self._VerifyTestCasesDoNotInterleave(SHUFFLED_SHARDED_TESTS)
def testShuffleRestoresOrderAfterEachIteration(self):
# Get the test lists in all 3 iterations, using random seed 1, 2,
# and 3 respectively. Google Test picks a different seed in each
# iteration, and this test depends on the current implementation
# picking successive numbers. This dependency is not ideal, but
# makes the test much easier to write.
[tests_in_iteration1, tests_in_iteration2, tests_in_iteration3] = (
GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(1), RepeatFlag(3)]))
# Make sure running the tests with random seed 1 gets the same
# order as in iteration 1 above.
[tests_with_seed1] = GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(1)])
self.assertEqual(tests_in_iteration1, tests_with_seed1)
# Make sure running the tests with random seed 2 gets the same
# order as in iteration 2 above. Success means that Google Test
# correctly restores the test order before re-shuffling at the
# beginning of iteration 2.
[tests_with_seed2] = GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(2)])
self.assertEqual(tests_in_iteration2, tests_with_seed2)
# Make sure running the tests with random seed 3 gets the same
# order as in iteration 3 above. Success means that Google Test
# correctly restores the test order before re-shuffling at the
# beginning of iteration 3.
[tests_with_seed3] = GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(3)])
self.assertEqual(tests_in_iteration3, tests_with_seed3)
def testShuffleGeneratesNewOrderInEachIteration(self):
[tests_in_iteration1, tests_in_iteration2, tests_in_iteration3] = (
GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(1), RepeatFlag(3)]))
self.assert_(tests_in_iteration1 != tests_in_iteration2,
tests_in_iteration1)
self.assert_(tests_in_iteration1 != tests_in_iteration3,
tests_in_iteration1)
self.assert_(tests_in_iteration2 != tests_in_iteration3,
tests_in_iteration2)
def testShuffleShardedTestsPreservesPartition(self):
# If we run M tests on N shards, the same M tests should be run in
# total, regardless of the random seeds used by the shards.
[tests1] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
SHARD_INDEX_ENV_VAR: '0'},
[ShuffleFlag(), RandomSeedFlag(1)])
[tests2] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
SHARD_INDEX_ENV_VAR: '1'},
[ShuffleFlag(), RandomSeedFlag(20)])
[tests3] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
SHARD_INDEX_ENV_VAR: '2'},
[ShuffleFlag(), RandomSeedFlag(25)])
sorted_sharded_tests = tests1 + tests2 + tests3
sorted_sharded_tests.sort()
sorted_active_tests = []
sorted_active_tests.extend(ACTIVE_TESTS)
sorted_active_tests.sort()
self.assertEqual(sorted_active_tests, sorted_sharded_tests)
if __name__ == '__main__':
gtest_test_utils.Main()
| bsd-3-clause |
LuqueDaniel/ninja-ide | ninja_ide/gui/dialogs/preferences/preferences_editor_configuration.py | 6 | 13499 | # -*- coding: utf-8 -*-
#
# This file is part of NINJA-IDE (http://ninja-ide.org).
#
# NINJA-IDE is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# any later version.
#
# NINJA-IDE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NINJA-IDE; If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
from __future__ import unicode_literals
from PyQt4.QtGui import QWidget
from PyQt4.QtGui import QVBoxLayout
from PyQt4.QtGui import QHBoxLayout
from PyQt4.QtGui import QGroupBox
from PyQt4.QtGui import QCheckBox
from PyQt4.QtGui import QGridLayout
from PyQt4.QtGui import QSpacerItem
from PyQt4.QtGui import QLabel
from PyQt4.QtGui import QSpinBox
from PyQt4.QtGui import QComboBox
from PyQt4.QtGui import QSizePolicy
from PyQt4.QtCore import Qt
from PyQt4.QtCore import SIGNAL
from ninja_ide import translations
from ninja_ide.core import settings
from ninja_ide.gui.ide import IDE
from ninja_ide.gui.dialogs.preferences import preferences
class EditorConfiguration(QWidget):
"""EditorConfiguration widget class"""
def __init__(self, parent):
super(EditorConfiguration, self).__init__()
self._preferences, vbox = parent, QVBoxLayout(self)
# groups
group1 = QGroupBox(translations.TR_PREFERENCES_EDITOR_CONFIG_INDENT)
group2 = QGroupBox(translations.TR_PREFERENCES_EDITOR_CONFIG_MARGIN)
group3 = QGroupBox(translations.TR_LINT_DIRTY_TEXT)
group4 = QGroupBox(translations.TR_PEP8_DIRTY_TEXT)
group5 = QGroupBox(translations.TR_HIGHLIGHTER_EXTRAS)
group6 = QGroupBox(translations.TR_TYPING_ASSISTANCE)
group7 = QGroupBox(translations.TR_DISPLAY)
# groups container
container_widget_with_all_preferences = QWidget()
formFeatures = QGridLayout(container_widget_with_all_preferences)
# Indentation
hboxg1 = QHBoxLayout(group1)
hboxg1.setContentsMargins(5, 15, 5, 5)
self._spin, self._checkUseTabs = QSpinBox(), QComboBox()
self._spin.setRange(1, 10)
self._spin.setValue(settings.INDENT)
hboxg1.addWidget(self._spin)
self._checkUseTabs.addItems([
translations.TR_PREFERENCES_EDITOR_CONFIG_SPACES.capitalize(),
translations.TR_PREFERENCES_EDITOR_CONFIG_TABS.capitalize()])
self._checkUseTabs.setCurrentIndex(int(settings.USE_TABS))
hboxg1.addWidget(self._checkUseTabs)
formFeatures.addWidget(group1, 0, 0)
# Margin Line
hboxg2 = QHBoxLayout(group2)
hboxg2.setContentsMargins(5, 15, 5, 5)
self._checkShowMargin = QCheckBox(
translations.TR_PREFERENCES_EDITOR_CONFIG_SHOW_MARGIN_LINE)
self._checkShowMargin.setChecked(settings.SHOW_MARGIN_LINE)
hboxg2.addWidget(self._checkShowMargin)
self._spinMargin = QSpinBox()
self._spinMargin.setRange(50, 100)
self._spinMargin.setSingleStep(2)
self._spinMargin.setValue(settings.MARGIN_LINE)
hboxg2.addWidget(self._spinMargin)
hboxg2.addWidget(QLabel(translations.TR_CHARACTERS))
formFeatures.addWidget(group2, 0, 1)
# Display Errors
vboxDisplay = QVBoxLayout(group7)
vboxDisplay.setContentsMargins(5, 15, 5, 5)
self._checkHighlightLine = QComboBox()
self._checkHighlightLine.addItems([
translations.TR_PREFERENCES_EDITOR_CONFIG_ERROR_USE_BACKGROUND,
translations.TR_PREFERENCES_EDITOR_CONFIG_ERROR_USE_UNDERLINE])
self._checkHighlightLine.setCurrentIndex(
int(settings.UNDERLINE_NOT_BACKGROUND))
hboxDisplay1 = QHBoxLayout()
hboxDisplay1.addWidget(QLabel(translations.TR_DISPLAY_ERRORS))
hboxDisplay1.addWidget(self._checkHighlightLine)
hboxDisplay2 = QHBoxLayout()
self._checkDisplayLineNumbers = QCheckBox(
translations.TR_DISPLAY_LINE_NUMBERS)
self._checkDisplayLineNumbers.setChecked(settings.SHOW_LINE_NUMBERS)
hboxDisplay2.addWidget(self._checkDisplayLineNumbers)
vboxDisplay.addLayout(hboxDisplay1)
vboxDisplay.addLayout(hboxDisplay2)
formFeatures.addWidget(group7, 1, 0, 1, 0)
# Find Lint Errors (highlighter)
vboxg3 = QVBoxLayout(group3)
self._checkErrors = QCheckBox(
translations.TR_PREFERENCES_EDITOR_CONFIG_FIND_ERRORS)
self._checkErrors.setChecked(settings.FIND_ERRORS)
self.connect(self._checkErrors, SIGNAL("stateChanged(int)"),
self._disable_show_errors)
self._showErrorsOnLine = QCheckBox(
translations.TR_PREFERENCES_EDITOR_CONFIG_SHOW_TOOLTIP_ERRORS)
self._showErrorsOnLine.setChecked(settings.ERRORS_HIGHLIGHT_LINE)
self.connect(self._showErrorsOnLine, SIGNAL("stateChanged(int)"),
self._enable_errors_inline)
vboxg3.addWidget(self._checkErrors)
vboxg3.addWidget(self._showErrorsOnLine)
formFeatures.addWidget(group3, 2, 0)
# Find PEP8 Errors (highlighter)
vboxg4 = QVBoxLayout(group4)
vboxg4.setContentsMargins(5, 15, 5, 5)
self._checkStyle = QCheckBox(
translations.TR_PREFERENCES_EDITOR_CONFIG_SHOW_PEP8)
self._checkStyle.setChecked(settings.CHECK_STYLE)
self.connect(self._checkStyle, SIGNAL("stateChanged(int)"),
self._disable_check_style)
vboxg4.addWidget(self._checkStyle)
self._checkStyleOnLine = QCheckBox(
translations.TR_PREFERENCES_EDITOR_CONFIG_SHOW_TOOLTIP_PEP8)
self._checkStyleOnLine.setChecked(settings.CHECK_HIGHLIGHT_LINE)
self.connect(self._checkStyleOnLine, SIGNAL("stateChanged(int)"),
self._enable_check_inline)
vboxg4.addWidget(self._checkStyleOnLine)
formFeatures.addWidget(group4, 2, 1)
# Show Python3 Migration, DocStrings and Spaces (highlighter)
vboxg5 = QVBoxLayout(group5)
vboxg5.setContentsMargins(5, 15, 5, 5)
self._showMigrationTips = QCheckBox(
translations.TR_PREFERENCES_EDITOR_CONFIG_SHOW_MIGRATION)
self._showMigrationTips.setChecked(settings.SHOW_MIGRATION_TIPS)
vboxg5.addWidget(self._showMigrationTips)
self._checkForDocstrings = QCheckBox(
translations.TR_PREFERENCES_EDITOR_CONFIG_CHECK_FOR_DOCSTRINGS)
self._checkForDocstrings.setChecked(settings.CHECK_FOR_DOCSTRINGS)
vboxg5.addWidget(self._checkForDocstrings)
self._checkShowSpaces = QCheckBox(
translations.TR_PREFERENCES_EDITOR_CONFIG_SHOW_TABS_AND_SPACES)
self._checkShowSpaces.setChecked(settings.SHOW_TABS_AND_SPACES)
vboxg5.addWidget(self._checkShowSpaces)
self._checkIndentationGuide = QCheckBox(
translations.TR_SHOW_INDENTATION_GUIDE)
self._checkIndentationGuide.setChecked(settings.SHOW_INDENTATION_GUIDE)
vboxg5.addWidget(self._checkIndentationGuide)
formFeatures.addWidget(group5, 3, 0)
# End of line, Center On Scroll, Trailing space, Word wrap
vboxg6 = QVBoxLayout(group6)
vboxg6.setContentsMargins(5, 15, 5, 5)
self._checkEndOfLine = QCheckBox(
translations.TR_PREFERENCES_EDITOR_CONFIG_END_OF_LINE)
self._checkEndOfLine.setChecked(settings.USE_PLATFORM_END_OF_LINE)
vboxg6.addWidget(self._checkEndOfLine)
self._checkCenterScroll = QCheckBox(
translations.TR_PREFERENCES_EDITOR_CONFIG_CENTER_SCROLL)
self._checkCenterScroll.setChecked(settings.CENTER_ON_SCROLL)
vboxg6.addWidget(self._checkCenterScroll)
self._checkTrailing = QCheckBox(
translations.TR_PREFERENCES_EDITOR_CONFIG_REMOVE_TRAILING)
self._checkTrailing.setChecked(settings.REMOVE_TRAILING_SPACES)
vboxg6.addWidget(self._checkTrailing)
self._allowWordWrap = QCheckBox(
translations.TR_PREFERENCES_EDITOR_CONFIG_WORD_WRAP)
self._allowWordWrap.setChecked(settings.ALLOW_WORD_WRAP)
vboxg6.addWidget(self._allowWordWrap)
formFeatures.addWidget(group6, 3, 1)
# pack all the groups
vbox.addWidget(container_widget_with_all_preferences)
vbox.addItem(QSpacerItem(0, 10, QSizePolicy.Expanding,
QSizePolicy.Expanding))
self.connect(self._preferences, SIGNAL("savePreferences()"), self.save)
def _enable_check_inline(self, val):
"""Method that takes a value to enable the inline style checking"""
if val == Qt.Checked:
self._checkStyle.setChecked(True)
def _enable_errors_inline(self, val):
"""Method that takes a value to enable the inline errors checking"""
if val == Qt.Checked:
self._checkErrors.setChecked(True)
def _disable_check_style(self, val):
"""Method that takes a value to disable the inline style checking"""
if val == Qt.Unchecked:
self._checkStyleOnLine.setChecked(False)
def _disable_show_errors(self, val):
"""Method that takes a value to disable the inline errors checking"""
if val == Qt.Unchecked:
self._showErrorsOnLine.setChecked(False)
def save(self):
"""Method to save settings"""
qsettings = IDE.ninja_settings()
settings.USE_TABS = bool(self._checkUseTabs.currentIndex())
qsettings.setValue('preferences/editor/useTabs',
settings.USE_TABS)
margin_line = self._spinMargin.value()
settings.MARGIN_LINE = margin_line
settings.pep8mod_update_margin_line_length(margin_line)
qsettings.setValue('preferences/editor/marginLine', margin_line)
settings.SHOW_MARGIN_LINE = self._checkShowMargin.isChecked()
qsettings.setValue('preferences/editor/showMarginLine',
settings.SHOW_MARGIN_LINE)
settings.INDENT = self._spin.value()
qsettings.setValue('preferences/editor/indent', settings.INDENT)
endOfLine = self._checkEndOfLine.isChecked()
settings.USE_PLATFORM_END_OF_LINE = endOfLine
qsettings.setValue('preferences/editor/platformEndOfLine', endOfLine)
settings.UNDERLINE_NOT_BACKGROUND = \
bool(self._checkHighlightLine.currentIndex())
qsettings.setValue('preferences/editor/errorsUnderlineBackground',
settings.UNDERLINE_NOT_BACKGROUND)
settings.FIND_ERRORS = self._checkErrors.isChecked()
qsettings.setValue('preferences/editor/errors', settings.FIND_ERRORS)
settings.ERRORS_HIGHLIGHT_LINE = self._showErrorsOnLine.isChecked()
qsettings.setValue('preferences/editor/errorsInLine',
settings.ERRORS_HIGHLIGHT_LINE)
settings.CHECK_STYLE = self._checkStyle.isChecked()
qsettings.setValue('preferences/editor/checkStyle',
settings.CHECK_STYLE)
settings.SHOW_MIGRATION_TIPS = self._showMigrationTips.isChecked()
qsettings.setValue('preferences/editor/showMigrationTips',
settings.SHOW_MIGRATION_TIPS)
settings.CHECK_HIGHLIGHT_LINE = self._checkStyleOnLine.isChecked()
qsettings.setValue('preferences/editor/checkStyleInline',
settings.CHECK_HIGHLIGHT_LINE)
settings.CENTER_ON_SCROLL = self._checkCenterScroll.isChecked()
qsettings.setValue('preferences/editor/centerOnScroll',
settings.CENTER_ON_SCROLL)
settings.REMOVE_TRAILING_SPACES = self._checkTrailing.isChecked()
qsettings.setValue('preferences/editor/removeTrailingSpaces',
settings.REMOVE_TRAILING_SPACES)
settings.ALLOW_WORD_WRAP = self._allowWordWrap.isChecked()
qsettings.setValue('preferences/editor/allowWordWrap',
settings.ALLOW_WORD_WRAP)
settings.SHOW_TABS_AND_SPACES = self._checkShowSpaces.isChecked()
qsettings.setValue('preferences/editor/showTabsAndSpaces',
settings.SHOW_TABS_AND_SPACES)
settings.SHOW_INDENTATION_GUIDE = (
self._checkIndentationGuide.isChecked())
qsettings.setValue('preferences/editor/showIndentationGuide',
settings.SHOW_INDENTATION_GUIDE)
settings.CHECK_FOR_DOCSTRINGS = self._checkForDocstrings.isChecked()
qsettings.setValue('preferences/editor/checkForDocstrings',
settings.CHECK_FOR_DOCSTRINGS)
settings.SHOW_LINE_NUMBERS = self._checkDisplayLineNumbers.isChecked()
qsettings.setValue('preferences/editor/showLineNumbers',
settings.SHOW_LINE_NUMBERS)
if settings.USE_TABS:
settings.pep8mod_add_ignore("W191")
else:
settings.pep8mod_remove_ignore("W191")
settings.pep8mod_refresh_checks()
preferences.Preferences.register_configuration(
'EDITOR', EditorConfiguration,
translations.TR_PREFERENCES_EDITOR_CONFIGURATION,
weight=0, subsection='CONFIGURATION')
| gpl-3.0 |
catchmrbharath/servo | tests/wpt/css-tests/tools/html5lib/html5lib/constants.py | 963 | 87346 | from __future__ import absolute_import, division, unicode_literals
import string
import gettext
_ = gettext.gettext
EOF = None
E = {
"null-character":
_("Null character in input stream, replaced with U+FFFD."),
"invalid-codepoint":
_("Invalid codepoint in stream."),
"incorrectly-placed-solidus":
_("Solidus (/) incorrectly placed in tag."),
"incorrect-cr-newline-entity":
_("Incorrect CR newline entity, replaced with LF."),
"illegal-windows-1252-entity":
_("Entity used with illegal number (windows-1252 reference)."),
"cant-convert-numeric-entity":
_("Numeric entity couldn't be converted to character "
"(codepoint U+%(charAsInt)08x)."),
"illegal-codepoint-for-numeric-entity":
_("Numeric entity represents an illegal codepoint: "
"U+%(charAsInt)08x."),
"numeric-entity-without-semicolon":
_("Numeric entity didn't end with ';'."),
"expected-numeric-entity-but-got-eof":
_("Numeric entity expected. Got end of file instead."),
"expected-numeric-entity":
_("Numeric entity expected but none found."),
"named-entity-without-semicolon":
_("Named entity didn't end with ';'."),
"expected-named-entity":
_("Named entity expected. Got none."),
"attributes-in-end-tag":
_("End tag contains unexpected attributes."),
'self-closing-flag-on-end-tag':
_("End tag contains unexpected self-closing flag."),
"expected-tag-name-but-got-right-bracket":
_("Expected tag name. Got '>' instead."),
"expected-tag-name-but-got-question-mark":
_("Expected tag name. Got '?' instead. (HTML doesn't "
"support processing instructions.)"),
"expected-tag-name":
_("Expected tag name. Got something else instead"),
"expected-closing-tag-but-got-right-bracket":
_("Expected closing tag. Got '>' instead. Ignoring '</>'."),
"expected-closing-tag-but-got-eof":
_("Expected closing tag. Unexpected end of file."),
"expected-closing-tag-but-got-char":
_("Expected closing tag. Unexpected character '%(data)s' found."),
"eof-in-tag-name":
_("Unexpected end of file in the tag name."),
"expected-attribute-name-but-got-eof":
_("Unexpected end of file. Expected attribute name instead."),
"eof-in-attribute-name":
_("Unexpected end of file in attribute name."),
"invalid-character-in-attribute-name":
_("Invalid character in attribute name"),
"duplicate-attribute":
_("Dropped duplicate attribute on tag."),
"expected-end-of-tag-name-but-got-eof":
_("Unexpected end of file. Expected = or end of tag."),
"expected-attribute-value-but-got-eof":
_("Unexpected end of file. Expected attribute value."),
"expected-attribute-value-but-got-right-bracket":
_("Expected attribute value. Got '>' instead."),
'equals-in-unquoted-attribute-value':
_("Unexpected = in unquoted attribute"),
'unexpected-character-in-unquoted-attribute-value':
_("Unexpected character in unquoted attribute"),
"invalid-character-after-attribute-name":
_("Unexpected character after attribute name."),
"unexpected-character-after-attribute-value":
_("Unexpected character after attribute value."),
"eof-in-attribute-value-double-quote":
_("Unexpected end of file in attribute value (\")."),
"eof-in-attribute-value-single-quote":
_("Unexpected end of file in attribute value (')."),
"eof-in-attribute-value-no-quotes":
_("Unexpected end of file in attribute value."),
"unexpected-EOF-after-solidus-in-tag":
_("Unexpected end of file in tag. Expected >"),
"unexpected-character-after-solidus-in-tag":
_("Unexpected character after / in tag. Expected >"),
"expected-dashes-or-doctype":
_("Expected '--' or 'DOCTYPE'. Not found."),
"unexpected-bang-after-double-dash-in-comment":
_("Unexpected ! after -- in comment"),
"unexpected-space-after-double-dash-in-comment":
_("Unexpected space after -- in comment"),
"incorrect-comment":
_("Incorrect comment."),
"eof-in-comment":
_("Unexpected end of file in comment."),
"eof-in-comment-end-dash":
_("Unexpected end of file in comment (-)"),
"unexpected-dash-after-double-dash-in-comment":
_("Unexpected '-' after '--' found in comment."),
"eof-in-comment-double-dash":
_("Unexpected end of file in comment (--)."),
"eof-in-comment-end-space-state":
_("Unexpected end of file in comment."),
"eof-in-comment-end-bang-state":
_("Unexpected end of file in comment."),
"unexpected-char-in-comment":
_("Unexpected character in comment found."),
"need-space-after-doctype":
_("No space after literal string 'DOCTYPE'."),
"expected-doctype-name-but-got-right-bracket":
_("Unexpected > character. Expected DOCTYPE name."),
"expected-doctype-name-but-got-eof":
_("Unexpected end of file. Expected DOCTYPE name."),
"eof-in-doctype-name":
_("Unexpected end of file in DOCTYPE name."),
"eof-in-doctype":
_("Unexpected end of file in DOCTYPE."),
"expected-space-or-right-bracket-in-doctype":
_("Expected space or '>'. Got '%(data)s'"),
"unexpected-end-of-doctype":
_("Unexpected end of DOCTYPE."),
"unexpected-char-in-doctype":
_("Unexpected character in DOCTYPE."),
"eof-in-innerhtml":
_("XXX innerHTML EOF"),
"unexpected-doctype":
_("Unexpected DOCTYPE. Ignored."),
"non-html-root":
_("html needs to be the first start tag."),
"expected-doctype-but-got-eof":
_("Unexpected End of file. Expected DOCTYPE."),
"unknown-doctype":
_("Erroneous DOCTYPE."),
"expected-doctype-but-got-chars":
_("Unexpected non-space characters. Expected DOCTYPE."),
"expected-doctype-but-got-start-tag":
_("Unexpected start tag (%(name)s). Expected DOCTYPE."),
"expected-doctype-but-got-end-tag":
_("Unexpected end tag (%(name)s). Expected DOCTYPE."),
"end-tag-after-implied-root":
_("Unexpected end tag (%(name)s) after the (implied) root element."),
"expected-named-closing-tag-but-got-eof":
_("Unexpected end of file. Expected end tag (%(name)s)."),
"two-heads-are-not-better-than-one":
_("Unexpected start tag head in existing head. Ignored."),
"unexpected-end-tag":
_("Unexpected end tag (%(name)s). Ignored."),
"unexpected-start-tag-out-of-my-head":
_("Unexpected start tag (%(name)s) that can be in head. Moved."),
"unexpected-start-tag":
_("Unexpected start tag (%(name)s)."),
"missing-end-tag":
_("Missing end tag (%(name)s)."),
"missing-end-tags":
_("Missing end tags (%(name)s)."),
"unexpected-start-tag-implies-end-tag":
_("Unexpected start tag (%(startName)s) "
"implies end tag (%(endName)s)."),
"unexpected-start-tag-treated-as":
_("Unexpected start tag (%(originalName)s). Treated as %(newName)s."),
"deprecated-tag":
_("Unexpected start tag %(name)s. Don't use it!"),
"unexpected-start-tag-ignored":
_("Unexpected start tag %(name)s. Ignored."),
"expected-one-end-tag-but-got-another":
_("Unexpected end tag (%(gotName)s). "
"Missing end tag (%(expectedName)s)."),
"end-tag-too-early":
_("End tag (%(name)s) seen too early. Expected other end tag."),
"end-tag-too-early-named":
_("Unexpected end tag (%(gotName)s). Expected end tag (%(expectedName)s)."),
"end-tag-too-early-ignored":
_("End tag (%(name)s) seen too early. Ignored."),
"adoption-agency-1.1":
_("End tag (%(name)s) violates step 1, "
"paragraph 1 of the adoption agency algorithm."),
"adoption-agency-1.2":
_("End tag (%(name)s) violates step 1, "
"paragraph 2 of the adoption agency algorithm."),
"adoption-agency-1.3":
_("End tag (%(name)s) violates step 1, "
"paragraph 3 of the adoption agency algorithm."),
"adoption-agency-4.4":
_("End tag (%(name)s) violates step 4, "
"paragraph 4 of the adoption agency algorithm."),
"unexpected-end-tag-treated-as":
_("Unexpected end tag (%(originalName)s). Treated as %(newName)s."),
"no-end-tag":
_("This element (%(name)s) has no end tag."),
"unexpected-implied-end-tag-in-table":
_("Unexpected implied end tag (%(name)s) in the table phase."),
"unexpected-implied-end-tag-in-table-body":
_("Unexpected implied end tag (%(name)s) in the table body phase."),
"unexpected-char-implies-table-voodoo":
_("Unexpected non-space characters in "
"table context caused voodoo mode."),
"unexpected-hidden-input-in-table":
_("Unexpected input with type hidden in table context."),
"unexpected-form-in-table":
_("Unexpected form in table context."),
"unexpected-start-tag-implies-table-voodoo":
_("Unexpected start tag (%(name)s) in "
"table context caused voodoo mode."),
"unexpected-end-tag-implies-table-voodoo":
_("Unexpected end tag (%(name)s) in "
"table context caused voodoo mode."),
"unexpected-cell-in-table-body":
_("Unexpected table cell start tag (%(name)s) "
"in the table body phase."),
"unexpected-cell-end-tag":
_("Got table cell end tag (%(name)s) "
"while required end tags are missing."),
"unexpected-end-tag-in-table-body":
_("Unexpected end tag (%(name)s) in the table body phase. Ignored."),
"unexpected-implied-end-tag-in-table-row":
_("Unexpected implied end tag (%(name)s) in the table row phase."),
"unexpected-end-tag-in-table-row":
_("Unexpected end tag (%(name)s) in the table row phase. Ignored."),
"unexpected-select-in-select":
_("Unexpected select start tag in the select phase "
"treated as select end tag."),
"unexpected-input-in-select":
_("Unexpected input start tag in the select phase."),
"unexpected-start-tag-in-select":
_("Unexpected start tag token (%(name)s in the select phase. "
"Ignored."),
"unexpected-end-tag-in-select":
_("Unexpected end tag (%(name)s) in the select phase. Ignored."),
"unexpected-table-element-start-tag-in-select-in-table":
_("Unexpected table element start tag (%(name)s) in the select in table phase."),
"unexpected-table-element-end-tag-in-select-in-table":
_("Unexpected table element end tag (%(name)s) in the select in table phase."),
"unexpected-char-after-body":
_("Unexpected non-space characters in the after body phase."),
"unexpected-start-tag-after-body":
_("Unexpected start tag token (%(name)s)"
" in the after body phase."),
"unexpected-end-tag-after-body":
_("Unexpected end tag token (%(name)s)"
" in the after body phase."),
"unexpected-char-in-frameset":
_("Unexpected characters in the frameset phase. Characters ignored."),
"unexpected-start-tag-in-frameset":
_("Unexpected start tag token (%(name)s)"
" in the frameset phase. Ignored."),
"unexpected-frameset-in-frameset-innerhtml":
_("Unexpected end tag token (frameset) "
"in the frameset phase (innerHTML)."),
"unexpected-end-tag-in-frameset":
_("Unexpected end tag token (%(name)s)"
" in the frameset phase. Ignored."),
"unexpected-char-after-frameset":
_("Unexpected non-space characters in the "
"after frameset phase. Ignored."),
"unexpected-start-tag-after-frameset":
_("Unexpected start tag (%(name)s)"
" in the after frameset phase. Ignored."),
"unexpected-end-tag-after-frameset":
_("Unexpected end tag (%(name)s)"
" in the after frameset phase. Ignored."),
"unexpected-end-tag-after-body-innerhtml":
_("Unexpected end tag after body(innerHtml)"),
"expected-eof-but-got-char":
_("Unexpected non-space characters. Expected end of file."),
"expected-eof-but-got-start-tag":
_("Unexpected start tag (%(name)s)"
". Expected end of file."),
"expected-eof-but-got-end-tag":
_("Unexpected end tag (%(name)s)"
". Expected end of file."),
"eof-in-table":
_("Unexpected end of file. Expected table content."),
"eof-in-select":
_("Unexpected end of file. Expected select content."),
"eof-in-frameset":
_("Unexpected end of file. Expected frameset content."),
"eof-in-script-in-script":
_("Unexpected end of file. Expected script content."),
"eof-in-foreign-lands":
_("Unexpected end of file. Expected foreign content"),
"non-void-element-with-trailing-solidus":
_("Trailing solidus not allowed on element %(name)s"),
"unexpected-html-element-in-foreign-content":
_("Element %(name)s not allowed in a non-html context"),
"unexpected-end-tag-before-html":
_("Unexpected end tag (%(name)s) before html."),
"XXX-undefined-error":
_("Undefined error (this sucks and should be fixed)"),
}
namespaces = {
"html": "http://www.w3.org/1999/xhtml",
"mathml": "http://www.w3.org/1998/Math/MathML",
"svg": "http://www.w3.org/2000/svg",
"xlink": "http://www.w3.org/1999/xlink",
"xml": "http://www.w3.org/XML/1998/namespace",
"xmlns": "http://www.w3.org/2000/xmlns/"
}
scopingElements = frozenset((
(namespaces["html"], "applet"),
(namespaces["html"], "caption"),
(namespaces["html"], "html"),
(namespaces["html"], "marquee"),
(namespaces["html"], "object"),
(namespaces["html"], "table"),
(namespaces["html"], "td"),
(namespaces["html"], "th"),
(namespaces["mathml"], "mi"),
(namespaces["mathml"], "mo"),
(namespaces["mathml"], "mn"),
(namespaces["mathml"], "ms"),
(namespaces["mathml"], "mtext"),
(namespaces["mathml"], "annotation-xml"),
(namespaces["svg"], "foreignObject"),
(namespaces["svg"], "desc"),
(namespaces["svg"], "title"),
))
formattingElements = frozenset((
(namespaces["html"], "a"),
(namespaces["html"], "b"),
(namespaces["html"], "big"),
(namespaces["html"], "code"),
(namespaces["html"], "em"),
(namespaces["html"], "font"),
(namespaces["html"], "i"),
(namespaces["html"], "nobr"),
(namespaces["html"], "s"),
(namespaces["html"], "small"),
(namespaces["html"], "strike"),
(namespaces["html"], "strong"),
(namespaces["html"], "tt"),
(namespaces["html"], "u")
))
specialElements = frozenset((
(namespaces["html"], "address"),
(namespaces["html"], "applet"),
(namespaces["html"], "area"),
(namespaces["html"], "article"),
(namespaces["html"], "aside"),
(namespaces["html"], "base"),
(namespaces["html"], "basefont"),
(namespaces["html"], "bgsound"),
(namespaces["html"], "blockquote"),
(namespaces["html"], "body"),
(namespaces["html"], "br"),
(namespaces["html"], "button"),
(namespaces["html"], "caption"),
(namespaces["html"], "center"),
(namespaces["html"], "col"),
(namespaces["html"], "colgroup"),
(namespaces["html"], "command"),
(namespaces["html"], "dd"),
(namespaces["html"], "details"),
(namespaces["html"], "dir"),
(namespaces["html"], "div"),
(namespaces["html"], "dl"),
(namespaces["html"], "dt"),
(namespaces["html"], "embed"),
(namespaces["html"], "fieldset"),
(namespaces["html"], "figure"),
(namespaces["html"], "footer"),
(namespaces["html"], "form"),
(namespaces["html"], "frame"),
(namespaces["html"], "frameset"),
(namespaces["html"], "h1"),
(namespaces["html"], "h2"),
(namespaces["html"], "h3"),
(namespaces["html"], "h4"),
(namespaces["html"], "h5"),
(namespaces["html"], "h6"),
(namespaces["html"], "head"),
(namespaces["html"], "header"),
(namespaces["html"], "hr"),
(namespaces["html"], "html"),
(namespaces["html"], "iframe"),
# Note that image is commented out in the spec as "this isn't an
# element that can end up on the stack, so it doesn't matter,"
(namespaces["html"], "image"),
(namespaces["html"], "img"),
(namespaces["html"], "input"),
(namespaces["html"], "isindex"),
(namespaces["html"], "li"),
(namespaces["html"], "link"),
(namespaces["html"], "listing"),
(namespaces["html"], "marquee"),
(namespaces["html"], "menu"),
(namespaces["html"], "meta"),
(namespaces["html"], "nav"),
(namespaces["html"], "noembed"),
(namespaces["html"], "noframes"),
(namespaces["html"], "noscript"),
(namespaces["html"], "object"),
(namespaces["html"], "ol"),
(namespaces["html"], "p"),
(namespaces["html"], "param"),
(namespaces["html"], "plaintext"),
(namespaces["html"], "pre"),
(namespaces["html"], "script"),
(namespaces["html"], "section"),
(namespaces["html"], "select"),
(namespaces["html"], "style"),
(namespaces["html"], "table"),
(namespaces["html"], "tbody"),
(namespaces["html"], "td"),
(namespaces["html"], "textarea"),
(namespaces["html"], "tfoot"),
(namespaces["html"], "th"),
(namespaces["html"], "thead"),
(namespaces["html"], "title"),
(namespaces["html"], "tr"),
(namespaces["html"], "ul"),
(namespaces["html"], "wbr"),
(namespaces["html"], "xmp"),
(namespaces["svg"], "foreignObject")
))
htmlIntegrationPointElements = frozenset((
(namespaces["mathml"], "annotaion-xml"),
(namespaces["svg"], "foreignObject"),
(namespaces["svg"], "desc"),
(namespaces["svg"], "title")
))
mathmlTextIntegrationPointElements = frozenset((
(namespaces["mathml"], "mi"),
(namespaces["mathml"], "mo"),
(namespaces["mathml"], "mn"),
(namespaces["mathml"], "ms"),
(namespaces["mathml"], "mtext")
))
adjustForeignAttributes = {
"xlink:actuate": ("xlink", "actuate", namespaces["xlink"]),
"xlink:arcrole": ("xlink", "arcrole", namespaces["xlink"]),
"xlink:href": ("xlink", "href", namespaces["xlink"]),
"xlink:role": ("xlink", "role", namespaces["xlink"]),
"xlink:show": ("xlink", "show", namespaces["xlink"]),
"xlink:title": ("xlink", "title", namespaces["xlink"]),
"xlink:type": ("xlink", "type", namespaces["xlink"]),
"xml:base": ("xml", "base", namespaces["xml"]),
"xml:lang": ("xml", "lang", namespaces["xml"]),
"xml:space": ("xml", "space", namespaces["xml"]),
"xmlns": (None, "xmlns", namespaces["xmlns"]),
"xmlns:xlink": ("xmlns", "xlink", namespaces["xmlns"])
}
unadjustForeignAttributes = dict([((ns, local), qname) for qname, (prefix, local, ns) in
adjustForeignAttributes.items()])
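# Illustrative sketch (not part of the original module): adjustForeignAttributes
# maps attribute names as written in markup inside SVG/MathML content to
# (prefix, local name, namespace URI) triples, and unadjustForeignAttributes is
# simply the reverse lookup a serializer could use, e.g.
#   adjustForeignAttributes["xlink:href"]
#       -> ("xlink", "href", "http://www.w3.org/1999/xlink")
#   unadjustForeignAttributes[(namespaces["xlink"], "href")]
#       -> "xlink:href"
def _example_adjust_foreign_attribute(name):
    """Return (prefix, local, namespace) for an attribute seen in foreign
    content, leaving ordinary attributes un-namespaced."""
    return adjustForeignAttributes.get(name, (None, name, None))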
spaceCharacters = frozenset((
"\t",
"\n",
"\u000C",
" ",
"\r"
))
tableInsertModeElements = frozenset((
"table",
"tbody",
"tfoot",
"thead",
"tr"
))
asciiLowercase = frozenset(string.ascii_lowercase)
asciiUppercase = frozenset(string.ascii_uppercase)
asciiLetters = frozenset(string.ascii_letters)
digits = frozenset(string.digits)
hexDigits = frozenset(string.hexdigits)
asciiUpper2Lower = dict([(ord(c), ord(c.lower()))
for c in string.ascii_uppercase])
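# Illustrative sketch (not part of the original module): asciiUpper2Lower is a
# translation table for str.translate() that lowercases only A-Z.  Unlike
# str.lower() it leaves non-ASCII letters alone, which is what the spec's
# "ASCII case-insensitive" comparisons call for.
def _example_ascii_lower(text):
    """ASCII-only lowercasing, e.g. "DIV" -> "div"; "İ" is left unchanged."""
    return text.translate(asciiUpper2Lower)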
# Heading elements need to be ordered
headingElements = (
"h1",
"h2",
"h3",
"h4",
"h5",
"h6"
)
voidElements = frozenset((
"base",
"command",
"event-source",
"link",
"meta",
"hr",
"br",
"img",
"embed",
"param",
"area",
"col",
"input",
"source",
"track"
))
cdataElements = frozenset(('title', 'textarea'))
rcdataElements = frozenset((
'style',
'script',
'xmp',
'iframe',
'noembed',
'noframes',
'noscript'
))
booleanAttributes = {
"": frozenset(("irrelevant",)),
"style": frozenset(("scoped",)),
"img": frozenset(("ismap",)),
"audio": frozenset(("autoplay", "controls")),
"video": frozenset(("autoplay", "controls")),
"script": frozenset(("defer", "async")),
"details": frozenset(("open",)),
"datagrid": frozenset(("multiple", "disabled")),
"command": frozenset(("hidden", "disabled", "checked", "default")),
"hr": frozenset(("noshade")),
"menu": frozenset(("autosubmit",)),
"fieldset": frozenset(("disabled", "readonly")),
"option": frozenset(("disabled", "readonly", "selected")),
"optgroup": frozenset(("disabled", "readonly")),
"button": frozenset(("disabled", "autofocus")),
"input": frozenset(("disabled", "readonly", "required", "autofocus", "checked", "ismap")),
"select": frozenset(("disabled", "readonly", "autofocus", "multiple")),
"output": frozenset(("disabled", "readonly")),
}
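# Illustrative sketch (not part of the original module): booleanAttributes maps
# a tag name (the "" key holds globally boolean attributes) to the attributes a
# serializer may minimize, e.g. emitting `<option selected>` instead of
# `selected="selected"`.
def _example_can_minimize(tag_name, attr_name):
    """Return True if attr_name may be written without a value on tag_name."""
    return (attr_name in booleanAttributes.get(tag_name, frozenset()) or
            attr_name in booleanAttributes[""])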
# entitiesWindows1252 has to be _ordered_ and needs to have an index. It
# therefore can't be a frozenset.
entitiesWindows1252 = (
8364, # 0x80 0x20AC EURO SIGN
65533, # 0x81 UNDEFINED
8218, # 0x82 0x201A SINGLE LOW-9 QUOTATION MARK
402, # 0x83 0x0192 LATIN SMALL LETTER F WITH HOOK
8222, # 0x84 0x201E DOUBLE LOW-9 QUOTATION MARK
8230, # 0x85 0x2026 HORIZONTAL ELLIPSIS
8224, # 0x86 0x2020 DAGGER
8225, # 0x87 0x2021 DOUBLE DAGGER
710, # 0x88 0x02C6 MODIFIER LETTER CIRCUMFLEX ACCENT
8240, # 0x89 0x2030 PER MILLE SIGN
352, # 0x8A 0x0160 LATIN CAPITAL LETTER S WITH CARON
8249, # 0x8B 0x2039 SINGLE LEFT-POINTING ANGLE QUOTATION MARK
338, # 0x8C 0x0152 LATIN CAPITAL LIGATURE OE
65533, # 0x8D UNDEFINED
381, # 0x8E 0x017D LATIN CAPITAL LETTER Z WITH CARON
65533, # 0x8F UNDEFINED
65533, # 0x90 UNDEFINED
8216, # 0x91 0x2018 LEFT SINGLE QUOTATION MARK
8217, # 0x92 0x2019 RIGHT SINGLE QUOTATION MARK
8220, # 0x93 0x201C LEFT DOUBLE QUOTATION MARK
8221, # 0x94 0x201D RIGHT DOUBLE QUOTATION MARK
8226, # 0x95 0x2022 BULLET
8211, # 0x96 0x2013 EN DASH
8212, # 0x97 0x2014 EM DASH
732, # 0x98 0x02DC SMALL TILDE
8482, # 0x99 0x2122 TRADE MARK SIGN
353, # 0x9A 0x0161 LATIN SMALL LETTER S WITH CARON
8250, # 0x9B 0x203A SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
339, # 0x9C 0x0153 LATIN SMALL LIGATURE OE
65533, # 0x9D UNDEFINED
382, # 0x9E 0x017E LATIN SMALL LETTER Z WITH CARON
376 # 0x9F 0x0178 LATIN CAPITAL LETTER Y WITH DIAERESIS
)
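# Illustrative sketch (not part of the original module): the 32 entries above
# map the code points 0x80-0x9F, indexed by (codepoint - 0x80), to their
# Windows-1252 interpretations, since numeric character references in that
# range are treated as Windows-1252 rather than as C1 controls.  chr() here
# assumes Python 3 (unichr() on Python 2).
def _example_windows1252_reference(codepoint):
    """Reinterpret a numeric reference in the 0x80..0x9F range."""
    if 0x80 <= codepoint <= 0x9F:
        return chr(entitiesWindows1252[codepoint - 0x80])
    return chr(codepoint)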
xmlEntities = frozenset(('lt;', 'gt;', 'amp;', 'apos;', 'quot;'))
entities = {
"AElig": "\xc6",
"AElig;": "\xc6",
"AMP": "&",
"AMP;": "&",
"Aacute": "\xc1",
"Aacute;": "\xc1",
"Abreve;": "\u0102",
"Acirc": "\xc2",
"Acirc;": "\xc2",
"Acy;": "\u0410",
"Afr;": "\U0001d504",
"Agrave": "\xc0",
"Agrave;": "\xc0",
"Alpha;": "\u0391",
"Amacr;": "\u0100",
"And;": "\u2a53",
"Aogon;": "\u0104",
"Aopf;": "\U0001d538",
"ApplyFunction;": "\u2061",
"Aring": "\xc5",
"Aring;": "\xc5",
"Ascr;": "\U0001d49c",
"Assign;": "\u2254",
"Atilde": "\xc3",
"Atilde;": "\xc3",
"Auml": "\xc4",
"Auml;": "\xc4",
"Backslash;": "\u2216",
"Barv;": "\u2ae7",
"Barwed;": "\u2306",
"Bcy;": "\u0411",
"Because;": "\u2235",
"Bernoullis;": "\u212c",
"Beta;": "\u0392",
"Bfr;": "\U0001d505",
"Bopf;": "\U0001d539",
"Breve;": "\u02d8",
"Bscr;": "\u212c",
"Bumpeq;": "\u224e",
"CHcy;": "\u0427",
"COPY": "\xa9",
"COPY;": "\xa9",
"Cacute;": "\u0106",
"Cap;": "\u22d2",
"CapitalDifferentialD;": "\u2145",
"Cayleys;": "\u212d",
"Ccaron;": "\u010c",
"Ccedil": "\xc7",
"Ccedil;": "\xc7",
"Ccirc;": "\u0108",
"Cconint;": "\u2230",
"Cdot;": "\u010a",
"Cedilla;": "\xb8",
"CenterDot;": "\xb7",
"Cfr;": "\u212d",
"Chi;": "\u03a7",
"CircleDot;": "\u2299",
"CircleMinus;": "\u2296",
"CirclePlus;": "\u2295",
"CircleTimes;": "\u2297",
"ClockwiseContourIntegral;": "\u2232",
"CloseCurlyDoubleQuote;": "\u201d",
"CloseCurlyQuote;": "\u2019",
"Colon;": "\u2237",
"Colone;": "\u2a74",
"Congruent;": "\u2261",
"Conint;": "\u222f",
"ContourIntegral;": "\u222e",
"Copf;": "\u2102",
"Coproduct;": "\u2210",
"CounterClockwiseContourIntegral;": "\u2233",
"Cross;": "\u2a2f",
"Cscr;": "\U0001d49e",
"Cup;": "\u22d3",
"CupCap;": "\u224d",
"DD;": "\u2145",
"DDotrahd;": "\u2911",
"DJcy;": "\u0402",
"DScy;": "\u0405",
"DZcy;": "\u040f",
"Dagger;": "\u2021",
"Darr;": "\u21a1",
"Dashv;": "\u2ae4",
"Dcaron;": "\u010e",
"Dcy;": "\u0414",
"Del;": "\u2207",
"Delta;": "\u0394",
"Dfr;": "\U0001d507",
"DiacriticalAcute;": "\xb4",
"DiacriticalDot;": "\u02d9",
"DiacriticalDoubleAcute;": "\u02dd",
"DiacriticalGrave;": "`",
"DiacriticalTilde;": "\u02dc",
"Diamond;": "\u22c4",
"DifferentialD;": "\u2146",
"Dopf;": "\U0001d53b",
"Dot;": "\xa8",
"DotDot;": "\u20dc",
"DotEqual;": "\u2250",
"DoubleContourIntegral;": "\u222f",
"DoubleDot;": "\xa8",
"DoubleDownArrow;": "\u21d3",
"DoubleLeftArrow;": "\u21d0",
"DoubleLeftRightArrow;": "\u21d4",
"DoubleLeftTee;": "\u2ae4",
"DoubleLongLeftArrow;": "\u27f8",
"DoubleLongLeftRightArrow;": "\u27fa",
"DoubleLongRightArrow;": "\u27f9",
"DoubleRightArrow;": "\u21d2",
"DoubleRightTee;": "\u22a8",
"DoubleUpArrow;": "\u21d1",
"DoubleUpDownArrow;": "\u21d5",
"DoubleVerticalBar;": "\u2225",
"DownArrow;": "\u2193",
"DownArrowBar;": "\u2913",
"DownArrowUpArrow;": "\u21f5",
"DownBreve;": "\u0311",
"DownLeftRightVector;": "\u2950",
"DownLeftTeeVector;": "\u295e",
"DownLeftVector;": "\u21bd",
"DownLeftVectorBar;": "\u2956",
"DownRightTeeVector;": "\u295f",
"DownRightVector;": "\u21c1",
"DownRightVectorBar;": "\u2957",
"DownTee;": "\u22a4",
"DownTeeArrow;": "\u21a7",
"Downarrow;": "\u21d3",
"Dscr;": "\U0001d49f",
"Dstrok;": "\u0110",
"ENG;": "\u014a",
"ETH": "\xd0",
"ETH;": "\xd0",
"Eacute": "\xc9",
"Eacute;": "\xc9",
"Ecaron;": "\u011a",
"Ecirc": "\xca",
"Ecirc;": "\xca",
"Ecy;": "\u042d",
"Edot;": "\u0116",
"Efr;": "\U0001d508",
"Egrave": "\xc8",
"Egrave;": "\xc8",
"Element;": "\u2208",
"Emacr;": "\u0112",
"EmptySmallSquare;": "\u25fb",
"EmptyVerySmallSquare;": "\u25ab",
"Eogon;": "\u0118",
"Eopf;": "\U0001d53c",
"Epsilon;": "\u0395",
"Equal;": "\u2a75",
"EqualTilde;": "\u2242",
"Equilibrium;": "\u21cc",
"Escr;": "\u2130",
"Esim;": "\u2a73",
"Eta;": "\u0397",
"Euml": "\xcb",
"Euml;": "\xcb",
"Exists;": "\u2203",
"ExponentialE;": "\u2147",
"Fcy;": "\u0424",
"Ffr;": "\U0001d509",
"FilledSmallSquare;": "\u25fc",
"FilledVerySmallSquare;": "\u25aa",
"Fopf;": "\U0001d53d",
"ForAll;": "\u2200",
"Fouriertrf;": "\u2131",
"Fscr;": "\u2131",
"GJcy;": "\u0403",
"GT": ">",
"GT;": ">",
"Gamma;": "\u0393",
"Gammad;": "\u03dc",
"Gbreve;": "\u011e",
"Gcedil;": "\u0122",
"Gcirc;": "\u011c",
"Gcy;": "\u0413",
"Gdot;": "\u0120",
"Gfr;": "\U0001d50a",
"Gg;": "\u22d9",
"Gopf;": "\U0001d53e",
"GreaterEqual;": "\u2265",
"GreaterEqualLess;": "\u22db",
"GreaterFullEqual;": "\u2267",
"GreaterGreater;": "\u2aa2",
"GreaterLess;": "\u2277",
"GreaterSlantEqual;": "\u2a7e",
"GreaterTilde;": "\u2273",
"Gscr;": "\U0001d4a2",
"Gt;": "\u226b",
"HARDcy;": "\u042a",
"Hacek;": "\u02c7",
"Hat;": "^",
"Hcirc;": "\u0124",
"Hfr;": "\u210c",
"HilbertSpace;": "\u210b",
"Hopf;": "\u210d",
"HorizontalLine;": "\u2500",
"Hscr;": "\u210b",
"Hstrok;": "\u0126",
"HumpDownHump;": "\u224e",
"HumpEqual;": "\u224f",
"IEcy;": "\u0415",
"IJlig;": "\u0132",
"IOcy;": "\u0401",
"Iacute": "\xcd",
"Iacute;": "\xcd",
"Icirc": "\xce",
"Icirc;": "\xce",
"Icy;": "\u0418",
"Idot;": "\u0130",
"Ifr;": "\u2111",
"Igrave": "\xcc",
"Igrave;": "\xcc",
"Im;": "\u2111",
"Imacr;": "\u012a",
"ImaginaryI;": "\u2148",
"Implies;": "\u21d2",
"Int;": "\u222c",
"Integral;": "\u222b",
"Intersection;": "\u22c2",
"InvisibleComma;": "\u2063",
"InvisibleTimes;": "\u2062",
"Iogon;": "\u012e",
"Iopf;": "\U0001d540",
"Iota;": "\u0399",
"Iscr;": "\u2110",
"Itilde;": "\u0128",
"Iukcy;": "\u0406",
"Iuml": "\xcf",
"Iuml;": "\xcf",
"Jcirc;": "\u0134",
"Jcy;": "\u0419",
"Jfr;": "\U0001d50d",
"Jopf;": "\U0001d541",
"Jscr;": "\U0001d4a5",
"Jsercy;": "\u0408",
"Jukcy;": "\u0404",
"KHcy;": "\u0425",
"KJcy;": "\u040c",
"Kappa;": "\u039a",
"Kcedil;": "\u0136",
"Kcy;": "\u041a",
"Kfr;": "\U0001d50e",
"Kopf;": "\U0001d542",
"Kscr;": "\U0001d4a6",
"LJcy;": "\u0409",
"LT": "<",
"LT;": "<",
"Lacute;": "\u0139",
"Lambda;": "\u039b",
"Lang;": "\u27ea",
"Laplacetrf;": "\u2112",
"Larr;": "\u219e",
"Lcaron;": "\u013d",
"Lcedil;": "\u013b",
"Lcy;": "\u041b",
"LeftAngleBracket;": "\u27e8",
"LeftArrow;": "\u2190",
"LeftArrowBar;": "\u21e4",
"LeftArrowRightArrow;": "\u21c6",
"LeftCeiling;": "\u2308",
"LeftDoubleBracket;": "\u27e6",
"LeftDownTeeVector;": "\u2961",
"LeftDownVector;": "\u21c3",
"LeftDownVectorBar;": "\u2959",
"LeftFloor;": "\u230a",
"LeftRightArrow;": "\u2194",
"LeftRightVector;": "\u294e",
"LeftTee;": "\u22a3",
"LeftTeeArrow;": "\u21a4",
"LeftTeeVector;": "\u295a",
"LeftTriangle;": "\u22b2",
"LeftTriangleBar;": "\u29cf",
"LeftTriangleEqual;": "\u22b4",
"LeftUpDownVector;": "\u2951",
"LeftUpTeeVector;": "\u2960",
"LeftUpVector;": "\u21bf",
"LeftUpVectorBar;": "\u2958",
"LeftVector;": "\u21bc",
"LeftVectorBar;": "\u2952",
"Leftarrow;": "\u21d0",
"Leftrightarrow;": "\u21d4",
"LessEqualGreater;": "\u22da",
"LessFullEqual;": "\u2266",
"LessGreater;": "\u2276",
"LessLess;": "\u2aa1",
"LessSlantEqual;": "\u2a7d",
"LessTilde;": "\u2272",
"Lfr;": "\U0001d50f",
"Ll;": "\u22d8",
"Lleftarrow;": "\u21da",
"Lmidot;": "\u013f",
"LongLeftArrow;": "\u27f5",
"LongLeftRightArrow;": "\u27f7",
"LongRightArrow;": "\u27f6",
"Longleftarrow;": "\u27f8",
"Longleftrightarrow;": "\u27fa",
"Longrightarrow;": "\u27f9",
"Lopf;": "\U0001d543",
"LowerLeftArrow;": "\u2199",
"LowerRightArrow;": "\u2198",
"Lscr;": "\u2112",
"Lsh;": "\u21b0",
"Lstrok;": "\u0141",
"Lt;": "\u226a",
"Map;": "\u2905",
"Mcy;": "\u041c",
"MediumSpace;": "\u205f",
"Mellintrf;": "\u2133",
"Mfr;": "\U0001d510",
"MinusPlus;": "\u2213",
"Mopf;": "\U0001d544",
"Mscr;": "\u2133",
"Mu;": "\u039c",
"NJcy;": "\u040a",
"Nacute;": "\u0143",
"Ncaron;": "\u0147",
"Ncedil;": "\u0145",
"Ncy;": "\u041d",
"NegativeMediumSpace;": "\u200b",
"NegativeThickSpace;": "\u200b",
"NegativeThinSpace;": "\u200b",
"NegativeVeryThinSpace;": "\u200b",
"NestedGreaterGreater;": "\u226b",
"NestedLessLess;": "\u226a",
"NewLine;": "\n",
"Nfr;": "\U0001d511",
"NoBreak;": "\u2060",
"NonBreakingSpace;": "\xa0",
"Nopf;": "\u2115",
"Not;": "\u2aec",
"NotCongruent;": "\u2262",
"NotCupCap;": "\u226d",
"NotDoubleVerticalBar;": "\u2226",
"NotElement;": "\u2209",
"NotEqual;": "\u2260",
"NotEqualTilde;": "\u2242\u0338",
"NotExists;": "\u2204",
"NotGreater;": "\u226f",
"NotGreaterEqual;": "\u2271",
"NotGreaterFullEqual;": "\u2267\u0338",
"NotGreaterGreater;": "\u226b\u0338",
"NotGreaterLess;": "\u2279",
"NotGreaterSlantEqual;": "\u2a7e\u0338",
"NotGreaterTilde;": "\u2275",
"NotHumpDownHump;": "\u224e\u0338",
"NotHumpEqual;": "\u224f\u0338",
"NotLeftTriangle;": "\u22ea",
"NotLeftTriangleBar;": "\u29cf\u0338",
"NotLeftTriangleEqual;": "\u22ec",
"NotLess;": "\u226e",
"NotLessEqual;": "\u2270",
"NotLessGreater;": "\u2278",
"NotLessLess;": "\u226a\u0338",
"NotLessSlantEqual;": "\u2a7d\u0338",
"NotLessTilde;": "\u2274",
"NotNestedGreaterGreater;": "\u2aa2\u0338",
"NotNestedLessLess;": "\u2aa1\u0338",
"NotPrecedes;": "\u2280",
"NotPrecedesEqual;": "\u2aaf\u0338",
"NotPrecedesSlantEqual;": "\u22e0",
"NotReverseElement;": "\u220c",
"NotRightTriangle;": "\u22eb",
"NotRightTriangleBar;": "\u29d0\u0338",
"NotRightTriangleEqual;": "\u22ed",
"NotSquareSubset;": "\u228f\u0338",
"NotSquareSubsetEqual;": "\u22e2",
"NotSquareSuperset;": "\u2290\u0338",
"NotSquareSupersetEqual;": "\u22e3",
"NotSubset;": "\u2282\u20d2",
"NotSubsetEqual;": "\u2288",
"NotSucceeds;": "\u2281",
"NotSucceedsEqual;": "\u2ab0\u0338",
"NotSucceedsSlantEqual;": "\u22e1",
"NotSucceedsTilde;": "\u227f\u0338",
"NotSuperset;": "\u2283\u20d2",
"NotSupersetEqual;": "\u2289",
"NotTilde;": "\u2241",
"NotTildeEqual;": "\u2244",
"NotTildeFullEqual;": "\u2247",
"NotTildeTilde;": "\u2249",
"NotVerticalBar;": "\u2224",
"Nscr;": "\U0001d4a9",
"Ntilde": "\xd1",
"Ntilde;": "\xd1",
"Nu;": "\u039d",
"OElig;": "\u0152",
"Oacute": "\xd3",
"Oacute;": "\xd3",
"Ocirc": "\xd4",
"Ocirc;": "\xd4",
"Ocy;": "\u041e",
"Odblac;": "\u0150",
"Ofr;": "\U0001d512",
"Ograve": "\xd2",
"Ograve;": "\xd2",
"Omacr;": "\u014c",
"Omega;": "\u03a9",
"Omicron;": "\u039f",
"Oopf;": "\U0001d546",
"OpenCurlyDoubleQuote;": "\u201c",
"OpenCurlyQuote;": "\u2018",
"Or;": "\u2a54",
"Oscr;": "\U0001d4aa",
"Oslash": "\xd8",
"Oslash;": "\xd8",
"Otilde": "\xd5",
"Otilde;": "\xd5",
"Otimes;": "\u2a37",
"Ouml": "\xd6",
"Ouml;": "\xd6",
"OverBar;": "\u203e",
"OverBrace;": "\u23de",
"OverBracket;": "\u23b4",
"OverParenthesis;": "\u23dc",
"PartialD;": "\u2202",
"Pcy;": "\u041f",
"Pfr;": "\U0001d513",
"Phi;": "\u03a6",
"Pi;": "\u03a0",
"PlusMinus;": "\xb1",
"Poincareplane;": "\u210c",
"Popf;": "\u2119",
"Pr;": "\u2abb",
"Precedes;": "\u227a",
"PrecedesEqual;": "\u2aaf",
"PrecedesSlantEqual;": "\u227c",
"PrecedesTilde;": "\u227e",
"Prime;": "\u2033",
"Product;": "\u220f",
"Proportion;": "\u2237",
"Proportional;": "\u221d",
"Pscr;": "\U0001d4ab",
"Psi;": "\u03a8",
"QUOT": "\"",
"QUOT;": "\"",
"Qfr;": "\U0001d514",
"Qopf;": "\u211a",
"Qscr;": "\U0001d4ac",
"RBarr;": "\u2910",
"REG": "\xae",
"REG;": "\xae",
"Racute;": "\u0154",
"Rang;": "\u27eb",
"Rarr;": "\u21a0",
"Rarrtl;": "\u2916",
"Rcaron;": "\u0158",
"Rcedil;": "\u0156",
"Rcy;": "\u0420",
"Re;": "\u211c",
"ReverseElement;": "\u220b",
"ReverseEquilibrium;": "\u21cb",
"ReverseUpEquilibrium;": "\u296f",
"Rfr;": "\u211c",
"Rho;": "\u03a1",
"RightAngleBracket;": "\u27e9",
"RightArrow;": "\u2192",
"RightArrowBar;": "\u21e5",
"RightArrowLeftArrow;": "\u21c4",
"RightCeiling;": "\u2309",
"RightDoubleBracket;": "\u27e7",
"RightDownTeeVector;": "\u295d",
"RightDownVector;": "\u21c2",
"RightDownVectorBar;": "\u2955",
"RightFloor;": "\u230b",
"RightTee;": "\u22a2",
"RightTeeArrow;": "\u21a6",
"RightTeeVector;": "\u295b",
"RightTriangle;": "\u22b3",
"RightTriangleBar;": "\u29d0",
"RightTriangleEqual;": "\u22b5",
"RightUpDownVector;": "\u294f",
"RightUpTeeVector;": "\u295c",
"RightUpVector;": "\u21be",
"RightUpVectorBar;": "\u2954",
"RightVector;": "\u21c0",
"RightVectorBar;": "\u2953",
"Rightarrow;": "\u21d2",
"Ropf;": "\u211d",
"RoundImplies;": "\u2970",
"Rrightarrow;": "\u21db",
"Rscr;": "\u211b",
"Rsh;": "\u21b1",
"RuleDelayed;": "\u29f4",
"SHCHcy;": "\u0429",
"SHcy;": "\u0428",
"SOFTcy;": "\u042c",
"Sacute;": "\u015a",
"Sc;": "\u2abc",
"Scaron;": "\u0160",
"Scedil;": "\u015e",
"Scirc;": "\u015c",
"Scy;": "\u0421",
"Sfr;": "\U0001d516",
"ShortDownArrow;": "\u2193",
"ShortLeftArrow;": "\u2190",
"ShortRightArrow;": "\u2192",
"ShortUpArrow;": "\u2191",
"Sigma;": "\u03a3",
"SmallCircle;": "\u2218",
"Sopf;": "\U0001d54a",
"Sqrt;": "\u221a",
"Square;": "\u25a1",
"SquareIntersection;": "\u2293",
"SquareSubset;": "\u228f",
"SquareSubsetEqual;": "\u2291",
"SquareSuperset;": "\u2290",
"SquareSupersetEqual;": "\u2292",
"SquareUnion;": "\u2294",
"Sscr;": "\U0001d4ae",
"Star;": "\u22c6",
"Sub;": "\u22d0",
"Subset;": "\u22d0",
"SubsetEqual;": "\u2286",
"Succeeds;": "\u227b",
"SucceedsEqual;": "\u2ab0",
"SucceedsSlantEqual;": "\u227d",
"SucceedsTilde;": "\u227f",
"SuchThat;": "\u220b",
"Sum;": "\u2211",
"Sup;": "\u22d1",
"Superset;": "\u2283",
"SupersetEqual;": "\u2287",
"Supset;": "\u22d1",
"THORN": "\xde",
"THORN;": "\xde",
"TRADE;": "\u2122",
"TSHcy;": "\u040b",
"TScy;": "\u0426",
"Tab;": "\t",
"Tau;": "\u03a4",
"Tcaron;": "\u0164",
"Tcedil;": "\u0162",
"Tcy;": "\u0422",
"Tfr;": "\U0001d517",
"Therefore;": "\u2234",
"Theta;": "\u0398",
"ThickSpace;": "\u205f\u200a",
"ThinSpace;": "\u2009",
"Tilde;": "\u223c",
"TildeEqual;": "\u2243",
"TildeFullEqual;": "\u2245",
"TildeTilde;": "\u2248",
"Topf;": "\U0001d54b",
"TripleDot;": "\u20db",
"Tscr;": "\U0001d4af",
"Tstrok;": "\u0166",
"Uacute": "\xda",
"Uacute;": "\xda",
"Uarr;": "\u219f",
"Uarrocir;": "\u2949",
"Ubrcy;": "\u040e",
"Ubreve;": "\u016c",
"Ucirc": "\xdb",
"Ucirc;": "\xdb",
"Ucy;": "\u0423",
"Udblac;": "\u0170",
"Ufr;": "\U0001d518",
"Ugrave": "\xd9",
"Ugrave;": "\xd9",
"Umacr;": "\u016a",
"UnderBar;": "_",
"UnderBrace;": "\u23df",
"UnderBracket;": "\u23b5",
"UnderParenthesis;": "\u23dd",
"Union;": "\u22c3",
"UnionPlus;": "\u228e",
"Uogon;": "\u0172",
"Uopf;": "\U0001d54c",
"UpArrow;": "\u2191",
"UpArrowBar;": "\u2912",
"UpArrowDownArrow;": "\u21c5",
"UpDownArrow;": "\u2195",
"UpEquilibrium;": "\u296e",
"UpTee;": "\u22a5",
"UpTeeArrow;": "\u21a5",
"Uparrow;": "\u21d1",
"Updownarrow;": "\u21d5",
"UpperLeftArrow;": "\u2196",
"UpperRightArrow;": "\u2197",
"Upsi;": "\u03d2",
"Upsilon;": "\u03a5",
"Uring;": "\u016e",
"Uscr;": "\U0001d4b0",
"Utilde;": "\u0168",
"Uuml": "\xdc",
"Uuml;": "\xdc",
"VDash;": "\u22ab",
"Vbar;": "\u2aeb",
"Vcy;": "\u0412",
"Vdash;": "\u22a9",
"Vdashl;": "\u2ae6",
"Vee;": "\u22c1",
"Verbar;": "\u2016",
"Vert;": "\u2016",
"VerticalBar;": "\u2223",
"VerticalLine;": "|",
"VerticalSeparator;": "\u2758",
"VerticalTilde;": "\u2240",
"VeryThinSpace;": "\u200a",
"Vfr;": "\U0001d519",
"Vopf;": "\U0001d54d",
"Vscr;": "\U0001d4b1",
"Vvdash;": "\u22aa",
"Wcirc;": "\u0174",
"Wedge;": "\u22c0",
"Wfr;": "\U0001d51a",
"Wopf;": "\U0001d54e",
"Wscr;": "\U0001d4b2",
"Xfr;": "\U0001d51b",
"Xi;": "\u039e",
"Xopf;": "\U0001d54f",
"Xscr;": "\U0001d4b3",
"YAcy;": "\u042f",
"YIcy;": "\u0407",
"YUcy;": "\u042e",
"Yacute": "\xdd",
"Yacute;": "\xdd",
"Ycirc;": "\u0176",
"Ycy;": "\u042b",
"Yfr;": "\U0001d51c",
"Yopf;": "\U0001d550",
"Yscr;": "\U0001d4b4",
"Yuml;": "\u0178",
"ZHcy;": "\u0416",
"Zacute;": "\u0179",
"Zcaron;": "\u017d",
"Zcy;": "\u0417",
"Zdot;": "\u017b",
"ZeroWidthSpace;": "\u200b",
"Zeta;": "\u0396",
"Zfr;": "\u2128",
"Zopf;": "\u2124",
"Zscr;": "\U0001d4b5",
"aacute": "\xe1",
"aacute;": "\xe1",
"abreve;": "\u0103",
"ac;": "\u223e",
"acE;": "\u223e\u0333",
"acd;": "\u223f",
"acirc": "\xe2",
"acirc;": "\xe2",
"acute": "\xb4",
"acute;": "\xb4",
"acy;": "\u0430",
"aelig": "\xe6",
"aelig;": "\xe6",
"af;": "\u2061",
"afr;": "\U0001d51e",
"agrave": "\xe0",
"agrave;": "\xe0",
"alefsym;": "\u2135",
"aleph;": "\u2135",
"alpha;": "\u03b1",
"amacr;": "\u0101",
"amalg;": "\u2a3f",
"amp": "&",
"amp;": "&",
"and;": "\u2227",
"andand;": "\u2a55",
"andd;": "\u2a5c",
"andslope;": "\u2a58",
"andv;": "\u2a5a",
"ang;": "\u2220",
"ange;": "\u29a4",
"angle;": "\u2220",
"angmsd;": "\u2221",
"angmsdaa;": "\u29a8",
"angmsdab;": "\u29a9",
"angmsdac;": "\u29aa",
"angmsdad;": "\u29ab",
"angmsdae;": "\u29ac",
"angmsdaf;": "\u29ad",
"angmsdag;": "\u29ae",
"angmsdah;": "\u29af",
"angrt;": "\u221f",
"angrtvb;": "\u22be",
"angrtvbd;": "\u299d",
"angsph;": "\u2222",
"angst;": "\xc5",
"angzarr;": "\u237c",
"aogon;": "\u0105",
"aopf;": "\U0001d552",
"ap;": "\u2248",
"apE;": "\u2a70",
"apacir;": "\u2a6f",
"ape;": "\u224a",
"apid;": "\u224b",
"apos;": "'",
"approx;": "\u2248",
"approxeq;": "\u224a",
"aring": "\xe5",
"aring;": "\xe5",
"ascr;": "\U0001d4b6",
"ast;": "*",
"asymp;": "\u2248",
"asympeq;": "\u224d",
"atilde": "\xe3",
"atilde;": "\xe3",
"auml": "\xe4",
"auml;": "\xe4",
"awconint;": "\u2233",
"awint;": "\u2a11",
"bNot;": "\u2aed",
"backcong;": "\u224c",
"backepsilon;": "\u03f6",
"backprime;": "\u2035",
"backsim;": "\u223d",
"backsimeq;": "\u22cd",
"barvee;": "\u22bd",
"barwed;": "\u2305",
"barwedge;": "\u2305",
"bbrk;": "\u23b5",
"bbrktbrk;": "\u23b6",
"bcong;": "\u224c",
"bcy;": "\u0431",
"bdquo;": "\u201e",
"becaus;": "\u2235",
"because;": "\u2235",
"bemptyv;": "\u29b0",
"bepsi;": "\u03f6",
"bernou;": "\u212c",
"beta;": "\u03b2",
"beth;": "\u2136",
"between;": "\u226c",
"bfr;": "\U0001d51f",
"bigcap;": "\u22c2",
"bigcirc;": "\u25ef",
"bigcup;": "\u22c3",
"bigodot;": "\u2a00",
"bigoplus;": "\u2a01",
"bigotimes;": "\u2a02",
"bigsqcup;": "\u2a06",
"bigstar;": "\u2605",
"bigtriangledown;": "\u25bd",
"bigtriangleup;": "\u25b3",
"biguplus;": "\u2a04",
"bigvee;": "\u22c1",
"bigwedge;": "\u22c0",
"bkarow;": "\u290d",
"blacklozenge;": "\u29eb",
"blacksquare;": "\u25aa",
"blacktriangle;": "\u25b4",
"blacktriangledown;": "\u25be",
"blacktriangleleft;": "\u25c2",
"blacktriangleright;": "\u25b8",
"blank;": "\u2423",
"blk12;": "\u2592",
"blk14;": "\u2591",
"blk34;": "\u2593",
"block;": "\u2588",
"bne;": "=\u20e5",
"bnequiv;": "\u2261\u20e5",
"bnot;": "\u2310",
"bopf;": "\U0001d553",
"bot;": "\u22a5",
"bottom;": "\u22a5",
"bowtie;": "\u22c8",
"boxDL;": "\u2557",
"boxDR;": "\u2554",
"boxDl;": "\u2556",
"boxDr;": "\u2553",
"boxH;": "\u2550",
"boxHD;": "\u2566",
"boxHU;": "\u2569",
"boxHd;": "\u2564",
"boxHu;": "\u2567",
"boxUL;": "\u255d",
"boxUR;": "\u255a",
"boxUl;": "\u255c",
"boxUr;": "\u2559",
"boxV;": "\u2551",
"boxVH;": "\u256c",
"boxVL;": "\u2563",
"boxVR;": "\u2560",
"boxVh;": "\u256b",
"boxVl;": "\u2562",
"boxVr;": "\u255f",
"boxbox;": "\u29c9",
"boxdL;": "\u2555",
"boxdR;": "\u2552",
"boxdl;": "\u2510",
"boxdr;": "\u250c",
"boxh;": "\u2500",
"boxhD;": "\u2565",
"boxhU;": "\u2568",
"boxhd;": "\u252c",
"boxhu;": "\u2534",
"boxminus;": "\u229f",
"boxplus;": "\u229e",
"boxtimes;": "\u22a0",
"boxuL;": "\u255b",
"boxuR;": "\u2558",
"boxul;": "\u2518",
"boxur;": "\u2514",
"boxv;": "\u2502",
"boxvH;": "\u256a",
"boxvL;": "\u2561",
"boxvR;": "\u255e",
"boxvh;": "\u253c",
"boxvl;": "\u2524",
"boxvr;": "\u251c",
"bprime;": "\u2035",
"breve;": "\u02d8",
"brvbar": "\xa6",
"brvbar;": "\xa6",
"bscr;": "\U0001d4b7",
"bsemi;": "\u204f",
"bsim;": "\u223d",
"bsime;": "\u22cd",
"bsol;": "\\",
"bsolb;": "\u29c5",
"bsolhsub;": "\u27c8",
"bull;": "\u2022",
"bullet;": "\u2022",
"bump;": "\u224e",
"bumpE;": "\u2aae",
"bumpe;": "\u224f",
"bumpeq;": "\u224f",
"cacute;": "\u0107",
"cap;": "\u2229",
"capand;": "\u2a44",
"capbrcup;": "\u2a49",
"capcap;": "\u2a4b",
"capcup;": "\u2a47",
"capdot;": "\u2a40",
"caps;": "\u2229\ufe00",
"caret;": "\u2041",
"caron;": "\u02c7",
"ccaps;": "\u2a4d",
"ccaron;": "\u010d",
"ccedil": "\xe7",
"ccedil;": "\xe7",
"ccirc;": "\u0109",
"ccups;": "\u2a4c",
"ccupssm;": "\u2a50",
"cdot;": "\u010b",
"cedil": "\xb8",
"cedil;": "\xb8",
"cemptyv;": "\u29b2",
"cent": "\xa2",
"cent;": "\xa2",
"centerdot;": "\xb7",
"cfr;": "\U0001d520",
"chcy;": "\u0447",
"check;": "\u2713",
"checkmark;": "\u2713",
"chi;": "\u03c7",
"cir;": "\u25cb",
"cirE;": "\u29c3",
"circ;": "\u02c6",
"circeq;": "\u2257",
"circlearrowleft;": "\u21ba",
"circlearrowright;": "\u21bb",
"circledR;": "\xae",
"circledS;": "\u24c8",
"circledast;": "\u229b",
"circledcirc;": "\u229a",
"circleddash;": "\u229d",
"cire;": "\u2257",
"cirfnint;": "\u2a10",
"cirmid;": "\u2aef",
"cirscir;": "\u29c2",
"clubs;": "\u2663",
"clubsuit;": "\u2663",
"colon;": ":",
"colone;": "\u2254",
"coloneq;": "\u2254",
"comma;": ",",
"commat;": "@",
"comp;": "\u2201",
"compfn;": "\u2218",
"complement;": "\u2201",
"complexes;": "\u2102",
"cong;": "\u2245",
"congdot;": "\u2a6d",
"conint;": "\u222e",
"copf;": "\U0001d554",
"coprod;": "\u2210",
"copy": "\xa9",
"copy;": "\xa9",
"copysr;": "\u2117",
"crarr;": "\u21b5",
"cross;": "\u2717",
"cscr;": "\U0001d4b8",
"csub;": "\u2acf",
"csube;": "\u2ad1",
"csup;": "\u2ad0",
"csupe;": "\u2ad2",
"ctdot;": "\u22ef",
"cudarrl;": "\u2938",
"cudarrr;": "\u2935",
"cuepr;": "\u22de",
"cuesc;": "\u22df",
"cularr;": "\u21b6",
"cularrp;": "\u293d",
"cup;": "\u222a",
"cupbrcap;": "\u2a48",
"cupcap;": "\u2a46",
"cupcup;": "\u2a4a",
"cupdot;": "\u228d",
"cupor;": "\u2a45",
"cups;": "\u222a\ufe00",
"curarr;": "\u21b7",
"curarrm;": "\u293c",
"curlyeqprec;": "\u22de",
"curlyeqsucc;": "\u22df",
"curlyvee;": "\u22ce",
"curlywedge;": "\u22cf",
"curren": "\xa4",
"curren;": "\xa4",
"curvearrowleft;": "\u21b6",
"curvearrowright;": "\u21b7",
"cuvee;": "\u22ce",
"cuwed;": "\u22cf",
"cwconint;": "\u2232",
"cwint;": "\u2231",
"cylcty;": "\u232d",
"dArr;": "\u21d3",
"dHar;": "\u2965",
"dagger;": "\u2020",
"daleth;": "\u2138",
"darr;": "\u2193",
"dash;": "\u2010",
"dashv;": "\u22a3",
"dbkarow;": "\u290f",
"dblac;": "\u02dd",
"dcaron;": "\u010f",
"dcy;": "\u0434",
"dd;": "\u2146",
"ddagger;": "\u2021",
"ddarr;": "\u21ca",
"ddotseq;": "\u2a77",
"deg": "\xb0",
"deg;": "\xb0",
"delta;": "\u03b4",
"demptyv;": "\u29b1",
"dfisht;": "\u297f",
"dfr;": "\U0001d521",
"dharl;": "\u21c3",
"dharr;": "\u21c2",
"diam;": "\u22c4",
"diamond;": "\u22c4",
"diamondsuit;": "\u2666",
"diams;": "\u2666",
"die;": "\xa8",
"digamma;": "\u03dd",
"disin;": "\u22f2",
"div;": "\xf7",
"divide": "\xf7",
"divide;": "\xf7",
"divideontimes;": "\u22c7",
"divonx;": "\u22c7",
"djcy;": "\u0452",
"dlcorn;": "\u231e",
"dlcrop;": "\u230d",
"dollar;": "$",
"dopf;": "\U0001d555",
"dot;": "\u02d9",
"doteq;": "\u2250",
"doteqdot;": "\u2251",
"dotminus;": "\u2238",
"dotplus;": "\u2214",
"dotsquare;": "\u22a1",
"doublebarwedge;": "\u2306",
"downarrow;": "\u2193",
"downdownarrows;": "\u21ca",
"downharpoonleft;": "\u21c3",
"downharpoonright;": "\u21c2",
"drbkarow;": "\u2910",
"drcorn;": "\u231f",
"drcrop;": "\u230c",
"dscr;": "\U0001d4b9",
"dscy;": "\u0455",
"dsol;": "\u29f6",
"dstrok;": "\u0111",
"dtdot;": "\u22f1",
"dtri;": "\u25bf",
"dtrif;": "\u25be",
"duarr;": "\u21f5",
"duhar;": "\u296f",
"dwangle;": "\u29a6",
"dzcy;": "\u045f",
"dzigrarr;": "\u27ff",
"eDDot;": "\u2a77",
"eDot;": "\u2251",
"eacute": "\xe9",
"eacute;": "\xe9",
"easter;": "\u2a6e",
"ecaron;": "\u011b",
"ecir;": "\u2256",
"ecirc": "\xea",
"ecirc;": "\xea",
"ecolon;": "\u2255",
"ecy;": "\u044d",
"edot;": "\u0117",
"ee;": "\u2147",
"efDot;": "\u2252",
"efr;": "\U0001d522",
"eg;": "\u2a9a",
"egrave": "\xe8",
"egrave;": "\xe8",
"egs;": "\u2a96",
"egsdot;": "\u2a98",
"el;": "\u2a99",
"elinters;": "\u23e7",
"ell;": "\u2113",
"els;": "\u2a95",
"elsdot;": "\u2a97",
"emacr;": "\u0113",
"empty;": "\u2205",
"emptyset;": "\u2205",
"emptyv;": "\u2205",
"emsp13;": "\u2004",
"emsp14;": "\u2005",
"emsp;": "\u2003",
"eng;": "\u014b",
"ensp;": "\u2002",
"eogon;": "\u0119",
"eopf;": "\U0001d556",
"epar;": "\u22d5",
"eparsl;": "\u29e3",
"eplus;": "\u2a71",
"epsi;": "\u03b5",
"epsilon;": "\u03b5",
"epsiv;": "\u03f5",
"eqcirc;": "\u2256",
"eqcolon;": "\u2255",
"eqsim;": "\u2242",
"eqslantgtr;": "\u2a96",
"eqslantless;": "\u2a95",
"equals;": "=",
"equest;": "\u225f",
"equiv;": "\u2261",
"equivDD;": "\u2a78",
"eqvparsl;": "\u29e5",
"erDot;": "\u2253",
"erarr;": "\u2971",
"escr;": "\u212f",
"esdot;": "\u2250",
"esim;": "\u2242",
"eta;": "\u03b7",
"eth": "\xf0",
"eth;": "\xf0",
"euml": "\xeb",
"euml;": "\xeb",
"euro;": "\u20ac",
"excl;": "!",
"exist;": "\u2203",
"expectation;": "\u2130",
"exponentiale;": "\u2147",
"fallingdotseq;": "\u2252",
"fcy;": "\u0444",
"female;": "\u2640",
"ffilig;": "\ufb03",
"fflig;": "\ufb00",
"ffllig;": "\ufb04",
"ffr;": "\U0001d523",
"filig;": "\ufb01",
"fjlig;": "fj",
"flat;": "\u266d",
"fllig;": "\ufb02",
"fltns;": "\u25b1",
"fnof;": "\u0192",
"fopf;": "\U0001d557",
"forall;": "\u2200",
"fork;": "\u22d4",
"forkv;": "\u2ad9",
"fpartint;": "\u2a0d",
"frac12": "\xbd",
"frac12;": "\xbd",
"frac13;": "\u2153",
"frac14": "\xbc",
"frac14;": "\xbc",
"frac15;": "\u2155",
"frac16;": "\u2159",
"frac18;": "\u215b",
"frac23;": "\u2154",
"frac25;": "\u2156",
"frac34": "\xbe",
"frac34;": "\xbe",
"frac35;": "\u2157",
"frac38;": "\u215c",
"frac45;": "\u2158",
"frac56;": "\u215a",
"frac58;": "\u215d",
"frac78;": "\u215e",
"frasl;": "\u2044",
"frown;": "\u2322",
"fscr;": "\U0001d4bb",
"gE;": "\u2267",
"gEl;": "\u2a8c",
"gacute;": "\u01f5",
"gamma;": "\u03b3",
"gammad;": "\u03dd",
"gap;": "\u2a86",
"gbreve;": "\u011f",
"gcirc;": "\u011d",
"gcy;": "\u0433",
"gdot;": "\u0121",
"ge;": "\u2265",
"gel;": "\u22db",
"geq;": "\u2265",
"geqq;": "\u2267",
"geqslant;": "\u2a7e",
"ges;": "\u2a7e",
"gescc;": "\u2aa9",
"gesdot;": "\u2a80",
"gesdoto;": "\u2a82",
"gesdotol;": "\u2a84",
"gesl;": "\u22db\ufe00",
"gesles;": "\u2a94",
"gfr;": "\U0001d524",
"gg;": "\u226b",
"ggg;": "\u22d9",
"gimel;": "\u2137",
"gjcy;": "\u0453",
"gl;": "\u2277",
"glE;": "\u2a92",
"gla;": "\u2aa5",
"glj;": "\u2aa4",
"gnE;": "\u2269",
"gnap;": "\u2a8a",
"gnapprox;": "\u2a8a",
"gne;": "\u2a88",
"gneq;": "\u2a88",
"gneqq;": "\u2269",
"gnsim;": "\u22e7",
"gopf;": "\U0001d558",
"grave;": "`",
"gscr;": "\u210a",
"gsim;": "\u2273",
"gsime;": "\u2a8e",
"gsiml;": "\u2a90",
"gt": ">",
"gt;": ">",
"gtcc;": "\u2aa7",
"gtcir;": "\u2a7a",
"gtdot;": "\u22d7",
"gtlPar;": "\u2995",
"gtquest;": "\u2a7c",
"gtrapprox;": "\u2a86",
"gtrarr;": "\u2978",
"gtrdot;": "\u22d7",
"gtreqless;": "\u22db",
"gtreqqless;": "\u2a8c",
"gtrless;": "\u2277",
"gtrsim;": "\u2273",
"gvertneqq;": "\u2269\ufe00",
"gvnE;": "\u2269\ufe00",
"hArr;": "\u21d4",
"hairsp;": "\u200a",
"half;": "\xbd",
"hamilt;": "\u210b",
"hardcy;": "\u044a",
"harr;": "\u2194",
"harrcir;": "\u2948",
"harrw;": "\u21ad",
"hbar;": "\u210f",
"hcirc;": "\u0125",
"hearts;": "\u2665",
"heartsuit;": "\u2665",
"hellip;": "\u2026",
"hercon;": "\u22b9",
"hfr;": "\U0001d525",
"hksearow;": "\u2925",
"hkswarow;": "\u2926",
"hoarr;": "\u21ff",
"homtht;": "\u223b",
"hookleftarrow;": "\u21a9",
"hookrightarrow;": "\u21aa",
"hopf;": "\U0001d559",
"horbar;": "\u2015",
"hscr;": "\U0001d4bd",
"hslash;": "\u210f",
"hstrok;": "\u0127",
"hybull;": "\u2043",
"hyphen;": "\u2010",
"iacute": "\xed",
"iacute;": "\xed",
"ic;": "\u2063",
"icirc": "\xee",
"icirc;": "\xee",
"icy;": "\u0438",
"iecy;": "\u0435",
"iexcl": "\xa1",
"iexcl;": "\xa1",
"iff;": "\u21d4",
"ifr;": "\U0001d526",
"igrave": "\xec",
"igrave;": "\xec",
"ii;": "\u2148",
"iiiint;": "\u2a0c",
"iiint;": "\u222d",
"iinfin;": "\u29dc",
"iiota;": "\u2129",
"ijlig;": "\u0133",
"imacr;": "\u012b",
"image;": "\u2111",
"imagline;": "\u2110",
"imagpart;": "\u2111",
"imath;": "\u0131",
"imof;": "\u22b7",
"imped;": "\u01b5",
"in;": "\u2208",
"incare;": "\u2105",
"infin;": "\u221e",
"infintie;": "\u29dd",
"inodot;": "\u0131",
"int;": "\u222b",
"intcal;": "\u22ba",
"integers;": "\u2124",
"intercal;": "\u22ba",
"intlarhk;": "\u2a17",
"intprod;": "\u2a3c",
"iocy;": "\u0451",
"iogon;": "\u012f",
"iopf;": "\U0001d55a",
"iota;": "\u03b9",
"iprod;": "\u2a3c",
"iquest": "\xbf",
"iquest;": "\xbf",
"iscr;": "\U0001d4be",
"isin;": "\u2208",
"isinE;": "\u22f9",
"isindot;": "\u22f5",
"isins;": "\u22f4",
"isinsv;": "\u22f3",
"isinv;": "\u2208",
"it;": "\u2062",
"itilde;": "\u0129",
"iukcy;": "\u0456",
"iuml": "\xef",
"iuml;": "\xef",
"jcirc;": "\u0135",
"jcy;": "\u0439",
"jfr;": "\U0001d527",
"jmath;": "\u0237",
"jopf;": "\U0001d55b",
"jscr;": "\U0001d4bf",
"jsercy;": "\u0458",
"jukcy;": "\u0454",
"kappa;": "\u03ba",
"kappav;": "\u03f0",
"kcedil;": "\u0137",
"kcy;": "\u043a",
"kfr;": "\U0001d528",
"kgreen;": "\u0138",
"khcy;": "\u0445",
"kjcy;": "\u045c",
"kopf;": "\U0001d55c",
"kscr;": "\U0001d4c0",
"lAarr;": "\u21da",
"lArr;": "\u21d0",
"lAtail;": "\u291b",
"lBarr;": "\u290e",
"lE;": "\u2266",
"lEg;": "\u2a8b",
"lHar;": "\u2962",
"lacute;": "\u013a",
"laemptyv;": "\u29b4",
"lagran;": "\u2112",
"lambda;": "\u03bb",
"lang;": "\u27e8",
"langd;": "\u2991",
"langle;": "\u27e8",
"lap;": "\u2a85",
"laquo": "\xab",
"laquo;": "\xab",
"larr;": "\u2190",
"larrb;": "\u21e4",
"larrbfs;": "\u291f",
"larrfs;": "\u291d",
"larrhk;": "\u21a9",
"larrlp;": "\u21ab",
"larrpl;": "\u2939",
"larrsim;": "\u2973",
"larrtl;": "\u21a2",
"lat;": "\u2aab",
"latail;": "\u2919",
"late;": "\u2aad",
"lates;": "\u2aad\ufe00",
"lbarr;": "\u290c",
"lbbrk;": "\u2772",
"lbrace;": "{",
"lbrack;": "[",
"lbrke;": "\u298b",
"lbrksld;": "\u298f",
"lbrkslu;": "\u298d",
"lcaron;": "\u013e",
"lcedil;": "\u013c",
"lceil;": "\u2308",
"lcub;": "{",
"lcy;": "\u043b",
"ldca;": "\u2936",
"ldquo;": "\u201c",
"ldquor;": "\u201e",
"ldrdhar;": "\u2967",
"ldrushar;": "\u294b",
"ldsh;": "\u21b2",
"le;": "\u2264",
"leftarrow;": "\u2190",
"leftarrowtail;": "\u21a2",
"leftharpoondown;": "\u21bd",
"leftharpoonup;": "\u21bc",
"leftleftarrows;": "\u21c7",
"leftrightarrow;": "\u2194",
"leftrightarrows;": "\u21c6",
"leftrightharpoons;": "\u21cb",
"leftrightsquigarrow;": "\u21ad",
"leftthreetimes;": "\u22cb",
"leg;": "\u22da",
"leq;": "\u2264",
"leqq;": "\u2266",
"leqslant;": "\u2a7d",
"les;": "\u2a7d",
"lescc;": "\u2aa8",
"lesdot;": "\u2a7f",
"lesdoto;": "\u2a81",
"lesdotor;": "\u2a83",
"lesg;": "\u22da\ufe00",
"lesges;": "\u2a93",
"lessapprox;": "\u2a85",
"lessdot;": "\u22d6",
"lesseqgtr;": "\u22da",
"lesseqqgtr;": "\u2a8b",
"lessgtr;": "\u2276",
"lesssim;": "\u2272",
"lfisht;": "\u297c",
"lfloor;": "\u230a",
"lfr;": "\U0001d529",
"lg;": "\u2276",
"lgE;": "\u2a91",
"lhard;": "\u21bd",
"lharu;": "\u21bc",
"lharul;": "\u296a",
"lhblk;": "\u2584",
"ljcy;": "\u0459",
"ll;": "\u226a",
"llarr;": "\u21c7",
"llcorner;": "\u231e",
"llhard;": "\u296b",
"lltri;": "\u25fa",
"lmidot;": "\u0140",
"lmoust;": "\u23b0",
"lmoustache;": "\u23b0",
"lnE;": "\u2268",
"lnap;": "\u2a89",
"lnapprox;": "\u2a89",
"lne;": "\u2a87",
"lneq;": "\u2a87",
"lneqq;": "\u2268",
"lnsim;": "\u22e6",
"loang;": "\u27ec",
"loarr;": "\u21fd",
"lobrk;": "\u27e6",
"longleftarrow;": "\u27f5",
"longleftrightarrow;": "\u27f7",
"longmapsto;": "\u27fc",
"longrightarrow;": "\u27f6",
"looparrowleft;": "\u21ab",
"looparrowright;": "\u21ac",
"lopar;": "\u2985",
"lopf;": "\U0001d55d",
"loplus;": "\u2a2d",
"lotimes;": "\u2a34",
"lowast;": "\u2217",
"lowbar;": "_",
"loz;": "\u25ca",
"lozenge;": "\u25ca",
"lozf;": "\u29eb",
"lpar;": "(",
"lparlt;": "\u2993",
"lrarr;": "\u21c6",
"lrcorner;": "\u231f",
"lrhar;": "\u21cb",
"lrhard;": "\u296d",
"lrm;": "\u200e",
"lrtri;": "\u22bf",
"lsaquo;": "\u2039",
"lscr;": "\U0001d4c1",
"lsh;": "\u21b0",
"lsim;": "\u2272",
"lsime;": "\u2a8d",
"lsimg;": "\u2a8f",
"lsqb;": "[",
"lsquo;": "\u2018",
"lsquor;": "\u201a",
"lstrok;": "\u0142",
"lt": "<",
"lt;": "<",
"ltcc;": "\u2aa6",
"ltcir;": "\u2a79",
"ltdot;": "\u22d6",
"lthree;": "\u22cb",
"ltimes;": "\u22c9",
"ltlarr;": "\u2976",
"ltquest;": "\u2a7b",
"ltrPar;": "\u2996",
"ltri;": "\u25c3",
"ltrie;": "\u22b4",
"ltrif;": "\u25c2",
"lurdshar;": "\u294a",
"luruhar;": "\u2966",
"lvertneqq;": "\u2268\ufe00",
"lvnE;": "\u2268\ufe00",
"mDDot;": "\u223a",
"macr": "\xaf",
"macr;": "\xaf",
"male;": "\u2642",
"malt;": "\u2720",
"maltese;": "\u2720",
"map;": "\u21a6",
"mapsto;": "\u21a6",
"mapstodown;": "\u21a7",
"mapstoleft;": "\u21a4",
"mapstoup;": "\u21a5",
"marker;": "\u25ae",
"mcomma;": "\u2a29",
"mcy;": "\u043c",
"mdash;": "\u2014",
"measuredangle;": "\u2221",
"mfr;": "\U0001d52a",
"mho;": "\u2127",
"micro": "\xb5",
"micro;": "\xb5",
"mid;": "\u2223",
"midast;": "*",
"midcir;": "\u2af0",
"middot": "\xb7",
"middot;": "\xb7",
"minus;": "\u2212",
"minusb;": "\u229f",
"minusd;": "\u2238",
"minusdu;": "\u2a2a",
"mlcp;": "\u2adb",
"mldr;": "\u2026",
"mnplus;": "\u2213",
"models;": "\u22a7",
"mopf;": "\U0001d55e",
"mp;": "\u2213",
"mscr;": "\U0001d4c2",
"mstpos;": "\u223e",
"mu;": "\u03bc",
"multimap;": "\u22b8",
"mumap;": "\u22b8",
"nGg;": "\u22d9\u0338",
"nGt;": "\u226b\u20d2",
"nGtv;": "\u226b\u0338",
"nLeftarrow;": "\u21cd",
"nLeftrightarrow;": "\u21ce",
"nLl;": "\u22d8\u0338",
"nLt;": "\u226a\u20d2",
"nLtv;": "\u226a\u0338",
"nRightarrow;": "\u21cf",
"nVDash;": "\u22af",
"nVdash;": "\u22ae",
"nabla;": "\u2207",
"nacute;": "\u0144",
"nang;": "\u2220\u20d2",
"nap;": "\u2249",
"napE;": "\u2a70\u0338",
"napid;": "\u224b\u0338",
"napos;": "\u0149",
"napprox;": "\u2249",
"natur;": "\u266e",
"natural;": "\u266e",
"naturals;": "\u2115",
"nbsp": "\xa0",
"nbsp;": "\xa0",
"nbump;": "\u224e\u0338",
"nbumpe;": "\u224f\u0338",
"ncap;": "\u2a43",
"ncaron;": "\u0148",
"ncedil;": "\u0146",
"ncong;": "\u2247",
"ncongdot;": "\u2a6d\u0338",
"ncup;": "\u2a42",
"ncy;": "\u043d",
"ndash;": "\u2013",
"ne;": "\u2260",
"neArr;": "\u21d7",
"nearhk;": "\u2924",
"nearr;": "\u2197",
"nearrow;": "\u2197",
"nedot;": "\u2250\u0338",
"nequiv;": "\u2262",
"nesear;": "\u2928",
"nesim;": "\u2242\u0338",
"nexist;": "\u2204",
"nexists;": "\u2204",
"nfr;": "\U0001d52b",
"ngE;": "\u2267\u0338",
"nge;": "\u2271",
"ngeq;": "\u2271",
"ngeqq;": "\u2267\u0338",
"ngeqslant;": "\u2a7e\u0338",
"nges;": "\u2a7e\u0338",
"ngsim;": "\u2275",
"ngt;": "\u226f",
"ngtr;": "\u226f",
"nhArr;": "\u21ce",
"nharr;": "\u21ae",
"nhpar;": "\u2af2",
"ni;": "\u220b",
"nis;": "\u22fc",
"nisd;": "\u22fa",
"niv;": "\u220b",
"njcy;": "\u045a",
"nlArr;": "\u21cd",
"nlE;": "\u2266\u0338",
"nlarr;": "\u219a",
"nldr;": "\u2025",
"nle;": "\u2270",
"nleftarrow;": "\u219a",
"nleftrightarrow;": "\u21ae",
"nleq;": "\u2270",
"nleqq;": "\u2266\u0338",
"nleqslant;": "\u2a7d\u0338",
"nles;": "\u2a7d\u0338",
"nless;": "\u226e",
"nlsim;": "\u2274",
"nlt;": "\u226e",
"nltri;": "\u22ea",
"nltrie;": "\u22ec",
"nmid;": "\u2224",
"nopf;": "\U0001d55f",
"not": "\xac",
"not;": "\xac",
"notin;": "\u2209",
"notinE;": "\u22f9\u0338",
"notindot;": "\u22f5\u0338",
"notinva;": "\u2209",
"notinvb;": "\u22f7",
"notinvc;": "\u22f6",
"notni;": "\u220c",
"notniva;": "\u220c",
"notnivb;": "\u22fe",
"notnivc;": "\u22fd",
"npar;": "\u2226",
"nparallel;": "\u2226",
"nparsl;": "\u2afd\u20e5",
"npart;": "\u2202\u0338",
"npolint;": "\u2a14",
"npr;": "\u2280",
"nprcue;": "\u22e0",
"npre;": "\u2aaf\u0338",
"nprec;": "\u2280",
"npreceq;": "\u2aaf\u0338",
"nrArr;": "\u21cf",
"nrarr;": "\u219b",
"nrarrc;": "\u2933\u0338",
"nrarrw;": "\u219d\u0338",
"nrightarrow;": "\u219b",
"nrtri;": "\u22eb",
"nrtrie;": "\u22ed",
"nsc;": "\u2281",
"nsccue;": "\u22e1",
"nsce;": "\u2ab0\u0338",
"nscr;": "\U0001d4c3",
"nshortmid;": "\u2224",
"nshortparallel;": "\u2226",
"nsim;": "\u2241",
"nsime;": "\u2244",
"nsimeq;": "\u2244",
"nsmid;": "\u2224",
"nspar;": "\u2226",
"nsqsube;": "\u22e2",
"nsqsupe;": "\u22e3",
"nsub;": "\u2284",
"nsubE;": "\u2ac5\u0338",
"nsube;": "\u2288",
"nsubset;": "\u2282\u20d2",
"nsubseteq;": "\u2288",
"nsubseteqq;": "\u2ac5\u0338",
"nsucc;": "\u2281",
"nsucceq;": "\u2ab0\u0338",
"nsup;": "\u2285",
"nsupE;": "\u2ac6\u0338",
"nsupe;": "\u2289",
"nsupset;": "\u2283\u20d2",
"nsupseteq;": "\u2289",
"nsupseteqq;": "\u2ac6\u0338",
"ntgl;": "\u2279",
"ntilde": "\xf1",
"ntilde;": "\xf1",
"ntlg;": "\u2278",
"ntriangleleft;": "\u22ea",
"ntrianglelefteq;": "\u22ec",
"ntriangleright;": "\u22eb",
"ntrianglerighteq;": "\u22ed",
"nu;": "\u03bd",
"num;": "#",
"numero;": "\u2116",
"numsp;": "\u2007",
"nvDash;": "\u22ad",
"nvHarr;": "\u2904",
"nvap;": "\u224d\u20d2",
"nvdash;": "\u22ac",
"nvge;": "\u2265\u20d2",
"nvgt;": ">\u20d2",
"nvinfin;": "\u29de",
"nvlArr;": "\u2902",
"nvle;": "\u2264\u20d2",
"nvlt;": "<\u20d2",
"nvltrie;": "\u22b4\u20d2",
"nvrArr;": "\u2903",
"nvrtrie;": "\u22b5\u20d2",
"nvsim;": "\u223c\u20d2",
"nwArr;": "\u21d6",
"nwarhk;": "\u2923",
"nwarr;": "\u2196",
"nwarrow;": "\u2196",
"nwnear;": "\u2927",
"oS;": "\u24c8",
"oacute": "\xf3",
"oacute;": "\xf3",
"oast;": "\u229b",
"ocir;": "\u229a",
"ocirc": "\xf4",
"ocirc;": "\xf4",
"ocy;": "\u043e",
"odash;": "\u229d",
"odblac;": "\u0151",
"odiv;": "\u2a38",
"odot;": "\u2299",
"odsold;": "\u29bc",
"oelig;": "\u0153",
"ofcir;": "\u29bf",
"ofr;": "\U0001d52c",
"ogon;": "\u02db",
"ograve": "\xf2",
"ograve;": "\xf2",
"ogt;": "\u29c1",
"ohbar;": "\u29b5",
"ohm;": "\u03a9",
"oint;": "\u222e",
"olarr;": "\u21ba",
"olcir;": "\u29be",
"olcross;": "\u29bb",
"oline;": "\u203e",
"olt;": "\u29c0",
"omacr;": "\u014d",
"omega;": "\u03c9",
"omicron;": "\u03bf",
"omid;": "\u29b6",
"ominus;": "\u2296",
"oopf;": "\U0001d560",
"opar;": "\u29b7",
"operp;": "\u29b9",
"oplus;": "\u2295",
"or;": "\u2228",
"orarr;": "\u21bb",
"ord;": "\u2a5d",
"order;": "\u2134",
"orderof;": "\u2134",
"ordf": "\xaa",
"ordf;": "\xaa",
"ordm": "\xba",
"ordm;": "\xba",
"origof;": "\u22b6",
"oror;": "\u2a56",
"orslope;": "\u2a57",
"orv;": "\u2a5b",
"oscr;": "\u2134",
"oslash": "\xf8",
"oslash;": "\xf8",
"osol;": "\u2298",
"otilde": "\xf5",
"otilde;": "\xf5",
"otimes;": "\u2297",
"otimesas;": "\u2a36",
"ouml": "\xf6",
"ouml;": "\xf6",
"ovbar;": "\u233d",
"par;": "\u2225",
"para": "\xb6",
"para;": "\xb6",
"parallel;": "\u2225",
"parsim;": "\u2af3",
"parsl;": "\u2afd",
"part;": "\u2202",
"pcy;": "\u043f",
"percnt;": "%",
"period;": ".",
"permil;": "\u2030",
"perp;": "\u22a5",
"pertenk;": "\u2031",
"pfr;": "\U0001d52d",
"phi;": "\u03c6",
"phiv;": "\u03d5",
"phmmat;": "\u2133",
"phone;": "\u260e",
"pi;": "\u03c0",
"pitchfork;": "\u22d4",
"piv;": "\u03d6",
"planck;": "\u210f",
"planckh;": "\u210e",
"plankv;": "\u210f",
"plus;": "+",
"plusacir;": "\u2a23",
"plusb;": "\u229e",
"pluscir;": "\u2a22",
"plusdo;": "\u2214",
"plusdu;": "\u2a25",
"pluse;": "\u2a72",
"plusmn": "\xb1",
"plusmn;": "\xb1",
"plussim;": "\u2a26",
"plustwo;": "\u2a27",
"pm;": "\xb1",
"pointint;": "\u2a15",
"popf;": "\U0001d561",
"pound": "\xa3",
"pound;": "\xa3",
"pr;": "\u227a",
"prE;": "\u2ab3",
"prap;": "\u2ab7",
"prcue;": "\u227c",
"pre;": "\u2aaf",
"prec;": "\u227a",
"precapprox;": "\u2ab7",
"preccurlyeq;": "\u227c",
"preceq;": "\u2aaf",
"precnapprox;": "\u2ab9",
"precneqq;": "\u2ab5",
"precnsim;": "\u22e8",
"precsim;": "\u227e",
"prime;": "\u2032",
"primes;": "\u2119",
"prnE;": "\u2ab5",
"prnap;": "\u2ab9",
"prnsim;": "\u22e8",
"prod;": "\u220f",
"profalar;": "\u232e",
"profline;": "\u2312",
"profsurf;": "\u2313",
"prop;": "\u221d",
"propto;": "\u221d",
"prsim;": "\u227e",
"prurel;": "\u22b0",
"pscr;": "\U0001d4c5",
"psi;": "\u03c8",
"puncsp;": "\u2008",
"qfr;": "\U0001d52e",
"qint;": "\u2a0c",
"qopf;": "\U0001d562",
"qprime;": "\u2057",
"qscr;": "\U0001d4c6",
"quaternions;": "\u210d",
"quatint;": "\u2a16",
"quest;": "?",
"questeq;": "\u225f",
"quot": "\"",
"quot;": "\"",
"rAarr;": "\u21db",
"rArr;": "\u21d2",
"rAtail;": "\u291c",
"rBarr;": "\u290f",
"rHar;": "\u2964",
"race;": "\u223d\u0331",
"racute;": "\u0155",
"radic;": "\u221a",
"raemptyv;": "\u29b3",
"rang;": "\u27e9",
"rangd;": "\u2992",
"range;": "\u29a5",
"rangle;": "\u27e9",
"raquo": "\xbb",
"raquo;": "\xbb",
"rarr;": "\u2192",
"rarrap;": "\u2975",
"rarrb;": "\u21e5",
"rarrbfs;": "\u2920",
"rarrc;": "\u2933",
"rarrfs;": "\u291e",
"rarrhk;": "\u21aa",
"rarrlp;": "\u21ac",
"rarrpl;": "\u2945",
"rarrsim;": "\u2974",
"rarrtl;": "\u21a3",
"rarrw;": "\u219d",
"ratail;": "\u291a",
"ratio;": "\u2236",
"rationals;": "\u211a",
"rbarr;": "\u290d",
"rbbrk;": "\u2773",
"rbrace;": "}",
"rbrack;": "]",
"rbrke;": "\u298c",
"rbrksld;": "\u298e",
"rbrkslu;": "\u2990",
"rcaron;": "\u0159",
"rcedil;": "\u0157",
"rceil;": "\u2309",
"rcub;": "}",
"rcy;": "\u0440",
"rdca;": "\u2937",
"rdldhar;": "\u2969",
"rdquo;": "\u201d",
"rdquor;": "\u201d",
"rdsh;": "\u21b3",
"real;": "\u211c",
"realine;": "\u211b",
"realpart;": "\u211c",
"reals;": "\u211d",
"rect;": "\u25ad",
"reg": "\xae",
"reg;": "\xae",
"rfisht;": "\u297d",
"rfloor;": "\u230b",
"rfr;": "\U0001d52f",
"rhard;": "\u21c1",
"rharu;": "\u21c0",
"rharul;": "\u296c",
"rho;": "\u03c1",
"rhov;": "\u03f1",
"rightarrow;": "\u2192",
"rightarrowtail;": "\u21a3",
"rightharpoondown;": "\u21c1",
"rightharpoonup;": "\u21c0",
"rightleftarrows;": "\u21c4",
"rightleftharpoons;": "\u21cc",
"rightrightarrows;": "\u21c9",
"rightsquigarrow;": "\u219d",
"rightthreetimes;": "\u22cc",
"ring;": "\u02da",
"risingdotseq;": "\u2253",
"rlarr;": "\u21c4",
"rlhar;": "\u21cc",
"rlm;": "\u200f",
"rmoust;": "\u23b1",
"rmoustache;": "\u23b1",
"rnmid;": "\u2aee",
"roang;": "\u27ed",
"roarr;": "\u21fe",
"robrk;": "\u27e7",
"ropar;": "\u2986",
"ropf;": "\U0001d563",
"roplus;": "\u2a2e",
"rotimes;": "\u2a35",
"rpar;": ")",
"rpargt;": "\u2994",
"rppolint;": "\u2a12",
"rrarr;": "\u21c9",
"rsaquo;": "\u203a",
"rscr;": "\U0001d4c7",
"rsh;": "\u21b1",
"rsqb;": "]",
"rsquo;": "\u2019",
"rsquor;": "\u2019",
"rthree;": "\u22cc",
"rtimes;": "\u22ca",
"rtri;": "\u25b9",
"rtrie;": "\u22b5",
"rtrif;": "\u25b8",
"rtriltri;": "\u29ce",
"ruluhar;": "\u2968",
"rx;": "\u211e",
"sacute;": "\u015b",
"sbquo;": "\u201a",
"sc;": "\u227b",
"scE;": "\u2ab4",
"scap;": "\u2ab8",
"scaron;": "\u0161",
"sccue;": "\u227d",
"sce;": "\u2ab0",
"scedil;": "\u015f",
"scirc;": "\u015d",
"scnE;": "\u2ab6",
"scnap;": "\u2aba",
"scnsim;": "\u22e9",
"scpolint;": "\u2a13",
"scsim;": "\u227f",
"scy;": "\u0441",
"sdot;": "\u22c5",
"sdotb;": "\u22a1",
"sdote;": "\u2a66",
"seArr;": "\u21d8",
"searhk;": "\u2925",
"searr;": "\u2198",
"searrow;": "\u2198",
"sect": "\xa7",
"sect;": "\xa7",
"semi;": ";",
"seswar;": "\u2929",
"setminus;": "\u2216",
"setmn;": "\u2216",
"sext;": "\u2736",
"sfr;": "\U0001d530",
"sfrown;": "\u2322",
"sharp;": "\u266f",
"shchcy;": "\u0449",
"shcy;": "\u0448",
"shortmid;": "\u2223",
"shortparallel;": "\u2225",
"shy": "\xad",
"shy;": "\xad",
"sigma;": "\u03c3",
"sigmaf;": "\u03c2",
"sigmav;": "\u03c2",
"sim;": "\u223c",
"simdot;": "\u2a6a",
"sime;": "\u2243",
"simeq;": "\u2243",
"simg;": "\u2a9e",
"simgE;": "\u2aa0",
"siml;": "\u2a9d",
"simlE;": "\u2a9f",
"simne;": "\u2246",
"simplus;": "\u2a24",
"simrarr;": "\u2972",
"slarr;": "\u2190",
"smallsetminus;": "\u2216",
"smashp;": "\u2a33",
"smeparsl;": "\u29e4",
"smid;": "\u2223",
"smile;": "\u2323",
"smt;": "\u2aaa",
"smte;": "\u2aac",
"smtes;": "\u2aac\ufe00",
"softcy;": "\u044c",
"sol;": "/",
"solb;": "\u29c4",
"solbar;": "\u233f",
"sopf;": "\U0001d564",
"spades;": "\u2660",
"spadesuit;": "\u2660",
"spar;": "\u2225",
"sqcap;": "\u2293",
"sqcaps;": "\u2293\ufe00",
"sqcup;": "\u2294",
"sqcups;": "\u2294\ufe00",
"sqsub;": "\u228f",
"sqsube;": "\u2291",
"sqsubset;": "\u228f",
"sqsubseteq;": "\u2291",
"sqsup;": "\u2290",
"sqsupe;": "\u2292",
"sqsupset;": "\u2290",
"sqsupseteq;": "\u2292",
"squ;": "\u25a1",
"square;": "\u25a1",
"squarf;": "\u25aa",
"squf;": "\u25aa",
"srarr;": "\u2192",
"sscr;": "\U0001d4c8",
"ssetmn;": "\u2216",
"ssmile;": "\u2323",
"sstarf;": "\u22c6",
"star;": "\u2606",
"starf;": "\u2605",
"straightepsilon;": "\u03f5",
"straightphi;": "\u03d5",
"strns;": "\xaf",
"sub;": "\u2282",
"subE;": "\u2ac5",
"subdot;": "\u2abd",
"sube;": "\u2286",
"subedot;": "\u2ac3",
"submult;": "\u2ac1",
"subnE;": "\u2acb",
"subne;": "\u228a",
"subplus;": "\u2abf",
"subrarr;": "\u2979",
"subset;": "\u2282",
"subseteq;": "\u2286",
"subseteqq;": "\u2ac5",
"subsetneq;": "\u228a",
"subsetneqq;": "\u2acb",
"subsim;": "\u2ac7",
"subsub;": "\u2ad5",
"subsup;": "\u2ad3",
"succ;": "\u227b",
"succapprox;": "\u2ab8",
"succcurlyeq;": "\u227d",
"succeq;": "\u2ab0",
"succnapprox;": "\u2aba",
"succneqq;": "\u2ab6",
"succnsim;": "\u22e9",
"succsim;": "\u227f",
"sum;": "\u2211",
"sung;": "\u266a",
"sup1": "\xb9",
"sup1;": "\xb9",
"sup2": "\xb2",
"sup2;": "\xb2",
"sup3": "\xb3",
"sup3;": "\xb3",
"sup;": "\u2283",
"supE;": "\u2ac6",
"supdot;": "\u2abe",
"supdsub;": "\u2ad8",
"supe;": "\u2287",
"supedot;": "\u2ac4",
"suphsol;": "\u27c9",
"suphsub;": "\u2ad7",
"suplarr;": "\u297b",
"supmult;": "\u2ac2",
"supnE;": "\u2acc",
"supne;": "\u228b",
"supplus;": "\u2ac0",
"supset;": "\u2283",
"supseteq;": "\u2287",
"supseteqq;": "\u2ac6",
"supsetneq;": "\u228b",
"supsetneqq;": "\u2acc",
"supsim;": "\u2ac8",
"supsub;": "\u2ad4",
"supsup;": "\u2ad6",
"swArr;": "\u21d9",
"swarhk;": "\u2926",
"swarr;": "\u2199",
"swarrow;": "\u2199",
"swnwar;": "\u292a",
"szlig": "\xdf",
"szlig;": "\xdf",
"target;": "\u2316",
"tau;": "\u03c4",
"tbrk;": "\u23b4",
"tcaron;": "\u0165",
"tcedil;": "\u0163",
"tcy;": "\u0442",
"tdot;": "\u20db",
"telrec;": "\u2315",
"tfr;": "\U0001d531",
"there4;": "\u2234",
"therefore;": "\u2234",
"theta;": "\u03b8",
"thetasym;": "\u03d1",
"thetav;": "\u03d1",
"thickapprox;": "\u2248",
"thicksim;": "\u223c",
"thinsp;": "\u2009",
"thkap;": "\u2248",
"thksim;": "\u223c",
"thorn": "\xfe",
"thorn;": "\xfe",
"tilde;": "\u02dc",
"times": "\xd7",
"times;": "\xd7",
"timesb;": "\u22a0",
"timesbar;": "\u2a31",
"timesd;": "\u2a30",
"tint;": "\u222d",
"toea;": "\u2928",
"top;": "\u22a4",
"topbot;": "\u2336",
"topcir;": "\u2af1",
"topf;": "\U0001d565",
"topfork;": "\u2ada",
"tosa;": "\u2929",
"tprime;": "\u2034",
"trade;": "\u2122",
"triangle;": "\u25b5",
"triangledown;": "\u25bf",
"triangleleft;": "\u25c3",
"trianglelefteq;": "\u22b4",
"triangleq;": "\u225c",
"triangleright;": "\u25b9",
"trianglerighteq;": "\u22b5",
"tridot;": "\u25ec",
"trie;": "\u225c",
"triminus;": "\u2a3a",
"triplus;": "\u2a39",
"trisb;": "\u29cd",
"tritime;": "\u2a3b",
"trpezium;": "\u23e2",
"tscr;": "\U0001d4c9",
"tscy;": "\u0446",
"tshcy;": "\u045b",
"tstrok;": "\u0167",
"twixt;": "\u226c",
"twoheadleftarrow;": "\u219e",
"twoheadrightarrow;": "\u21a0",
"uArr;": "\u21d1",
"uHar;": "\u2963",
"uacute": "\xfa",
"uacute;": "\xfa",
"uarr;": "\u2191",
"ubrcy;": "\u045e",
"ubreve;": "\u016d",
"ucirc": "\xfb",
"ucirc;": "\xfb",
"ucy;": "\u0443",
"udarr;": "\u21c5",
"udblac;": "\u0171",
"udhar;": "\u296e",
"ufisht;": "\u297e",
"ufr;": "\U0001d532",
"ugrave": "\xf9",
"ugrave;": "\xf9",
"uharl;": "\u21bf",
"uharr;": "\u21be",
"uhblk;": "\u2580",
"ulcorn;": "\u231c",
"ulcorner;": "\u231c",
"ulcrop;": "\u230f",
"ultri;": "\u25f8",
"umacr;": "\u016b",
"uml": "\xa8",
"uml;": "\xa8",
"uogon;": "\u0173",
"uopf;": "\U0001d566",
"uparrow;": "\u2191",
"updownarrow;": "\u2195",
"upharpoonleft;": "\u21bf",
"upharpoonright;": "\u21be",
"uplus;": "\u228e",
"upsi;": "\u03c5",
"upsih;": "\u03d2",
"upsilon;": "\u03c5",
"upuparrows;": "\u21c8",
"urcorn;": "\u231d",
"urcorner;": "\u231d",
"urcrop;": "\u230e",
"uring;": "\u016f",
"urtri;": "\u25f9",
"uscr;": "\U0001d4ca",
"utdot;": "\u22f0",
"utilde;": "\u0169",
"utri;": "\u25b5",
"utrif;": "\u25b4",
"uuarr;": "\u21c8",
"uuml": "\xfc",
"uuml;": "\xfc",
"uwangle;": "\u29a7",
"vArr;": "\u21d5",
"vBar;": "\u2ae8",
"vBarv;": "\u2ae9",
"vDash;": "\u22a8",
"vangrt;": "\u299c",
"varepsilon;": "\u03f5",
"varkappa;": "\u03f0",
"varnothing;": "\u2205",
"varphi;": "\u03d5",
"varpi;": "\u03d6",
"varpropto;": "\u221d",
"varr;": "\u2195",
"varrho;": "\u03f1",
"varsigma;": "\u03c2",
"varsubsetneq;": "\u228a\ufe00",
"varsubsetneqq;": "\u2acb\ufe00",
"varsupsetneq;": "\u228b\ufe00",
"varsupsetneqq;": "\u2acc\ufe00",
"vartheta;": "\u03d1",
"vartriangleleft;": "\u22b2",
"vartriangleright;": "\u22b3",
"vcy;": "\u0432",
"vdash;": "\u22a2",
"vee;": "\u2228",
"veebar;": "\u22bb",
"veeeq;": "\u225a",
"vellip;": "\u22ee",
"verbar;": "|",
"vert;": "|",
"vfr;": "\U0001d533",
"vltri;": "\u22b2",
"vnsub;": "\u2282\u20d2",
"vnsup;": "\u2283\u20d2",
"vopf;": "\U0001d567",
"vprop;": "\u221d",
"vrtri;": "\u22b3",
"vscr;": "\U0001d4cb",
"vsubnE;": "\u2acb\ufe00",
"vsubne;": "\u228a\ufe00",
"vsupnE;": "\u2acc\ufe00",
"vsupne;": "\u228b\ufe00",
"vzigzag;": "\u299a",
"wcirc;": "\u0175",
"wedbar;": "\u2a5f",
"wedge;": "\u2227",
"wedgeq;": "\u2259",
"weierp;": "\u2118",
"wfr;": "\U0001d534",
"wopf;": "\U0001d568",
"wp;": "\u2118",
"wr;": "\u2240",
"wreath;": "\u2240",
"wscr;": "\U0001d4cc",
"xcap;": "\u22c2",
"xcirc;": "\u25ef",
"xcup;": "\u22c3",
"xdtri;": "\u25bd",
"xfr;": "\U0001d535",
"xhArr;": "\u27fa",
"xharr;": "\u27f7",
"xi;": "\u03be",
"xlArr;": "\u27f8",
"xlarr;": "\u27f5",
"xmap;": "\u27fc",
"xnis;": "\u22fb",
"xodot;": "\u2a00",
"xopf;": "\U0001d569",
"xoplus;": "\u2a01",
"xotime;": "\u2a02",
"xrArr;": "\u27f9",
"xrarr;": "\u27f6",
"xscr;": "\U0001d4cd",
"xsqcup;": "\u2a06",
"xuplus;": "\u2a04",
"xutri;": "\u25b3",
"xvee;": "\u22c1",
"xwedge;": "\u22c0",
"yacute": "\xfd",
"yacute;": "\xfd",
"yacy;": "\u044f",
"ycirc;": "\u0177",
"ycy;": "\u044b",
"yen": "\xa5",
"yen;": "\xa5",
"yfr;": "\U0001d536",
"yicy;": "\u0457",
"yopf;": "\U0001d56a",
"yscr;": "\U0001d4ce",
"yucy;": "\u044e",
"yuml": "\xff",
"yuml;": "\xff",
"zacute;": "\u017a",
"zcaron;": "\u017e",
"zcy;": "\u0437",
"zdot;": "\u017c",
"zeetrf;": "\u2128",
"zeta;": "\u03b6",
"zfr;": "\U0001d537",
"zhcy;": "\u0436",
"zigrarr;": "\u21dd",
"zopf;": "\U0001d56b",
"zscr;": "\U0001d4cf",
"zwj;": "\u200d",
"zwnj;": "\u200c",
}
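# Illustrative sketch (not part of the original module): keys ending in ";" are
# the normal form of a named reference; the smaller set of keys without the
# semicolon are the legacy names that are also recognised unterminated
# (e.g. "amp" alongside "amp;").  A simple lookup that accepts either spelling:
def _example_lookup_entity(name):
    """Resolve an entity name such as "amp;" or "amp"; return None if unknown."""
    for candidate in (name, name + ";"):
        if candidate in entities:
            return entities[candidate]
    return None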
replacementCharacters = {
0x0: "\uFFFD",
0x0d: "\u000D",
0x80: "\u20AC",
0x81: "\u0081",
0x81: "\u0081",
0x82: "\u201A",
0x83: "\u0192",
0x84: "\u201E",
0x85: "\u2026",
0x86: "\u2020",
0x87: "\u2021",
0x88: "\u02C6",
0x89: "\u2030",
0x8A: "\u0160",
0x8B: "\u2039",
0x8C: "\u0152",
0x8D: "\u008D",
0x8E: "\u017D",
0x8F: "\u008F",
0x90: "\u0090",
0x91: "\u2018",
0x92: "\u2019",
0x93: "\u201C",
0x94: "\u201D",
0x95: "\u2022",
0x96: "\u2013",
0x97: "\u2014",
0x98: "\u02DC",
0x99: "\u2122",
0x9A: "\u0161",
0x9B: "\u203A",
0x9C: "\u0153",
0x9D: "\u009D",
0x9E: "\u017E",
0x9F: "\u0178",
}
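# Illustrative sketch (not part of the original module): replacementCharacters
# covers the code points a numeric character reference must not produce
# directly (NUL, CR and the C1 range 0x80-0x9F); anything else passes through
# unchanged.  chr() assumes Python 3.
def _example_replace_numeric_reference(codepoint):
    """Apply the replacement table to a parsed numeric reference."""
    return replacementCharacters.get(codepoint, chr(codepoint))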
encodings = {
'437': 'cp437',
'850': 'cp850',
'852': 'cp852',
'855': 'cp855',
'857': 'cp857',
'860': 'cp860',
'861': 'cp861',
'862': 'cp862',
'863': 'cp863',
'865': 'cp865',
'866': 'cp866',
'869': 'cp869',
'ansix341968': 'ascii',
'ansix341986': 'ascii',
'arabic': 'iso8859-6',
'ascii': 'ascii',
'asmo708': 'iso8859-6',
'big5': 'big5',
'big5hkscs': 'big5hkscs',
'chinese': 'gbk',
'cp037': 'cp037',
'cp1026': 'cp1026',
'cp154': 'ptcp154',
'cp367': 'ascii',
'cp424': 'cp424',
'cp437': 'cp437',
'cp500': 'cp500',
'cp775': 'cp775',
'cp819': 'windows-1252',
'cp850': 'cp850',
'cp852': 'cp852',
'cp855': 'cp855',
'cp857': 'cp857',
'cp860': 'cp860',
'cp861': 'cp861',
'cp862': 'cp862',
'cp863': 'cp863',
'cp864': 'cp864',
'cp865': 'cp865',
'cp866': 'cp866',
'cp869': 'cp869',
'cp936': 'gbk',
'cpgr': 'cp869',
'cpis': 'cp861',
'csascii': 'ascii',
'csbig5': 'big5',
'cseuckr': 'cp949',
'cseucpkdfmtjapanese': 'euc_jp',
'csgb2312': 'gbk',
'cshproman8': 'hp-roman8',
'csibm037': 'cp037',
'csibm1026': 'cp1026',
'csibm424': 'cp424',
'csibm500': 'cp500',
'csibm855': 'cp855',
'csibm857': 'cp857',
'csibm860': 'cp860',
'csibm861': 'cp861',
'csibm863': 'cp863',
'csibm864': 'cp864',
'csibm865': 'cp865',
'csibm866': 'cp866',
'csibm869': 'cp869',
'csiso2022jp': 'iso2022_jp',
'csiso2022jp2': 'iso2022_jp_2',
'csiso2022kr': 'iso2022_kr',
'csiso58gb231280': 'gbk',
'csisolatin1': 'windows-1252',
'csisolatin2': 'iso8859-2',
'csisolatin3': 'iso8859-3',
'csisolatin4': 'iso8859-4',
'csisolatin5': 'windows-1254',
'csisolatin6': 'iso8859-10',
'csisolatinarabic': 'iso8859-6',
'csisolatincyrillic': 'iso8859-5',
'csisolatingreek': 'iso8859-7',
'csisolatinhebrew': 'iso8859-8',
'cskoi8r': 'koi8-r',
'csksc56011987': 'cp949',
'cspc775baltic': 'cp775',
'cspc850multilingual': 'cp850',
'cspc862latinhebrew': 'cp862',
'cspc8codepage437': 'cp437',
'cspcp852': 'cp852',
'csptcp154': 'ptcp154',
'csshiftjis': 'shift_jis',
'csunicode11utf7': 'utf-7',
'cyrillic': 'iso8859-5',
'cyrillicasian': 'ptcp154',
'ebcdiccpbe': 'cp500',
'ebcdiccpca': 'cp037',
'ebcdiccpch': 'cp500',
'ebcdiccphe': 'cp424',
'ebcdiccpnl': 'cp037',
'ebcdiccpus': 'cp037',
'ebcdiccpwt': 'cp037',
'ecma114': 'iso8859-6',
'ecma118': 'iso8859-7',
'elot928': 'iso8859-7',
'eucjp': 'euc_jp',
'euckr': 'cp949',
'extendedunixcodepackedformatforjapanese': 'euc_jp',
'gb18030': 'gb18030',
'gb2312': 'gbk',
'gb231280': 'gbk',
'gbk': 'gbk',
'greek': 'iso8859-7',
'greek8': 'iso8859-7',
'hebrew': 'iso8859-8',
'hproman8': 'hp-roman8',
'hzgb2312': 'hz',
'ibm037': 'cp037',
'ibm1026': 'cp1026',
'ibm367': 'ascii',
'ibm424': 'cp424',
'ibm437': 'cp437',
'ibm500': 'cp500',
'ibm775': 'cp775',
'ibm819': 'windows-1252',
'ibm850': 'cp850',
'ibm852': 'cp852',
'ibm855': 'cp855',
'ibm857': 'cp857',
'ibm860': 'cp860',
'ibm861': 'cp861',
'ibm862': 'cp862',
'ibm863': 'cp863',
'ibm864': 'cp864',
'ibm865': 'cp865',
'ibm866': 'cp866',
'ibm869': 'cp869',
'iso2022jp': 'iso2022_jp',
'iso2022jp2': 'iso2022_jp_2',
'iso2022kr': 'iso2022_kr',
'iso646irv1991': 'ascii',
'iso646us': 'ascii',
'iso88591': 'windows-1252',
'iso885910': 'iso8859-10',
'iso8859101992': 'iso8859-10',
'iso885911987': 'windows-1252',
'iso885913': 'iso8859-13',
'iso885914': 'iso8859-14',
'iso8859141998': 'iso8859-14',
'iso885915': 'iso8859-15',
'iso885916': 'iso8859-16',
'iso8859162001': 'iso8859-16',
'iso88592': 'iso8859-2',
'iso885921987': 'iso8859-2',
'iso88593': 'iso8859-3',
'iso885931988': 'iso8859-3',
'iso88594': 'iso8859-4',
'iso885941988': 'iso8859-4',
'iso88595': 'iso8859-5',
'iso885951988': 'iso8859-5',
'iso88596': 'iso8859-6',
'iso885961987': 'iso8859-6',
'iso88597': 'iso8859-7',
'iso885971987': 'iso8859-7',
'iso88598': 'iso8859-8',
'iso885981988': 'iso8859-8',
'iso88599': 'windows-1254',
'iso885991989': 'windows-1254',
'isoceltic': 'iso8859-14',
'isoir100': 'windows-1252',
'isoir101': 'iso8859-2',
'isoir109': 'iso8859-3',
'isoir110': 'iso8859-4',
'isoir126': 'iso8859-7',
'isoir127': 'iso8859-6',
'isoir138': 'iso8859-8',
'isoir144': 'iso8859-5',
'isoir148': 'windows-1254',
'isoir149': 'cp949',
'isoir157': 'iso8859-10',
'isoir199': 'iso8859-14',
'isoir226': 'iso8859-16',
'isoir58': 'gbk',
'isoir6': 'ascii',
'koi8r': 'koi8-r',
'koi8u': 'koi8-u',
'korean': 'cp949',
'ksc5601': 'cp949',
'ksc56011987': 'cp949',
'ksc56011989': 'cp949',
'l1': 'windows-1252',
'l10': 'iso8859-16',
'l2': 'iso8859-2',
'l3': 'iso8859-3',
'l4': 'iso8859-4',
'l5': 'windows-1254',
'l6': 'iso8859-10',
'l8': 'iso8859-14',
'latin1': 'windows-1252',
'latin10': 'iso8859-16',
'latin2': 'iso8859-2',
'latin3': 'iso8859-3',
'latin4': 'iso8859-4',
'latin5': 'windows-1254',
'latin6': 'iso8859-10',
'latin8': 'iso8859-14',
'latin9': 'iso8859-15',
'ms936': 'gbk',
'mskanji': 'shift_jis',
'pt154': 'ptcp154',
'ptcp154': 'ptcp154',
'r8': 'hp-roman8',
'roman8': 'hp-roman8',
'shiftjis': 'shift_jis',
'tis620': 'cp874',
'unicode11utf7': 'utf-7',
'us': 'ascii',
'usascii': 'ascii',
'utf16': 'utf-16',
'utf16be': 'utf-16-be',
'utf16le': 'utf-16-le',
'utf8': 'utf-8',
'windows1250': 'cp1250',
'windows1251': 'cp1251',
'windows1252': 'cp1252',
'windows1253': 'cp1253',
'windows1254': 'cp1254',
'windows1255': 'cp1255',
'windows1256': 'cp1256',
'windows1257': 'cp1257',
'windows1258': 'cp1258',
'windows936': 'gbk',
'x-x-big5': 'big5'}
tokenTypes = {
"Doctype": 0,
"Characters": 1,
"SpaceCharacters": 2,
"StartTag": 3,
"EndTag": 4,
"EmptyTag": 5,
"Comment": 6,
"ParseError": 7
}
tagTokenTypes = frozenset((tokenTypes["StartTag"], tokenTypes["EndTag"],
tokenTypes["EmptyTag"]))
prefixes = dict([(v, k) for k, v in namespaces.items()])
prefixes["http://www.w3.org/1998/Math/MathML"] = "math"
class DataLossWarning(UserWarning):
pass
class ReparseException(Exception):
pass
| mpl-2.0 |
99designs/colorific | setup.py | 1 | 1724 | # -*- coding: utf-8 -*-
#
# setup.py
# colorific
#
"""
Package information for colorific.
"""
import os.path
import platform
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
PROJECT_DIR = os.path.realpath(os.path.join(os.path.dirname(__file__)))
README_PATH = os.path.join(PROJECT_DIR, 'README.rst')
REQUIREMENTS_PATH = os.path.join(PROJECT_DIR, 'requirements_py{0}.pip'
.format(platform.python_version_tuple()[0]))
VERSION = __import__('colorific').get_version()
install_requires = [
'Pillow>=2.6.1',
'colormath>=2.0.2',
'numpy>=1.9.0',
]
if platform.python_version_tuple() < ('3', '2'):
install_requires.append('backports.functools_lru_cache>=1.0.1')
setup(
name='colorific',
version=VERSION,
description='Automatic color palette detection',
long_description=open(README_PATH).read(),
author='Lars Yencken',
author_email='[email protected]',
url='http://github.com/99designs/colorific',
license='ISC',
packages=['colorific'],
zip_safe=False,
install_requires=install_requires,
entry_points={'console_scripts': ['colorific = colorific.script:main']},
test_suite='tests',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: ISC License (ISCL)',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
| isc |
hdinsight/hue | desktop/core/ext-py/elementtree/elementtree/__init__.py | 127 | 1491 | # $Id: __init__.py 1821 2004-06-03 16:57:49Z fredrik $
# elementtree package
# --------------------------------------------------------------------
# The ElementTree toolkit is
#
# Copyright (c) 1999-2004 by Fredrik Lundh
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Secret Labs AB or the author not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
# --------------------------------------------------------------------
| apache-2.0 |
haojunyu/numpy | numpy/ma/mrecords.py | 90 | 27383 | """:mod:`numpy.ma.mrecords`
Defines the equivalent of :class:`numpy.recarrays` for masked arrays,
where fields can be accessed as attributes.
Note that :class:`numpy.ma.MaskedArray` already supports structured datatypes
and the masking of individual fields.
.. moduleauthor:: Pierre Gerard-Marchant
"""
from __future__ import division, absolute_import, print_function
# We should make sure that no field is called '_mask','mask','_fieldmask',
# or whatever restricted keywords. An idea would be to no bother in the
# first place, and then rename the invalid fields with a trailing
# underscore. Maybe we could just overload the parser function ?
import sys
import warnings
import numpy as np
import numpy.core.numerictypes as ntypes
from numpy.compat import basestring
from numpy import (
bool_, dtype, ndarray, recarray, array as narray
)
from numpy.core.records import (
fromarrays as recfromarrays, fromrecords as recfromrecords
)
_byteorderconv = np.core.records._byteorderconv
_typestr = ntypes._typestr
import numpy.ma as ma
from numpy.ma import (
MAError, MaskedArray, masked, nomask, masked_array, getdata,
getmaskarray, filled
)
_check_fill_value = ma.core._check_fill_value
__all__ = [
'MaskedRecords', 'mrecarray', 'fromarrays', 'fromrecords',
'fromtextfile', 'addfield',
]
reserved_fields = ['_data', '_mask', '_fieldmask', 'dtype']
def _getformats(data):
"""
Returns the formats of arrays in arraylist as a comma-separated string.
"""
if hasattr(data, 'dtype'):
return ",".join([desc[1] for desc in data.dtype.descr])
formats = ''
for obj in data:
obj = np.asarray(obj)
formats += _typestr[obj.dtype.type]
if issubclass(obj.dtype.type, ntypes.flexible):
formats += repr(obj.itemsize)
formats += ','
return formats[:-1]
def _checknames(descr, names=None):
"""
Checks that field names ``descr`` are not reserved keywords.
If this is the case, a default 'f%i' is substituted. If the argument
`names` is not None, updates the field names to valid names.
"""
ndescr = len(descr)
default_names = ['f%i' % i for i in range(ndescr)]
if names is None:
new_names = default_names
else:
if isinstance(names, (tuple, list)):
new_names = names
elif isinstance(names, str):
new_names = names.split(',')
else:
raise NameError("illegal input names %s" % repr(names))
nnames = len(new_names)
if nnames < ndescr:
new_names += default_names[nnames:]
ndescr = []
for (n, d, t) in zip(new_names, default_names, descr.descr):
if n in reserved_fields:
if t[0] in reserved_fields:
ndescr.append((d, t[1]))
else:
ndescr.append(t)
else:
ndescr.append((n, t[1]))
return np.dtype(ndescr)
def _get_fieldmask(self):
mdescr = [(n, '|b1') for n in self.dtype.names]
fdmask = np.empty(self.shape, dtype=mdescr)
fdmask.flat = tuple([False] * len(mdescr))
return fdmask
class MaskedRecords(MaskedArray, object):
"""
Attributes
----------
_data : recarray
Underlying data, as a record array.
_mask : boolean array
Mask of the records. A record is masked when all its fields are
masked.
_fieldmask : boolean recarray
Record array of booleans, setting the mask of each individual field
of each record.
_fill_value : record
Filling values for each field.
"""
def __new__(cls, shape, dtype=None, buf=None, offset=0, strides=None,
formats=None, names=None, titles=None,
byteorder=None, aligned=False,
mask=nomask, hard_mask=False, fill_value=None, keep_mask=True,
copy=False,
**options):
self = recarray.__new__(cls, shape, dtype=dtype, buf=buf, offset=offset,
strides=strides, formats=formats, names=names,
titles=titles, byteorder=byteorder,
aligned=aligned,)
mdtype = ma.make_mask_descr(self.dtype)
if mask is nomask or not np.size(mask):
if not keep_mask:
self._mask = tuple([False] * len(mdtype))
else:
mask = np.array(mask, copy=copy)
if mask.shape != self.shape:
(nd, nm) = (self.size, mask.size)
if nm == 1:
mask = np.resize(mask, self.shape)
elif nm == nd:
mask = np.reshape(mask, self.shape)
else:
msg = "Mask and data not compatible: data size is %i, " + \
"mask size is %i."
raise MAError(msg % (nd, nm))
copy = True
if not keep_mask:
self.__setmask__(mask)
self._sharedmask = True
else:
if mask.dtype == mdtype:
_mask = mask
else:
_mask = np.array([tuple([m] * len(mdtype)) for m in mask],
dtype=mdtype)
self._mask = _mask
return self
def __array_finalize__(self, obj):
# Make sure we have a _fieldmask by default
_mask = getattr(obj, '_mask', None)
if _mask is None:
objmask = getattr(obj, '_mask', nomask)
_dtype = ndarray.__getattribute__(self, 'dtype')
if objmask is nomask:
_mask = ma.make_mask_none(self.shape, dtype=_dtype)
else:
mdescr = ma.make_mask_descr(_dtype)
_mask = narray([tuple([m] * len(mdescr)) for m in objmask],
dtype=mdescr).view(recarray)
# Update some of the attributes
_dict = self.__dict__
_dict.update(_mask=_mask)
self._update_from(obj)
if _dict['_baseclass'] == ndarray:
_dict['_baseclass'] = recarray
return
def _getdata(self):
"""
Returns the data as a recarray.
"""
return ndarray.view(self, recarray)
_data = property(fget=_getdata)
def _getfieldmask(self):
"""
Alias to mask.
"""
return self._mask
_fieldmask = property(fget=_getfieldmask)
def __len__(self):
"""
Returns the length
"""
# We have more than one record
if self.ndim:
return len(self._data)
# We have only one record: return the nb of fields
return len(self.dtype)
def __getattribute__(self, attr):
try:
return object.__getattribute__(self, attr)
except AttributeError:
# attr must be a fieldname
pass
fielddict = ndarray.__getattribute__(self, 'dtype').fields
try:
res = fielddict[attr][:2]
except (TypeError, KeyError):
raise AttributeError("record array has no attribute %s" % attr)
# So far, so good
_localdict = ndarray.__getattribute__(self, '__dict__')
_data = ndarray.view(self, _localdict['_baseclass'])
obj = _data.getfield(*res)
if obj.dtype.fields:
raise NotImplementedError("MaskedRecords is currently limited to "
"simple records.")
# Get some special attributes
# Reset the object's mask
hasmasked = False
_mask = _localdict.get('_mask', None)
if _mask is not None:
try:
_mask = _mask[attr]
except IndexError:
# Couldn't find a mask: use the default (nomask)
pass
hasmasked = _mask.view((np.bool, (len(_mask.dtype) or 1))).any()
if (obj.shape or hasmasked):
obj = obj.view(MaskedArray)
obj._baseclass = ndarray
obj._isfield = True
obj._mask = _mask
# Reset the field values
_fill_value = _localdict.get('_fill_value', None)
if _fill_value is not None:
try:
obj._fill_value = _fill_value[attr]
except ValueError:
obj._fill_value = None
else:
obj = obj.item()
return obj
def __setattr__(self, attr, val):
"""
Sets the attribute attr to the value val.
"""
# Should we call __setmask__ first ?
if attr in ['mask', 'fieldmask']:
self.__setmask__(val)
return
# Create a shortcut (so that we don't have to call getattr all the time)
_localdict = object.__getattribute__(self, '__dict__')
# Check whether we're creating a new field
newattr = attr not in _localdict
try:
# Is attr a generic attribute ?
ret = object.__setattr__(self, attr, val)
except:
# Not a generic attribute: exit if it's not a valid field
fielddict = ndarray.__getattribute__(self, 'dtype').fields or {}
optinfo = ndarray.__getattribute__(self, '_optinfo') or {}
if not (attr in fielddict or attr in optinfo):
exctype, value = sys.exc_info()[:2]
raise exctype(value)
else:
# Get the list of names
fielddict = ndarray.__getattribute__(self, 'dtype').fields or {}
# Check the attribute
if attr not in fielddict:
return ret
if newattr:
# We just added this one or this setattr worked on an
# internal attribute.
try:
object.__delattr__(self, attr)
except:
return ret
# Let's try to set the field
try:
res = fielddict[attr][:2]
except (TypeError, KeyError):
raise AttributeError("record array has no attribute %s" % attr)
if val is masked:
_fill_value = _localdict['_fill_value']
if _fill_value is not None:
dval = _localdict['_fill_value'][attr]
else:
dval = val
mval = True
else:
dval = filled(val)
mval = getmaskarray(val)
obj = ndarray.__getattribute__(self, '_data').setfield(dval, *res)
_localdict['_mask'].__setitem__(attr, mval)
return obj
def __getitem__(self, indx):
"""
Returns all the fields sharing the same fieldname base.
The fieldname base is either `_data` or `_mask`.
"""
_localdict = self.__dict__
_mask = ndarray.__getattribute__(self, '_mask')
_data = ndarray.view(self, _localdict['_baseclass'])
# We want a field
if isinstance(indx, basestring):
# Make sure _sharedmask is True to propagate back to _fieldmask
# Don't use _set_mask, there are some copies being made that
# break propagation Don't force the mask to nomask, that wreaks
# easy masking
obj = _data[indx].view(MaskedArray)
obj._mask = _mask[indx]
obj._sharedmask = True
fval = _localdict['_fill_value']
if fval is not None:
obj._fill_value = fval[indx]
# Force to masked if the mask is True
if not obj.ndim and obj._mask:
return masked
return obj
# We want some elements.
# First, the data.
obj = np.array(_data[indx], copy=False).view(mrecarray)
obj._mask = np.array(_mask[indx], copy=False).view(recarray)
return obj
def __setitem__(self, indx, value):
"""
Sets the given record to value.
"""
MaskedArray.__setitem__(self, indx, value)
if isinstance(indx, basestring):
self._mask[indx] = ma.getmaskarray(value)
def __str__(self):
"""
Calculates the string representation.
"""
if self.size > 1:
mstr = ["(%s)" % ",".join([str(i) for i in s])
for s in zip(*[getattr(self, f) for f in self.dtype.names])]
return "[%s]" % ", ".join(mstr)
else:
mstr = ["%s" % ",".join([str(i) for i in s])
for s in zip([getattr(self, f) for f in self.dtype.names])]
return "(%s)" % ", ".join(mstr)
def __repr__(self):
"""
Calculates the repr representation.
"""
_names = self.dtype.names
fmt = "%%%is : %%s" % (max([len(n) for n in _names]) + 4,)
reprstr = [fmt % (f, getattr(self, f)) for f in self.dtype.names]
reprstr.insert(0, 'masked_records(')
reprstr.extend([fmt % (' fill_value', self.fill_value),
' )'])
return str("\n".join(reprstr))
def view(self, dtype=None, type=None):
"""
Returns a view of the mrecarray.
"""
# OK, basic copy-paste from MaskedArray.view.
if dtype is None:
if type is None:
output = ndarray.view(self)
else:
output = ndarray.view(self, type)
# Here again.
elif type is None:
try:
if issubclass(dtype, ndarray):
output = ndarray.view(self, dtype)
dtype = None
else:
output = ndarray.view(self, dtype)
# OK, there's the change
except TypeError:
dtype = np.dtype(dtype)
# we need to revert to MaskedArray, but keeping the possibility
# of subclasses (eg, TimeSeriesRecords), so we'll force a type
# set to the first parent
if dtype.fields is None:
basetype = self.__class__.__bases__[0]
output = self.__array__().view(dtype, basetype)
output._update_from(self)
else:
output = ndarray.view(self, dtype)
output._fill_value = None
else:
output = ndarray.view(self, dtype, type)
# Update the mask, just like in MaskedArray.view
if (getattr(output, '_mask', nomask) is not nomask):
mdtype = ma.make_mask_descr(output.dtype)
output._mask = self._mask.view(mdtype, ndarray)
output._mask.shape = output.shape
return output
def harden_mask(self):
"""
Forces the mask to hard.
"""
self._hardmask = True
def soften_mask(self):
"""
Forces the mask to soft
"""
self._hardmask = False
def copy(self):
"""
Returns a copy of the masked record.
"""
copied = self._data.copy().view(type(self))
copied._mask = self._mask.copy()
return copied
def tolist(self, fill_value=None):
"""
Return the data portion of the array as a list.
Data items are converted to the nearest compatible Python type.
Masked values are converted to fill_value. If fill_value is None,
the corresponding entries in the output list will be ``None``.
"""
if fill_value is not None:
return self.filled(fill_value).tolist()
result = narray(self.filled().tolist(), dtype=object)
mask = narray(self._mask.tolist())
result[mask] = None
return result.tolist()
def __getstate__(self):
"""Return the internal state of the masked array.
This is for pickling.
"""
state = (1,
self.shape,
self.dtype,
self.flags.fnc,
self._data.tobytes(),
self._mask.tobytes(),
self._fill_value,
)
return state
def __setstate__(self, state):
"""
Restore the internal state of the masked array.
This is for pickling. ``state`` is typically the output of the
``__getstate__`` output, and is a 5-tuple:
- class name
- a tuple giving the shape of the data
- a typecode for the data
- a binary string for the data
- a binary string for the mask.
"""
(ver, shp, typ, isf, raw, msk, flv) = state
ndarray.__setstate__(self, (shp, typ, isf, raw))
mdtype = dtype([(k, bool_) for (k, _) in self.dtype.descr])
self.__dict__['_mask'].__setstate__((shp, mdtype, isf, msk))
self.fill_value = flv
def __reduce__(self):
"""
Return a 3-tuple for pickling a MaskedArray.
"""
return (_mrreconstruct,
(self.__class__, self._baseclass, (0,), 'b',),
self.__getstate__())
def _mrreconstruct(subtype, baseclass, baseshape, basetype,):
"""
Build a new MaskedArray from the information stored in a pickle.
"""
_data = ndarray.__new__(baseclass, baseshape, basetype).view(subtype)
_mask = ndarray.__new__(ndarray, baseshape, 'b1')
return subtype.__new__(subtype, _data, mask=_mask, dtype=basetype,)
mrecarray = MaskedRecords
###############################################################################
# Constructors #
###############################################################################
def fromarrays(arraylist, dtype=None, shape=None, formats=None,
names=None, titles=None, aligned=False, byteorder=None,
fill_value=None):
"""
Creates a mrecarray from a (flat) list of masked arrays.
Parameters
----------
arraylist : sequence
A list of (masked) arrays. Each element of the sequence is first converted
to a masked array if needed. If a 2D array is passed as argument, it is
processed line by line
dtype : {None, dtype}, optional
Data type descriptor.
shape : {None, integer}, optional
Number of records. If None, shape is defined from the shape of the
first array in the list.
formats : {None, sequence}, optional
Sequence of formats for each individual field. If None, the formats will
be autodetected by inspecting the fields and selecting the highest dtype
possible.
names : {None, sequence}, optional
Sequence of the names of each field.
fill_value : {None, sequence}, optional
Sequence of data to be used as filling values.
Notes
-----
Lists of tuples should be preferred over lists of lists for faster processing.
"""
datalist = [getdata(x) for x in arraylist]
masklist = [np.atleast_1d(getmaskarray(x)) for x in arraylist]
_array = recfromarrays(datalist,
dtype=dtype, shape=shape, formats=formats,
names=names, titles=titles, aligned=aligned,
byteorder=byteorder).view(mrecarray)
_array._mask.flat = list(zip(*masklist))
if fill_value is not None:
_array.fill_value = fill_value
return _array
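# Illustrative sketch (not part of the original module): a minimal use of
# fromarrays. The field names and values below are example data only.
#
#     import numpy.ma as ma
#     from numpy.ma.mrecords import fromarrays
#
#     x = ma.array([1, 2, 3], mask=[0, 1, 0])
#     y = ma.array([1.0, 2.0, 3.0], mask=[0, 0, 1])
#     rec = fromarrays([x, y], names='a,b')
#     # rec is an mrecarray; rec['a'][1] and rec['b'][2] are masked.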
def fromrecords(reclist, dtype=None, shape=None, formats=None, names=None,
titles=None, aligned=False, byteorder=None,
fill_value=None, mask=nomask):
"""
Creates a MaskedRecords from a list of records.
Parameters
----------
reclist : sequence
A list of records. Each element of the sequence is first converted
to a masked array if needed. If a 2D array is passed as argument, it is
processed line by line
dtype : {None, dtype}, optional
Data type descriptor.
shape : {None,int}, optional
Number of records. If None, ``shape`` is defined from the shape of the
first array in the list.
formats : {None, sequence}, optional
Sequence of formats for each individual field. If None, the formats will
be autodetected by inspecting the fields and selecting the highest dtype
possible.
names : {None, sequence}, optional
Sequence of the names of each field.
fill_value : {None, sequence}, optional
Sequence of data to be used as filling values.
mask : {nomask, sequence}, optional.
External mask to apply on the data.
Notes
-----
Lists of tuples should be preferred over lists of lists for faster processing.
"""
# Grab the initial _fieldmask, if needed:
_mask = getattr(reclist, '_mask', None)
# Get the list of records.
if isinstance(reclist, ndarray):
# Make sure we don't have some hidden mask
if isinstance(reclist, MaskedArray):
reclist = reclist.filled().view(ndarray)
# Grab the initial dtype, just in case
if dtype is None:
dtype = reclist.dtype
reclist = reclist.tolist()
mrec = recfromrecords(reclist, dtype=dtype, shape=shape, formats=formats,
names=names, titles=titles,
aligned=aligned, byteorder=byteorder).view(mrecarray)
# Set the fill_value if needed
if fill_value is not None:
mrec.fill_value = fill_value
# Now, let's deal w/ the mask
if mask is not nomask:
mask = np.array(mask, copy=False)
maskrecordlength = len(mask.dtype)
if maskrecordlength:
mrec._mask.flat = mask
elif len(mask.shape) == 2:
mrec._mask.flat = [tuple(m) for m in mask]
else:
mrec.__setmask__(mask)
if _mask is not None:
mrec._mask[:] = _mask
return mrec
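# Illustrative sketch (example values only): fromrecords builds the same kind
# of mrecarray from row tuples plus an optional explicit mask, e.g.
#
#     rec = fromrecords([(1, 1.0), (2, 2.0)], names='a,b',
#                       mask=[(False, True), (False, False)])
#     # rec['b'][0] is masked.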
def _guessvartypes(arr):
"""
Tries to guess the dtypes of the str_ ndarray `arr`.
Guesses by testing element-wise conversion. Returns a list of dtypes.
The array is first converted to ndarray. If the array is 2D, the test
is performed on the first line. An exception is raised if the file is
3D or more.
"""
vartypes = []
arr = np.asarray(arr)
if len(arr.shape) == 2:
arr = arr[0]
elif len(arr.shape) > 2:
raise ValueError("The array should be 2D at most!")
# Start the conversion loop.
for f in arr:
try:
int(f)
except ValueError:
try:
float(f)
except ValueError:
try:
complex(f)
except ValueError:
vartypes.append(arr.dtype)
else:
vartypes.append(np.dtype(complex))
else:
vartypes.append(np.dtype(float))
else:
vartypes.append(np.dtype(int))
return vartypes
def openfile(fname):
"""
Opens the file handle of file `fname`.
"""
# A file handle
if hasattr(fname, 'readline'):
return fname
# Try to open the file and guess its type
try:
f = open(fname)
except IOError:
raise IOError("No such file: '%s'" % fname)
if f.readline()[:2] != "\\x":
f.seek(0, 0)
return f
f.close()
raise NotImplementedError("Wow, binary file")
def fromtextfile(fname, delimitor=None, commentchar='#', missingchar='',
varnames=None, vartypes=None):
"""
Creates a mrecarray from data stored in the file `filename`.
Parameters
----------
fname : {file name/handle}
Handle of an opened file.
delimitor : {None, string}, optional
Alphanumeric character used to separate columns in the file.
If None, any (group of) white spacestring(s) will be used.
commentchar : {'#', string}, optional
Alphanumeric character used to mark the start of a comment.
missingchar : {'', string}, optional
String indicating missing data, and used to create the masks.
varnames : {None, sequence}, optional
Sequence of the variable names. If None, a list will be created from
the first non empty line of the file.
vartypes : {None, sequence}, optional
Sequence of the variables dtypes. If None, it will be estimated from
the first non-commented line.
Ultra simple format: the variable names are in the header, on one line."""
# Try to open the file.
ftext = openfile(fname)
# Get the first non-empty line as the varnames
while True:
line = ftext.readline()
firstline = line[:line.find(commentchar)].strip()
_varnames = firstline.split(delimitor)
if len(_varnames) > 1:
break
if varnames is None:
varnames = _varnames
# Get the data.
_variables = masked_array([line.strip().split(delimitor) for line in ftext
if line[0] != commentchar and len(line) > 1])
(_, nfields) = _variables.shape
ftext.close()
# Try to guess the dtype.
if vartypes is None:
vartypes = _guessvartypes(_variables[0])
else:
vartypes = [np.dtype(v) for v in vartypes]
if len(vartypes) != nfields:
msg = "Attempted to use %i dtypes for %i fields!"
msg += " Reverting to default."
warnings.warn(msg % (len(vartypes), nfields))
vartypes = _guessvartypes(_variables[0])
# Construct the descriptor.
mdescr = [(n, f) for (n, f) in zip(varnames, vartypes)]
mfillv = [ma.default_fill_value(f) for f in vartypes]
# Get the data and the mask.
# We just need a list of masked_arrays. It's easier to create it like that:
_mask = (_variables.T == missingchar)
_datalist = [masked_array(a, mask=m, dtype=t, fill_value=f)
for (a, m, t, f) in zip(_variables.T, _mask, vartypes, mfillv)]
return fromarrays(_datalist, dtype=mdescr)
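# Illustrative sketch (file name and contents are hypothetical): for a
# whitespace-delimited file 'data.txt' whose first non-comment line holds the
# variable names and where missing values are marked with 'N/A',
#
#     rec = fromtextfile('data.txt', missingchar='N/A')
#
# would return an mrecarray with the 'N/A' entries masked and dtypes guessed
# from the first data line.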
def addfield(mrecord, newfield, newfieldname=None):
"""Adds a new field to the masked record array
Uses `newfield` as data and `newfieldname` as name. If `newfieldname`
is None, the new field name is set to 'fi', where `i` is the number of
existing fields.
"""
_data = mrecord._data
_mask = mrecord._mask
if newfieldname is None or newfieldname in reserved_fields:
newfieldname = 'f%i' % len(_data.dtype)
newfield = ma.array(newfield)
# Get the new data.
# Create a new empty recarray
newdtype = np.dtype(_data.dtype.descr + [(newfieldname, newfield.dtype)])
newdata = recarray(_data.shape, newdtype)
# Add the existing fields
[newdata.setfield(_data.getfield(*f), *f)
for f in _data.dtype.fields.values()]
# Add the new field
newdata.setfield(newfield._data, *newdata.dtype.fields[newfieldname])
newdata = newdata.view(MaskedRecords)
# Get the new mask
# Create a new empty recarray
newmdtype = np.dtype([(n, bool_) for n in newdtype.names])
newmask = recarray(_data.shape, newmdtype)
# Add the old masks
[newmask.setfield(_mask.getfield(*f), *f)
for f in _mask.dtype.fields.values()]
# Add the mask of the new field
newmask.setfield(getmaskarray(newfield),
*newmask.dtype.fields[newfieldname])
newdata._mask = newmask
return newdata
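# Illustrative sketch (example data only): addfield extends an existing masked
# record array with one more field while keeping per-field masks.
#
#     import numpy.ma as ma
#     base = fromarrays([ma.array([1, 2], mask=[0, 1])], names='a')
#     extended = addfield(base, ma.array([10.0, 20.0], mask=[1, 0]),
#                         newfieldname='b')
#     # extended.a[1] and extended.b[0] are masked.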
| bsd-3-clause |
ewjoachim/dropbox-sdk-python | dropbox/session.py | 3 | 13077 | import pkg_resources
import six
import ssl
import requests
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.poolmanager import PoolManager
_TRUSTED_CERT_FILE = pkg_resources.resource_filename(__name__, 'trusted-certs.crt')
# TODO(kelkabany): We probably only want to instantiate this once so that even
# if multiple Dropbox objects are instantiated, they all share the same pool.
class _SSLAdapter(HTTPAdapter):
def init_poolmanager(self, connections, maxsize, block=False):
self.poolmanager = PoolManager(num_pools=connections,
maxsize=maxsize,
block=block,
cert_reqs=ssl.CERT_REQUIRED,
ca_certs=_TRUSTED_CERT_FILE,
ssl_version=ssl.PROTOCOL_TLSv1)
def pinned_session(pool_maxsize=8):
http_adapter = _SSLAdapter(pool_connections=4,
pool_maxsize=pool_maxsize)
_session = requests.session()
_session.mount('https://', http_adapter)
return _session
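# Illustrative sketch (the URL is an example only, not an SDK call): the
# returned object is an ordinary requests.Session whose HTTPS connections are
# pinned to the bundled Dropbox certificates:
#
#     session = pinned_session()
#     response = session.get('https://api.dropbox.com/')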
"""
Deprecated: The code below is included only to support the use of the old v1
client class. It will be removed once v2 is at parity with v1. Do not use this
for any new functionality.
"""
import random
import six
import sys
import time
import urllib
try:
from urlparse import parse_qs
except ImportError:
# fall back for Python 2.5
from cgi import parse_qs
from . import rest
if six.PY3:
url_path_quote = urllib.parse.quote
url_encode = urllib.parse.urlencode
else:
url_path_quote = urllib.quote
url_encode = urllib.urlencode
class OAuthToken(object):
"""
A class representing an OAuth token. Contains two fields: ``key`` and
``secret``.
"""
def __init__(self, key, secret):
self.key = key
self.secret = secret
class BaseSession(object):
API_VERSION = 1
API_HOST = "api.dropbox.com"
WEB_HOST = "www.dropbox.com"
API_CONTENT_HOST = "api-content.dropbox.com"
API_NOTIFICATION_HOST = "api-notify.dropbox.com"
def __init__(self, consumer_key, consumer_secret, access_type="auto", locale=None, rest_client=rest.RESTClient):
"""Initialize a DropboxSession object.
Your consumer key and secret are available
at https://www.dropbox.com/developers/apps
Args:
- ``access_type``: Either 'auto' (the default), 'dropbox', or
'app_folder'. You probably don't need to specify this and should
just use the default.
- ``locale``: A locale string ('en', 'pt_PT', etc.) [optional]
The locale setting will be used to translate any user-facing error
messages that the server generates. At this time Dropbox supports
'en', 'es', 'fr', 'de', and 'ja', though we will be supporting more
languages in the future. If you send a language the server doesn't
support, messages will remain in English. Look for these translated
messages in rest.ErrorResponse exceptions as e.user_error_msg.
"""
assert access_type in ['dropbox', 'app_folder', 'auto'], "expected access_type of 'auto', 'dropbox', or 'app_folder'"
self.consumer_creds = OAuthToken(consumer_key, consumer_secret)
self.token = None
self.request_token = None
self.root = 'sandbox' if access_type == 'app_folder' else access_type
self.locale = locale
self.rest_client = rest_client
def is_linked(self):
"""Return whether the DropboxSession has an access token attached."""
return bool(self.token)
def unlink(self):
"""Remove any attached access token from the DropboxSession."""
self.token = None
def build_path(self, target, params=None):
"""Build the path component for an API URL.
This method urlencodes the parameters, adds them
to the end of the target url, and puts a marker for the API
version in front.
Args:
- ``target``: A target url (e.g. '/files') to build upon.
- ``params``: A dictionary of parameters (name to value). [optional]
Returns:
- The path and parameters components of an API URL.
"""
if six.PY2 and isinstance(target, six.text_type):
target = target.encode("utf8")
target_path = url_path_quote(target)
params = params or {}
params = params.copy()
if self.locale:
params['locale'] = self.locale
if params:
return "/%s%s?%s" % (self.API_VERSION, target_path, url_encode(params))
else:
return "/%s%s" % (self.API_VERSION, target_path)
def build_url(self, host, target, params=None):
"""Build an API URL.
This method adds scheme and hostname to the path
returned from build_path.
Args:
- ``target``: A target url (e.g. '/files') to build upon.
- ``params``: A dictionary of parameters (name to value). [optional]
Returns:
- The full API URL.
"""
return "https://%s%s" % (host, self.build_path(target, params))
class DropboxSession(BaseSession):
def set_token(self, access_token, access_token_secret):
"""Attach an access token to the DropboxSession.
Note that the access 'token' is made up of both a token string
and a secret string.
"""
self.token = OAuthToken(access_token, access_token_secret)
def set_request_token(self, request_token, request_token_secret):
"""Attach an request token to the DropboxSession.
Note that the request 'token' is made up of both a token string
and a secret string.
"""
self.request_token = OAuthToken(request_token, request_token_secret)
def build_authorize_url(self, request_token, oauth_callback=None):
"""Build a request token authorization URL.
After obtaining a request token, you'll need to send the user to
the URL returned from this function so that they can confirm that
they want to connect their account to your app.
Args:
- ``request_token``: A request token from obtain_request_token.
- ``oauth_callback``: A url to redirect back to with the authorized
request token.
Returns:
- An authorization for the given request token.
"""
params = {'oauth_token': request_token.key,
}
if oauth_callback:
params['oauth_callback'] = oauth_callback
return self.build_url(self.WEB_HOST, '/oauth/authorize', params)
def obtain_request_token(self):
"""Obtain a request token from the Dropbox API.
This is your first step in the OAuth process. You call this to get a
request_token from the Dropbox server that you can then use with
DropboxSession.build_authorize_url() to get the user to authorize it.
After it's authorized you use this token with
DropboxSession.obtain_access_token() to get an access token.
NOTE: You should only need to do this once for each user, and then you
can store the access token for that user for later operations.
Returns:
- An :py:class:`OAuthToken` object representing the
request token Dropbox assigned to this app. Also attaches the
request token as self.request_token.
"""
self.token = None # clear any token currently on the request
url = self.build_url(self.API_HOST, '/oauth/request_token')
headers, params = self.build_access_headers('POST', url)
response = self.rest_client.POST(url, headers=headers, params=params, raw_response=True)
self.request_token = self._parse_token(response.read())
return self.request_token
def obtain_access_token(self, request_token=None):
"""Obtain an access token for a user.
After you get a request token, and then send the user to the authorize
URL, you can use the authorized request token with this method to get the
access token to use for future operations. The access token is stored on
the session object.
Args:
- ``request_token``: A request token from obtain_request_token. [optional]
The request_token should have been authorized via the
authorization url from build_authorize_url. If you don't pass
a request_token, the fallback is self.request_token, which
will exist if you previously called obtain_request_token on this
DropboxSession instance.
Returns:
- An :py:class:`OAuthToken` object with fields ``key`` and ``secret``
representing the access token Dropbox assigned to this app and
user. Also attaches the access token as self.token.
"""
request_token = request_token or self.request_token
assert request_token, "No request_token available on the session. Please pass one."
url = self.build_url(self.API_HOST, '/oauth/access_token')
headers, params = self.build_access_headers('POST', url, request_token=request_token)
response = self.rest_client.POST(url, headers=headers, params=params, raw_response=True)
self.token = self._parse_token(response.read())
return self.token
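# Illustrative sketch of the OAuth 1 linking flow implemented by the methods
# above (the consumer key/secret values are placeholders):
#
#     sess = DropboxSession('CONSUMER_KEY', 'CONSUMER_SECRET')
#     request_token = sess.obtain_request_token()
#     url = sess.build_authorize_url(request_token)
#     # send the user to `url`; once they approve the app:
#     access_token = sess.obtain_access_token(request_token)
#     # sess.token is now set and future requests can be signed.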
def build_access_headers(self, method, resource_url, params=None, request_token=None):
"""Build OAuth access headers for a future request.
Args:
- ``method``: The HTTP method being used (e.g. 'GET' or 'POST').
- ``resource_url``: The full url the request will be made to.
- ``params``: A dictionary of parameters to add to what's already on the url.
Typically, this would consist of POST parameters.
Returns:
- A tuple of (header_dict, params) where header_dict is a dictionary
of header names and values appropriate for passing into dropbox.rest.RESTClient
and params is a dictionary like the one that was passed in, but augmented with
oauth-related parameters as appropriate.
"""
if params is None:
params = {}
else:
params = params.copy()
oauth_params = {
'oauth_consumer_key' : self.consumer_creds.key,
'oauth_timestamp' : self._generate_oauth_timestamp(),
'oauth_nonce' : self._generate_oauth_nonce(),
'oauth_version' : self._oauth_version(),
}
token = request_token if request_token is not None else self.token
if token:
oauth_params['oauth_token'] = token.key
self._oauth_sign_request(oauth_params, self.consumer_creds, token)
headers = {
'Authorization':
'OAuth %s' % ','.join(
'%s="%s"' % (k, v) for k, v in six.iteritems(oauth_params))}
return headers, params
@classmethod
def _oauth_sign_request(cls, params, consumer_pair, token_pair):
params.update({'oauth_signature_method' : 'PLAINTEXT',
'oauth_signature' : ('%s&%s' % (consumer_pair.secret, token_pair.secret)
if token_pair is not None else
'%s&' % (consumer_pair.secret,))})
@classmethod
def _generate_oauth_timestamp(cls):
return int(time.time())
@classmethod
def _generate_oauth_nonce(cls, length=8):
return ''.join([str(random.randint(0, 9)) for i in range(length)])
@classmethod
def _oauth_version(cls):
return '1.0'
@classmethod
def _parse_token(cls, s):
if not s:
raise ValueError("Invalid parameter string.")
params = parse_qs(s, keep_blank_values=False)
if not params:
raise ValueError("Invalid parameter string: %r" % s)
try:
key = params['oauth_token'][0]
except Exception:
raise ValueError("'oauth_token' not found in OAuth request.")
try:
secret = params['oauth_token_secret'][0]
except Exception:
raise ValueError("'oauth_token_secret' not found in "
"OAuth request.")
return OAuthToken(key, secret)
# Don't use this class directly.
class DropboxOAuth2Session(BaseSession):
def __init__(self, oauth2_access_token, locale, rest_client=rest.RESTClient):
super(DropboxOAuth2Session, self).__init__("", "", "auto", locale=locale, rest_client=rest_client)
self.access_token = oauth2_access_token
def build_access_headers(self, method, resource_url, params=None, token=None):
assert token is None
headers = {"Authorization": "Bearer " + self.access_token}
return headers, params
| mit |
hutchison/bp_mgmt | bp_cupid/views/Student.py | 1 | 2805 | from django.shortcuts import render, get_object_or_404
from django.utils.decorators import method_decorator
from django.contrib.auth.decorators import login_required, user_passes_test
from django.views.generic import View
from ..models import (
Student,
Gewicht,
Platz,
Zeitraum,
)
class StudentDetail(View):
template_name = 'bp_cupid/student.html'
@method_decorator(login_required)
@method_decorator(user_passes_test(lambda u: u.is_staff))
def get(self, request, mat_nr):
"""
Zeigt die Gewichte zu allen Praxen vom ausgewählten Studenten an.
"""
s = get_object_or_404(Student, mat_nr=mat_nr)
akt_verw_zr = request.user.mitarbeiter.akt_verw_zeitraum
aktuelle_zeitraeume = Zeitraum.objects.filter(
block__verwaltungszeitraum=akt_verw_zr,
)
gewichte = Gewicht.objects.filter(
student__mat_nr=mat_nr
).prefetch_related('praxis__freie_zeitraeume').order_by('-wert')
try:
platz = Platz.objects.select_related('praxis').get(student=s)
except Platz.DoesNotExist:
platz = None
context = {
'student': s,
'gewichte': gewichte,
'platz': platz,
'aktuelle_zeitraeume': aktuelle_zeitraeume,
}
return render(request, self.template_name, context)
class StudentList(View):
template_name = 'bp_cupid/studenten.html'
@method_decorator(login_required)
@method_decorator(user_passes_test(lambda u: u.is_staff))
def get(self, request):
"""
Display all students in a table, sorted by last name.
"""
akt_verw_zr = request.user.mitarbeiter.akt_verw_zeitraum
context = {
'studenten': Student.objects.filter(
verwaltungszeitraum=akt_verw_zr
).select_related(
'platz'
).prefetch_related(
'landkreise',
'bevorzugte_praxen',
).order_by('name'),
}
anz_studis = Student.objects.filter(
verwaltungszeitraum=akt_verw_zr
).count()
if anz_studis:
anz_studis_mit_fragebogen = Student.objects.filter(
verwaltungszeitraum=akt_verw_zr,
hat_fragebogen_ausgefuellt=True,
).count()
rel_fragebogen = round(
100 * anz_studis_mit_fragebogen / anz_studis, 1
)
context.update(
{
'anz_studis': anz_studis,
'anz_studis_mit_fragebogen': anz_studis_mit_fragebogen,
'rel_fragebogen': rel_fragebogen,
}
)
return render(request, self.template_name, context)
| agpl-3.0 |
dmargala/qusp | examples/compare_delta.py | 1 | 7364 | #!/usr/bin/env python
import argparse
import numpy as np
import numpy.ma as ma
import h5py
import qusp
import matplotlib.pyplot as plt
import scipy.interpolate
import fitsio
class DeltaLOS(object):
def __init__(self, thing_id):
path = '/data/lya/deltas/delta-%d.fits' % thing_id
hdulist = fitsio.FITS(path, mode=fitsio.READONLY)
self.pmf = hdulist[1].read_header()['pmf']
self.loglam = hdulist[1]['loglam'][:]
self.wave = np.power(10.0, self.loglam)
self.delta = hdulist[1]['delta'][:]
self.weight = hdulist[1]['weight'][:]
self.cont = hdulist[1]['cont'][:]
self.msha = hdulist[1]['msha'][:]
self.mabs = hdulist[1]['mabs'][:]
self.ivar = hdulist[1]['ivar'][:]
self.cf = self.cont*self.msha*self.mabs
self.flux = (1+self.delta)*self.cf
def main():
# parse command-line arguments
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("--verbose", action="store_true",
help="print verbose output")
## targets to fit
parser.add_argument("--name", type=str, default=None,
help="target list")
parser.add_argument("--gamma", type=float, default=3.8,
help="LSS growth and redshift evolution of mean absorption gamma")
parser.add_argument("--index", type=int, default=1000,
help="target index")
parser.add_argument("--pmf", type=str, default=None,
help="target plate-mjd-fiber string")
args = parser.parse_args()
print 'Loading forest data...'
# import data
skim = h5py.File(args.name+'.hdf5', 'r')
if args.pmf:
plate, mjd, fiber = [int(val) for val in args.pmf.split('-')]
index = np.where((skim['meta']['plate'] == plate) & (skim['meta']['mjd'] == mjd) & (skim['meta']['fiber'] == fiber))[0][0]
else:
index = args.index
flux = np.ma.MaskedArray(skim['flux'][index], mask=skim['mask'][index])
ivar = np.ma.MaskedArray(skim['ivar'][index], mask=skim['mask'][index])
loglam = skim['loglam'][:]
wave = np.power(10.0, loglam)
z = skim['z'][index]
norm = skim['norm'][index]
meta = skim['meta'][index]
linear_continuum = h5py.File(args.name+'-linear-continuum.hdf5', 'r')
a = linear_continuum['params_a'][index]
b = linear_continuum['params_b'][index]
continuum = linear_continuum['continuum']
continuum_wave = linear_continuum['continuum_wave']
continuum_interp = scipy.interpolate.UnivariateSpline(continuum_wave, continuum, ext=1, s=0)
abs_alpha = linear_continuum.attrs['abs_alpha']
abs_beta = linear_continuum.attrs['abs_beta']
forest_wave_ref = (1+z)*linear_continuum.attrs['forest_wave_ref']
wave_lya = linear_continuum.attrs['wave_lya']
forest_pixel_redshifts = wave/wave_lya - 1
abs_coefs = abs_alpha*np.power(1+forest_pixel_redshifts, abs_beta)
print 'flux 1280 Ang: %.2f' % norm
print 'fit param a: %.2f' % a
print 'fit param b: %.2f' % b
def model_flux(a, b):
return a*np.power(wave/forest_wave_ref, b)*continuum_interp(wave/(1+z))*np.exp(-abs_coefs)
def chisq(p):
mflux = model_flux(p[0], p[1])
res = flux - mflux
return ma.sum(res*res*ivar)/ma.sum(ivar)
from scipy.optimize import minimize
result = minimize(chisq, (a, b))
a,b = result.x
print 'fit param a: %.2f' % a
print 'fit param b: %.2f' % b
# rest and obs refer to pixel grid
print 'Estimating deltas in forest frame...'
mflux = model_flux(a,b)
delta_flux = flux/mflux - 1.0
delta_ivar = ivar*mflux*mflux
forest_min_z = linear_continuum.attrs['forest_min_z']
forest_max_z = linear_continuum.attrs['forest_max_z']
forest_dz = 0.1
forest_z_bins = np.arange(forest_min_z, forest_max_z + forest_dz, forest_dz)
print 'Adjusting weights for pipeline variance and LSS variance...'
var_lss = scipy.interpolate.UnivariateSpline(forest_z_bins, 0.05 + 0.06*(forest_z_bins - 2.0)**2, s=0)
var_pipe_scale = scipy.interpolate.UnivariateSpline(forest_z_bins, 0.7 + 0.2*(forest_z_bins - 2.0)**2, s=0)
delta_weight = delta_ivar*var_pipe_scale(forest_pixel_redshifts)
delta_weight = delta_weight/(1 + delta_weight*var_lss(forest_pixel_redshifts))
thing_id = meta['thing_id']
pmf = '%s-%s-%s' % (meta['plate'],meta['mjd'],meta['fiber'])
los = DeltaLOS(thing_id)
my_msha = norm*a*np.power(wave/forest_wave_ref, b)
my_wave = wave
my_flux = norm*flux
my_cf = my_msha*continuum_interp(wave/(1+z))*np.exp(-abs_coefs)
my_ivar = ivar/(norm*norm)
my_delta = delta_flux
my_weight = delta_weight
# mean_ratio = np.average(my_msha*continuum)/ma.average(los.msha*los.cont)
# print mean_ratio
plt.figure(figsize=(12,4))
plt.plot(my_wave, my_flux, color='gray')
my_dflux = ma.power(my_ivar, -0.5)
plt.fill_between(my_wave, my_flux - my_dflux, my_flux + my_dflux, color='gray', alpha=0.5)
plt.plot(my_wave, my_msha*continuum_interp(wave/(1+z)), label='My continuum', color='blue')
plt.plot(los.wave, los.cont, label='Busca continuum', color='red')
plt.plot(my_wave, my_cf, label='My cf', color='green')
plt.plot(los.wave, los.cf, label='Busca cf', color='orange')
plt.legend()
plt.title(r'%s (%s), $z$ = %.2f' % (pmf, thing_id, z))
plt.xlabel(r'Observed Wavelength ($\AA$)')
plt.ylabel(r'Observed Flux')
plt.xlim(los.wave[[0,-1]])
plt.savefig(args.name+'-example-flux.png', dpi=100, bbox_inches='tight')
plt.close()
plt.figure(figsize=(12,4))
my_delta_sigma = ma.power(delta_weight, -0.5)
# plt.fill_between(my_wave, my_delta - my_delta_sigma, my_delta + my_delta_sigma, color='blue', alpha=0.1, label='My Delta')
plt.scatter(my_wave, my_delta, color='blue', marker='+', label='My Delta')
plt.plot(my_wave, +my_delta_sigma, color='blue', ls=':')
plt.plot(my_wave, -my_delta_sigma, color='blue', ls=':')
los_delta_sigma = ma.power(los.weight, -0.5)
# plt.fill_between(los.wave, los.delta - los_delta_sigma, los.delta + los_delta_sigma, color='red', alpha=01, label='Busca Delta')
plt.scatter(los.wave, los.delta, color='red', marker='+', label='Busca Delta')
plt.plot(los.wave, +los_delta_sigma, color='red', ls=':')
plt.plot(los.wave, -los_delta_sigma, color='red', ls=':')
my_lss_sigma = np.sqrt(var_lss(forest_pixel_redshifts))
plt.plot(my_wave, +my_lss_sigma, color='black', ls='--')
plt.plot(my_wave, -my_lss_sigma, color='black', ls='--')
# my_sn_sigma = np.sqrt(np.power(1 + forest_pixel_redshifts, 0.5*abs_beta))/10
# plt.plot(my_wave, +my_sn_sigma, color='orange', ls='--')
# plt.plot(my_wave, -my_sn_sigma, color='orange', ls='--')
# import matplotlib.patches as mpatches
#
# blue_patch = mpatches.Patch(color='blue', alpha=0.3, label='My Delta')
# red_patch = mpatches.Patch(color='red', alpha=0.3, label='Busca Delta')
# plt.legend(handles=[blue_patch,red_patch])
plt.title(r'%s (%s), $z$ = %.2f' % (pmf, thing_id, z))
plt.ylim(-2,2)
plt.xlim(los.wave[[0,-1]])
plt.xlabel(r'Observed Wavelength ($\AA$)')
plt.ylabel(r'Delta')
plt.legend()
plt.savefig(args.name+'-example-delta.png', dpi=100, bbox_inches='tight')
plt.close()
if __name__ == '__main__':
main()
| mit |
pjryan126/solid-start-careers | store/api/glassdoor/venv/lib/python2.7/site-packages/requests/packages/chardet/codingstatemachine.py | 2931 | 2318 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .constants import eStart
from .compat import wrap_ord
class CodingStateMachine:
def __init__(self, sm):
self._mModel = sm
self._mCurrentBytePos = 0
self._mCurrentCharLen = 0
self.reset()
def reset(self):
self._mCurrentState = eStart
def next_state(self, c):
# for each byte we get its class
# if it is first byte, we also get byte length
# PY3K: aBuf is a byte stream, so c is an int, not a byte
byteCls = self._mModel['classTable'][wrap_ord(c)]
if self._mCurrentState == eStart:
self._mCurrentBytePos = 0
self._mCurrentCharLen = self._mModel['charLenTable'][byteCls]
# from byte's class and stateTable, we get its next state
curr_state = (self._mCurrentState * self._mModel['classFactor']
+ byteCls)
self._mCurrentState = self._mModel['stateTable'][curr_state]
self._mCurrentBytePos += 1
return self._mCurrentState
def get_current_charlen(self):
return self._mCurrentCharLen
def get_coding_state_machine(self):
return self._mModel['name']
| gpl-2.0 |
edxnercel/edx-platform | lms/djangoapps/lti_provider/users.py | 80 | 5166 | """
LTI user management functionality. This module reconciles the two identities
that an individual has in the campus LMS platform and on edX.
"""
import string
import random
import uuid
from django.conf import settings
from django.contrib.auth import authenticate, login
from django.contrib.auth.models import User
from django.core.exceptions import PermissionDenied
from django.db import IntegrityError
from lti_provider.models import LtiUser
from student.models import UserProfile
def authenticate_lti_user(request, lti_user_id, lti_consumer):
"""
Determine whether the user specified by the LTI launch has an existing
account. If not, create a new Django User model and associate it with an
LtiUser object.
If the currently logged-in user does not match the user specified by the LTI
launch, log out the old user and log in the LTI identity.
"""
try:
lti_user = LtiUser.objects.get(
lti_user_id=lti_user_id,
lti_consumer=lti_consumer
)
except LtiUser.DoesNotExist:
# This is the first time that the user has been here. Create an account.
lti_user = create_lti_user(lti_user_id, lti_consumer)
if not (request.user.is_authenticated() and
request.user == lti_user.edx_user):
# The user is not authenticated, or is logged in as somebody else.
# Switch them to the LTI user
switch_user(request, lti_user, lti_consumer)
def create_lti_user(lti_user_id, lti_consumer):
"""
Generate a new user on the edX platform with a random username and password,
and associate that account with the LTI identity.
"""
edx_password = str(uuid.uuid4())
created = False
while not created:
try:
edx_user_id = generate_random_edx_username()
edx_email = "{}@{}".format(edx_user_id, settings.LTI_USER_EMAIL_DOMAIN)
edx_user = User.objects.create_user(
username=edx_user_id,
password=edx_password,
email=edx_email,
)
# A profile is required if PREVENT_CONCURRENT_LOGINS flag is set.
# TODO: We could populate user information from the LTI launch here,
# but it's not necessary for our current uses.
edx_user_profile = UserProfile(user=edx_user)
edx_user_profile.save()
created = True
except IntegrityError:
# The random edx_user_id wasn't unique. Since 'created' is still
# False, we will retry with a different random ID.
pass
lti_user = LtiUser(
lti_consumer=lti_consumer,
lti_user_id=lti_user_id,
edx_user=edx_user
)
lti_user.save()
return lti_user
def switch_user(request, lti_user, lti_consumer):
"""
Log out the current user, and log in using the edX identity associated with
the LTI ID.
"""
edx_user = authenticate(
username=lti_user.edx_user.username,
lti_user_id=lti_user.lti_user_id,
lti_consumer=lti_consumer
)
if not edx_user:
# This shouldn't happen, since we've created edX accounts for any LTI
# users by this point, but just in case we can return a 403.
raise PermissionDenied()
login(request, edx_user)
def generate_random_edx_username():
"""
Create a valid random edX user ID. An ID is at most 30 characters long, and
can contain upper and lowercase letters and numbers.
:return:
"""
allowable_chars = string.ascii_letters + string.digits
username = ''
for _index in range(30):
username = username + random.SystemRandom().choice(allowable_chars)
return username
class LtiBackend(object):
"""
A Django authentication backend that authenticates users via LTI. This
backend will only return a User object if it is associated with an LTI
identity (i.e. the user was created by the create_lti_user method above).
"""
def authenticate(self, username=None, lti_user_id=None, lti_consumer=None):
"""
Try to authenticate a user. This method will return a Django user object
if a user with the corresponding username exists in the database, and
if a record that links that user with an LTI user_id field exists in
the LtiUser collection.
If such a user is not found, the method returns None (in line with the
authentication backend specification).
"""
try:
edx_user = User.objects.get(username=username)
except User.DoesNotExist:
return None
try:
LtiUser.objects.get(
edx_user_id=edx_user.id,
lti_user_id=lti_user_id,
lti_consumer=lti_consumer
)
except LtiUser.DoesNotExist:
return None
return edx_user
def get_user(self, user_id):
"""
Return the User object for a user that has already been authenticated by
this backend.
"""
try:
return User.objects.get(id=user_id)
except User.DoesNotExist:
return None
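# Illustrative sketch (the dotted path is an assumption based on this file's
# location; adjust to the actual installed app path): the backend is enabled
# by listing it in Django's AUTHENTICATION_BACKENDS setting, e.g.
#
#     AUTHENTICATION_BACKENDS = (
#         'django.contrib.auth.backends.ModelBackend',
#         'lti_provider.users.LtiBackend',
#     )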
| agpl-3.0 |
KhalidGit/flask | Work/TriviaMVA/TriviaMVA/env/Lib/site-packages/flask/_compat.py | 783 | 2164 | # -*- coding: utf-8 -*-
"""
flask._compat
~~~~~~~~~~~~~
Some py2/py3 compatibility support based on a stripped down
version of six so we don't have to depend on a specific version
of it.
:copyright: (c) 2013 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import sys
PY2 = sys.version_info[0] == 2
_identity = lambda x: x
if not PY2:
text_type = str
string_types = (str,)
integer_types = (int, )
iterkeys = lambda d: iter(d.keys())
itervalues = lambda d: iter(d.values())
iteritems = lambda d: iter(d.items())
from io import StringIO
def reraise(tp, value, tb=None):
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
implements_to_string = _identity
else:
text_type = unicode
string_types = (str, unicode)
integer_types = (int, long)
iterkeys = lambda d: d.iterkeys()
itervalues = lambda d: d.itervalues()
iteritems = lambda d: d.iteritems()
from cStringIO import StringIO
exec('def reraise(tp, value, tb=None):\n raise tp, value, tb')
def implements_to_string(cls):
cls.__unicode__ = cls.__str__
cls.__str__ = lambda x: x.__unicode__().encode('utf-8')
return cls
def with_metaclass(meta, *bases):
# This requires a bit of explanation: the basic idea is to make a
# dummy metaclass for one level of class instantiation that replaces
# itself with the actual metaclass. Because of internal type checks
# we also need to make sure that we downgrade the custom metaclass
# for one level to something closer to type (that's why __call__ and
# __init__ come back from type etc.).
#
# This has the advantage over six.with_metaclass in that it does not
# introduce dummy classes into the final MRO.
class metaclass(meta):
__call__ = type.__call__
__init__ = type.__init__
def __new__(cls, name, this_bases, d):
if this_bases is None:
return type.__new__(cls, name, (), d)
return meta(name, bases, d)
return metaclass('temporary_class', None, {})
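# Usage sketch (illustrative, not part of the original module): with_metaclass
# gives a py2/py3-compatible way to attach a metaclass such as abc.ABCMeta
# without the dummy base class ending up in the final MRO. 'Renderer' is a
# made-up name for this example.
#
#     import abc
#
#     class Renderer(with_metaclass(abc.ABCMeta, object)):
#         @abc.abstractmethod
#         def render(self):
#             raise NotImplementedError
#
#     type(Renderer)     # -> <class 'abc.ABCMeta'>
#     Renderer.__mro__   # -> (Renderer, object); no temporary class remains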
| apache-2.0 |
botchat/botclassifier | pythonbotchat/cleverbot/cleverbot_chat.py | 1 | 1770 | """
A very simple Selenium-based script that chats with Cleverbot and generates a log from the chat.
"""
import time
from datetime import datetime
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
def botcollectlog(url, name):
browser = webdriver.Firefox()
browser.get(url)
name_of_log = datetime.now().strftime(name+"_%Y_%m_%d_%H_%M_%S.log")
writelog = open(name_of_log,'w')
# The list below will later be built from a database; for now it is static. It should contain at least 20-25 questions.
list_q = ["how are you?" ,"are you good thinker?", "well me absolutly fine ","yes","i like know more about you"]
#submit = browser.find_element_by_css_selector("input.bb#sayit")
#submit.click()
#submit = browser.find_element_by_css_selector(".obscure.extend.show")
#submit.click()
for line in list_q:
input = browser.find_element_by_id("stimulus")
input.send_keys(line)
time.sleep(2)
submit = browser.find_element_by_css_selector("input.bb#sayit")
submit.click()
writelog.write("["+time.strftime("%H:%M:%S:%s")+"]code: "+line+"\n")
wait = WebDriverWait(browser, 40)
element = wait.until(EC.presence_of_element_located((By.ID, "snipTextIcon")))
text = browser.find_element_by_id("typArea")
while text.text.strip() == "":
time.sleep(1)
text = browser.find_element_by_id("typArea")
writelog.write("["+time.strftime("%H:%M:%S:%s")+"]bot: "+text.text+"\n")
writelog.close()
if __name__ == "__main__":
url = "http://www.cleverbot.com/"
name = "cleverbot"
botcollectlog(url, name)
| mit |
dbbhattacharya/kitsune | kitsune/users/migrations/0002_move_nbNO_to_no_locale.py | 4 | 6682 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
"""Switch all 'nb-NO' users to 'no'."""
orm.Profile.objects.filter(locale='nb-NO').update(locale='no')
def backwards(self, orm):
"""Switch all 'no' users to 'nb-NO'."""
orm.Profile.objects.filter(locale='no').update(locale='nb-NO')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'users.emailchange': {
'Meta': {'object_name': 'EmailChange'},
'activation_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'users.profile': {
'Meta': {'object_name': 'Profile'},
'avatar': ('django.db.models.fields.files.ImageField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'bio': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'facebook': ('django.db.models.fields.URLField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'irc_handle': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'locale': ('kitsune.sumo.models.LocaleField', [], {'default': "'en-US'", 'max_length': '7'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'public_email': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'timezone': ('timezones.fields.TimeZoneField', [], {'null': 'True', 'blank': 'True'}),
'twitter': ('django.db.models.fields.URLField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True', 'primary_key': 'True'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
'users.registrationprofile': {
'Meta': {'object_name': 'RegistrationProfile'},
'activation_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'users.setting': {
'Meta': {'unique_together': "(('user', 'name'),)", 'object_name': 'Setting'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'settings'", 'to': "orm['auth.User']"}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '60', 'blank': 'True'})
}
}
complete_apps = ['users']
symmetrical = True
| bsd-3-clause |
gtmanfred/livestreamer | src/livestreamer/plugins/common_swf.py | 38 | 1135 | from collections import namedtuple
from io import BytesIO
from livestreamer.utils import swfdecompress
from livestreamer.packages.flashmedia.types import U16LE, U32LE
__all__ = ["parse_swf"]
Rect = namedtuple("Rect", "data")
Tag = namedtuple("Tag", "type data")
SWF = namedtuple("SWF", "frame_size frame_rate frame_count tags")
def read_rect(fd):
header = ord(fd.read(1))
nbits = header >> 3
nbytes = int(((5 + 4 * nbits) + 7) / 8)
data = fd.read(nbytes - 1)
return Rect(data)
def read_tag(fd):
header = U16LE.read(fd)
tag_type = header >> 6
tag_length = header & 0x3f
if tag_length == 0x3f:
tag_length = U32LE.read(fd)
tag_data = fd.read(tag_length)
return Tag(tag_type, tag_data)
def read_tags(fd):
while True:
try:
yield read_tag(fd)
except IOError:
break
def parse_swf(data):
data = swfdecompress(data)
fd = BytesIO(data[8:])
frame_size = read_rect(fd)
frame_rate = U16LE.read(fd)
frame_count = U16LE.read(fd)
tags = list(read_tags(fd))
return SWF(frame_size, frame_rate, frame_count, tags)
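# Usage sketch (illustrative, not part of the original module): parse_swf()
# takes the raw bytes of a SWF file and returns the decoded header fields plus
# the list of tags. The file name below is a made-up example.
#
#     with open("player.swf", "rb") as fd:
#         swf = parse_swf(fd.read())
#
#     print(swf.frame_count, len(swf.tags))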
| bsd-2-clause |
NicoloPernigo/portia | slybot/slybot/linkextractor/xml.py | 15 | 1521 | """
Link extraction for auto scraping
"""
from scrapy.link import Link
from scrapy.selector import Selector
from slybot.linkextractor.base import BaseLinkExtractor
class XmlLinkExtractor(BaseLinkExtractor):
"""Link extractor for XML sources"""
def __init__(self, xpath, **kwargs):
self.remove_namespaces = kwargs.pop('remove_namespaces', False)
super(XmlLinkExtractor, self).__init__(**kwargs)
self.xpath = xpath
def _extract_links(self, response):
type = 'html'
if response.body_as_unicode().strip().startswith('<?xml version='):
type = 'xml'
xxs = Selector(response, type=type)
if self.remove_namespaces:
xxs.remove_namespaces()
for url in xxs.xpath(self.xpath).extract():
yield Link(url.encode(response.encoding))
class RssLinkExtractor(XmlLinkExtractor):
"""Link extraction from RSS feeds"""
def __init__(self, **kwargs):
super(RssLinkExtractor, self).__init__("//item/link/text()", **kwargs)
class SitemapLinkExtractor(XmlLinkExtractor):
"""Link extraction for sitemap.xml feeds"""
def __init__(self, **kwargs):
kwargs['remove_namespaces'] = True
super(SitemapLinkExtractor, self).__init__("//urlset/url/loc/text() | //sitemapindex/sitemap/loc/text()", **kwargs)
class AtomLinkExtractor(XmlLinkExtractor):
def __init__(self, **kwargs):
kwargs['remove_namespaces'] = True
super(AtomLinkExtractor, self).__init__("//link/@href", **kwargs)
| bsd-3-clause |
hakehuang/rt-thread | components/external/freetype/src/tools/glnames.py | 360 | 105239 | #!/usr/bin/env python
#
#
# FreeType 2 glyph name builder
#
# Copyright 1996-2000, 2003, 2005, 2007, 2008, 2011 by
# David Turner, Robert Wilhelm, and Werner Lemberg.
#
# This file is part of the FreeType project, and may only be used, modified,
# and distributed under the terms of the FreeType project license,
# LICENSE.TXT. By continuing to use, modify, or distribute this file you
# indicate that you have read the license and understand and accept it
# fully.
"""\
usage: %s <output-file>
This python script generates the glyph names tables defined in the
`psnames' module.
Its single argument is the name of the header file to be created.
"""
import sys, string, struct, re, os.path
# This table lists the glyphs according to the Macintosh specification.
# It is used by the TrueType Postscript names table.
#
# See
#
# http://fonts.apple.com/TTRefMan/RM06/Chap6post.html
#
# for the official list.
#
mac_standard_names = \
[
# 0
".notdef", ".null", "nonmarkingreturn", "space", "exclam",
"quotedbl", "numbersign", "dollar", "percent", "ampersand",
# 10
"quotesingle", "parenleft", "parenright", "asterisk", "plus",
"comma", "hyphen", "period", "slash", "zero",
# 20
"one", "two", "three", "four", "five",
"six", "seven", "eight", "nine", "colon",
# 30
"semicolon", "less", "equal", "greater", "question",
"at", "A", "B", "C", "D",
# 40
"E", "F", "G", "H", "I",
"J", "K", "L", "M", "N",
# 50
"O", "P", "Q", "R", "S",
"T", "U", "V", "W", "X",
# 60
"Y", "Z", "bracketleft", "backslash", "bracketright",
"asciicircum", "underscore", "grave", "a", "b",
# 70
"c", "d", "e", "f", "g",
"h", "i", "j", "k", "l",
# 80
"m", "n", "o", "p", "q",
"r", "s", "t", "u", "v",
# 90
"w", "x", "y", "z", "braceleft",
"bar", "braceright", "asciitilde", "Adieresis", "Aring",
# 100
"Ccedilla", "Eacute", "Ntilde", "Odieresis", "Udieresis",
"aacute", "agrave", "acircumflex", "adieresis", "atilde",
# 110
"aring", "ccedilla", "eacute", "egrave", "ecircumflex",
"edieresis", "iacute", "igrave", "icircumflex", "idieresis",
# 120
"ntilde", "oacute", "ograve", "ocircumflex", "odieresis",
"otilde", "uacute", "ugrave", "ucircumflex", "udieresis",
# 130
"dagger", "degree", "cent", "sterling", "section",
"bullet", "paragraph", "germandbls", "registered", "copyright",
# 140
"trademark", "acute", "dieresis", "notequal", "AE",
"Oslash", "infinity", "plusminus", "lessequal", "greaterequal",
# 150
"yen", "mu", "partialdiff", "summation", "product",
"pi", "integral", "ordfeminine", "ordmasculine", "Omega",
# 160
"ae", "oslash", "questiondown", "exclamdown", "logicalnot",
"radical", "florin", "approxequal", "Delta", "guillemotleft",
# 170
"guillemotright", "ellipsis", "nonbreakingspace", "Agrave", "Atilde",
"Otilde", "OE", "oe", "endash", "emdash",
# 180
"quotedblleft", "quotedblright", "quoteleft", "quoteright", "divide",
"lozenge", "ydieresis", "Ydieresis", "fraction", "currency",
# 190
"guilsinglleft", "guilsinglright", "fi", "fl", "daggerdbl",
"periodcentered", "quotesinglbase", "quotedblbase", "perthousand",
"Acircumflex",
# 200
"Ecircumflex", "Aacute", "Edieresis", "Egrave", "Iacute",
"Icircumflex", "Idieresis", "Igrave", "Oacute", "Ocircumflex",
# 210
"apple", "Ograve", "Uacute", "Ucircumflex", "Ugrave",
"dotlessi", "circumflex", "tilde", "macron", "breve",
# 220
"dotaccent", "ring", "cedilla", "hungarumlaut", "ogonek",
"caron", "Lslash", "lslash", "Scaron", "scaron",
# 230
"Zcaron", "zcaron", "brokenbar", "Eth", "eth",
"Yacute", "yacute", "Thorn", "thorn", "minus",
# 240
"multiply", "onesuperior", "twosuperior", "threesuperior", "onehalf",
"onequarter", "threequarters", "franc", "Gbreve", "gbreve",
# 250
"Idotaccent", "Scedilla", "scedilla", "Cacute", "cacute",
"Ccaron", "ccaron", "dcroat"
]
# The list of standard `SID' glyph names. For the official list,
# see Annex A of document at
#
# http://partners.adobe.com/public/developer/en/font/5176.CFF.pdf .
#
sid_standard_names = \
[
# 0
".notdef", "space", "exclam", "quotedbl", "numbersign",
"dollar", "percent", "ampersand", "quoteright", "parenleft",
# 10
"parenright", "asterisk", "plus", "comma", "hyphen",
"period", "slash", "zero", "one", "two",
# 20
"three", "four", "five", "six", "seven",
"eight", "nine", "colon", "semicolon", "less",
# 30
"equal", "greater", "question", "at", "A",
"B", "C", "D", "E", "F",
# 40
"G", "H", "I", "J", "K",
"L", "M", "N", "O", "P",
# 50
"Q", "R", "S", "T", "U",
"V", "W", "X", "Y", "Z",
# 60
"bracketleft", "backslash", "bracketright", "asciicircum", "underscore",
"quoteleft", "a", "b", "c", "d",
# 70
"e", "f", "g", "h", "i",
"j", "k", "l", "m", "n",
# 80
"o", "p", "q", "r", "s",
"t", "u", "v", "w", "x",
# 90
"y", "z", "braceleft", "bar", "braceright",
"asciitilde", "exclamdown", "cent", "sterling", "fraction",
# 100
"yen", "florin", "section", "currency", "quotesingle",
"quotedblleft", "guillemotleft", "guilsinglleft", "guilsinglright", "fi",
# 110
"fl", "endash", "dagger", "daggerdbl", "periodcentered",
"paragraph", "bullet", "quotesinglbase", "quotedblbase", "quotedblright",
# 120
"guillemotright", "ellipsis", "perthousand", "questiondown", "grave",
"acute", "circumflex", "tilde", "macron", "breve",
# 130
"dotaccent", "dieresis", "ring", "cedilla", "hungarumlaut",
"ogonek", "caron", "emdash", "AE", "ordfeminine",
# 140
"Lslash", "Oslash", "OE", "ordmasculine", "ae",
"dotlessi", "lslash", "oslash", "oe", "germandbls",
# 150
"onesuperior", "logicalnot", "mu", "trademark", "Eth",
"onehalf", "plusminus", "Thorn", "onequarter", "divide",
# 160
"brokenbar", "degree", "thorn", "threequarters", "twosuperior",
"registered", "minus", "eth", "multiply", "threesuperior",
# 170
"copyright", "Aacute", "Acircumflex", "Adieresis", "Agrave",
"Aring", "Atilde", "Ccedilla", "Eacute", "Ecircumflex",
# 180
"Edieresis", "Egrave", "Iacute", "Icircumflex", "Idieresis",
"Igrave", "Ntilde", "Oacute", "Ocircumflex", "Odieresis",
# 190
"Ograve", "Otilde", "Scaron", "Uacute", "Ucircumflex",
"Udieresis", "Ugrave", "Yacute", "Ydieresis", "Zcaron",
# 200
"aacute", "acircumflex", "adieresis", "agrave", "aring",
"atilde", "ccedilla", "eacute", "ecircumflex", "edieresis",
# 210
"egrave", "iacute", "icircumflex", "idieresis", "igrave",
"ntilde", "oacute", "ocircumflex", "odieresis", "ograve",
# 220
"otilde", "scaron", "uacute", "ucircumflex", "udieresis",
"ugrave", "yacute", "ydieresis", "zcaron", "exclamsmall",
# 230
"Hungarumlautsmall", "dollaroldstyle", "dollarsuperior", "ampersandsmall",
"Acutesmall",
"parenleftsuperior", "parenrightsuperior", "twodotenleader",
"onedotenleader", "zerooldstyle",
# 240
"oneoldstyle", "twooldstyle", "threeoldstyle", "fouroldstyle",
"fiveoldstyle",
"sixoldstyle", "sevenoldstyle", "eightoldstyle", "nineoldstyle",
"commasuperior",
# 250
"threequartersemdash", "periodsuperior", "questionsmall", "asuperior",
"bsuperior",
"centsuperior", "dsuperior", "esuperior", "isuperior", "lsuperior",
# 260
"msuperior", "nsuperior", "osuperior", "rsuperior", "ssuperior",
"tsuperior", "ff", "ffi", "ffl", "parenleftinferior",
# 270
"parenrightinferior", "Circumflexsmall", "hyphensuperior", "Gravesmall",
"Asmall",
"Bsmall", "Csmall", "Dsmall", "Esmall", "Fsmall",
# 280
"Gsmall", "Hsmall", "Ismall", "Jsmall", "Ksmall",
"Lsmall", "Msmall", "Nsmall", "Osmall", "Psmall",
# 290
"Qsmall", "Rsmall", "Ssmall", "Tsmall", "Usmall",
"Vsmall", "Wsmall", "Xsmall", "Ysmall", "Zsmall",
# 300
"colonmonetary", "onefitted", "rupiah", "Tildesmall", "exclamdownsmall",
"centoldstyle", "Lslashsmall", "Scaronsmall", "Zcaronsmall",
"Dieresissmall",
# 310
"Brevesmall", "Caronsmall", "Dotaccentsmall", "Macronsmall", "figuredash",
"hypheninferior", "Ogoneksmall", "Ringsmall", "Cedillasmall",
"questiondownsmall",
# 320
"oneeighth", "threeeighths", "fiveeighths", "seveneighths", "onethird",
"twothirds", "zerosuperior", "foursuperior", "fivesuperior",
"sixsuperior",
# 330
"sevensuperior", "eightsuperior", "ninesuperior", "zeroinferior",
"oneinferior",
"twoinferior", "threeinferior", "fourinferior", "fiveinferior",
"sixinferior",
# 340
"seveninferior", "eightinferior", "nineinferior", "centinferior",
"dollarinferior",
"periodinferior", "commainferior", "Agravesmall", "Aacutesmall",
"Acircumflexsmall",
# 350
"Atildesmall", "Adieresissmall", "Aringsmall", "AEsmall", "Ccedillasmall",
"Egravesmall", "Eacutesmall", "Ecircumflexsmall", "Edieresissmall",
"Igravesmall",
# 360
"Iacutesmall", "Icircumflexsmall", "Idieresissmall", "Ethsmall",
"Ntildesmall",
"Ogravesmall", "Oacutesmall", "Ocircumflexsmall", "Otildesmall",
"Odieresissmall",
# 370
"OEsmall", "Oslashsmall", "Ugravesmall", "Uacutesmall",
"Ucircumflexsmall",
"Udieresissmall", "Yacutesmall", "Thornsmall", "Ydieresissmall",
"001.000",
# 380
"001.001", "001.002", "001.003", "Black", "Bold",
"Book", "Light", "Medium", "Regular", "Roman",
# 390
"Semibold"
]
# This table maps character codes of the Adobe Standard Type 1
# encoding to glyph indices in the sid_standard_names table.
#
t1_standard_encoding = \
[
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 2, 3, 4, 5, 6, 7, 8,
9, 10, 11, 12, 13, 14, 15, 16, 17, 18,
19, 20, 21, 22, 23, 24, 25, 26, 27, 28,
29, 30, 31, 32, 33, 34, 35, 36, 37, 38,
39, 40, 41, 42, 43, 44, 45, 46, 47, 48,
49, 50, 51, 52, 53, 54, 55, 56, 57, 58,
59, 60, 61, 62, 63, 64, 65, 66, 67, 68,
69, 70, 71, 72, 73, 74, 75, 76, 77, 78,
79, 80, 81, 82, 83, 84, 85, 86, 87, 88,
89, 90, 91, 92, 93, 94, 95, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 96, 97, 98, 99, 100, 101, 102, 103, 104,
105, 106, 107, 108, 109, 110, 0, 111, 112, 113,
114, 0, 115, 116, 117, 118, 119, 120, 121, 122,
0, 123, 0, 124, 125, 126, 127, 128, 129, 130,
131, 0, 132, 133, 0, 134, 135, 136, 137, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 138, 0, 139, 0, 0,
0, 0, 140, 141, 142, 143, 0, 0, 0, 0,
0, 144, 0, 0, 0, 145, 0, 0, 146, 147,
148, 149, 0, 0, 0, 0
]
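# Illustrative lookup (not part of the original script): a character code in
# the Adobe Standard encoding maps to its glyph name by indexing the SID name
# table through this encoding table, e.g.
#
#     sid_standard_names[t1_standard_encoding[ord('A')]]   # -> 'A'
#     sid_standard_names[t1_standard_encoding[0x27]]        # -> 'quoteright'
#
# Codes not covered by the encoding map to index 0, i.e. '.notdef'.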
# This table maps character codes of the Adobe Expert Type 1
# encoding to glyph indices in the sid_standard_names table.
#
t1_expert_encoding = \
[
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 229, 230, 0, 231, 232, 233, 234,
235, 236, 237, 238, 13, 14, 15, 99, 239, 240,
241, 242, 243, 244, 245, 246, 247, 248, 27, 28,
249, 250, 251, 252, 0, 253, 254, 255, 256, 257,
0, 0, 0, 258, 0, 0, 259, 260, 261, 262,
0, 0, 263, 264, 265, 0, 266, 109, 110, 267,
268, 269, 0, 270, 271, 272, 273, 274, 275, 276,
277, 278, 279, 280, 281, 282, 283, 284, 285, 286,
287, 288, 289, 290, 291, 292, 293, 294, 295, 296,
297, 298, 299, 300, 301, 302, 303, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 304, 305, 306, 0, 0, 307, 308, 309, 310,
311, 0, 312, 0, 0, 313, 0, 0, 314, 315,
0, 0, 316, 317, 318, 0, 0, 0, 158, 155,
163, 319, 320, 321, 322, 323, 324, 325, 0, 0,
326, 150, 164, 169, 327, 328, 329, 330, 331, 332,
333, 334, 335, 336, 337, 338, 339, 340, 341, 342,
343, 344, 345, 346, 347, 348, 349, 350, 351, 352,
353, 354, 355, 356, 357, 358, 359, 360, 361, 362,
363, 364, 365, 366, 367, 368, 369, 370, 371, 372,
373, 374, 375, 376, 377, 378
]
# This data has been taken literally from the files `glyphlist.txt'
# and `zapfdingbats.txt' version 2.0, Sept 2002. It is available from
#
# http://sourceforge.net/adobe/aglfn/
#
adobe_glyph_list = """\
A;0041
AE;00C6
AEacute;01FC
AEmacron;01E2
AEsmall;F7E6
Aacute;00C1
Aacutesmall;F7E1
Abreve;0102
Abreveacute;1EAE
Abrevecyrillic;04D0
Abrevedotbelow;1EB6
Abrevegrave;1EB0
Abrevehookabove;1EB2
Abrevetilde;1EB4
Acaron;01CD
Acircle;24B6
Acircumflex;00C2
Acircumflexacute;1EA4
Acircumflexdotbelow;1EAC
Acircumflexgrave;1EA6
Acircumflexhookabove;1EA8
Acircumflexsmall;F7E2
Acircumflextilde;1EAA
Acute;F6C9
Acutesmall;F7B4
Acyrillic;0410
Adblgrave;0200
Adieresis;00C4
Adieresiscyrillic;04D2
Adieresismacron;01DE
Adieresissmall;F7E4
Adotbelow;1EA0
Adotmacron;01E0
Agrave;00C0
Agravesmall;F7E0
Ahookabove;1EA2
Aiecyrillic;04D4
Ainvertedbreve;0202
Alpha;0391
Alphatonos;0386
Amacron;0100
Amonospace;FF21
Aogonek;0104
Aring;00C5
Aringacute;01FA
Aringbelow;1E00
Aringsmall;F7E5
Asmall;F761
Atilde;00C3
Atildesmall;F7E3
Aybarmenian;0531
B;0042
Bcircle;24B7
Bdotaccent;1E02
Bdotbelow;1E04
Becyrillic;0411
Benarmenian;0532
Beta;0392
Bhook;0181
Blinebelow;1E06
Bmonospace;FF22
Brevesmall;F6F4
Bsmall;F762
Btopbar;0182
C;0043
Caarmenian;053E
Cacute;0106
Caron;F6CA
Caronsmall;F6F5
Ccaron;010C
Ccedilla;00C7
Ccedillaacute;1E08
Ccedillasmall;F7E7
Ccircle;24B8
Ccircumflex;0108
Cdot;010A
Cdotaccent;010A
Cedillasmall;F7B8
Chaarmenian;0549
Cheabkhasiancyrillic;04BC
Checyrillic;0427
Chedescenderabkhasiancyrillic;04BE
Chedescendercyrillic;04B6
Chedieresiscyrillic;04F4
Cheharmenian;0543
Chekhakassiancyrillic;04CB
Cheverticalstrokecyrillic;04B8
Chi;03A7
Chook;0187
Circumflexsmall;F6F6
Cmonospace;FF23
Coarmenian;0551
Csmall;F763
D;0044
DZ;01F1
DZcaron;01C4
Daarmenian;0534
Dafrican;0189
Dcaron;010E
Dcedilla;1E10
Dcircle;24B9
Dcircumflexbelow;1E12
Dcroat;0110
Ddotaccent;1E0A
Ddotbelow;1E0C
Decyrillic;0414
Deicoptic;03EE
Delta;2206
Deltagreek;0394
Dhook;018A
Dieresis;F6CB
DieresisAcute;F6CC
DieresisGrave;F6CD
Dieresissmall;F7A8
Digammagreek;03DC
Djecyrillic;0402
Dlinebelow;1E0E
Dmonospace;FF24
Dotaccentsmall;F6F7
Dslash;0110
Dsmall;F764
Dtopbar;018B
Dz;01F2
Dzcaron;01C5
Dzeabkhasiancyrillic;04E0
Dzecyrillic;0405
Dzhecyrillic;040F
E;0045
Eacute;00C9
Eacutesmall;F7E9
Ebreve;0114
Ecaron;011A
Ecedillabreve;1E1C
Echarmenian;0535
Ecircle;24BA
Ecircumflex;00CA
Ecircumflexacute;1EBE
Ecircumflexbelow;1E18
Ecircumflexdotbelow;1EC6
Ecircumflexgrave;1EC0
Ecircumflexhookabove;1EC2
Ecircumflexsmall;F7EA
Ecircumflextilde;1EC4
Ecyrillic;0404
Edblgrave;0204
Edieresis;00CB
Edieresissmall;F7EB
Edot;0116
Edotaccent;0116
Edotbelow;1EB8
Efcyrillic;0424
Egrave;00C8
Egravesmall;F7E8
Eharmenian;0537
Ehookabove;1EBA
Eightroman;2167
Einvertedbreve;0206
Eiotifiedcyrillic;0464
Elcyrillic;041B
Elevenroman;216A
Emacron;0112
Emacronacute;1E16
Emacrongrave;1E14
Emcyrillic;041C
Emonospace;FF25
Encyrillic;041D
Endescendercyrillic;04A2
Eng;014A
Enghecyrillic;04A4
Enhookcyrillic;04C7
Eogonek;0118
Eopen;0190
Epsilon;0395
Epsilontonos;0388
Ercyrillic;0420
Ereversed;018E
Ereversedcyrillic;042D
Escyrillic;0421
Esdescendercyrillic;04AA
Esh;01A9
Esmall;F765
Eta;0397
Etarmenian;0538
Etatonos;0389
Eth;00D0
Ethsmall;F7F0
Etilde;1EBC
Etildebelow;1E1A
Euro;20AC
Ezh;01B7
Ezhcaron;01EE
Ezhreversed;01B8
F;0046
Fcircle;24BB
Fdotaccent;1E1E
Feharmenian;0556
Feicoptic;03E4
Fhook;0191
Fitacyrillic;0472
Fiveroman;2164
Fmonospace;FF26
Fourroman;2163
Fsmall;F766
G;0047
GBsquare;3387
Gacute;01F4
Gamma;0393
Gammaafrican;0194
Gangiacoptic;03EA
Gbreve;011E
Gcaron;01E6
Gcedilla;0122
Gcircle;24BC
Gcircumflex;011C
Gcommaaccent;0122
Gdot;0120
Gdotaccent;0120
Gecyrillic;0413
Ghadarmenian;0542
Ghemiddlehookcyrillic;0494
Ghestrokecyrillic;0492
Gheupturncyrillic;0490
Ghook;0193
Gimarmenian;0533
Gjecyrillic;0403
Gmacron;1E20
Gmonospace;FF27
Grave;F6CE
Gravesmall;F760
Gsmall;F767
Gsmallhook;029B
Gstroke;01E4
H;0048
H18533;25CF
H18543;25AA
H18551;25AB
H22073;25A1
HPsquare;33CB
Haabkhasiancyrillic;04A8
Hadescendercyrillic;04B2
Hardsigncyrillic;042A
Hbar;0126
Hbrevebelow;1E2A
Hcedilla;1E28
Hcircle;24BD
Hcircumflex;0124
Hdieresis;1E26
Hdotaccent;1E22
Hdotbelow;1E24
Hmonospace;FF28
Hoarmenian;0540
Horicoptic;03E8
Hsmall;F768
Hungarumlaut;F6CF
Hungarumlautsmall;F6F8
Hzsquare;3390
I;0049
IAcyrillic;042F
IJ;0132
IUcyrillic;042E
Iacute;00CD
Iacutesmall;F7ED
Ibreve;012C
Icaron;01CF
Icircle;24BE
Icircumflex;00CE
Icircumflexsmall;F7EE
Icyrillic;0406
Idblgrave;0208
Idieresis;00CF
Idieresisacute;1E2E
Idieresiscyrillic;04E4
Idieresissmall;F7EF
Idot;0130
Idotaccent;0130
Idotbelow;1ECA
Iebrevecyrillic;04D6
Iecyrillic;0415
Ifraktur;2111
Igrave;00CC
Igravesmall;F7EC
Ihookabove;1EC8
Iicyrillic;0418
Iinvertedbreve;020A
Iishortcyrillic;0419
Imacron;012A
Imacroncyrillic;04E2
Imonospace;FF29
Iniarmenian;053B
Iocyrillic;0401
Iogonek;012E
Iota;0399
Iotaafrican;0196
Iotadieresis;03AA
Iotatonos;038A
Ismall;F769
Istroke;0197
Itilde;0128
Itildebelow;1E2C
Izhitsacyrillic;0474
Izhitsadblgravecyrillic;0476
J;004A
Jaarmenian;0541
Jcircle;24BF
Jcircumflex;0134
Jecyrillic;0408
Jheharmenian;054B
Jmonospace;FF2A
Jsmall;F76A
K;004B
KBsquare;3385
KKsquare;33CD
Kabashkircyrillic;04A0
Kacute;1E30
Kacyrillic;041A
Kadescendercyrillic;049A
Kahookcyrillic;04C3
Kappa;039A
Kastrokecyrillic;049E
Kaverticalstrokecyrillic;049C
Kcaron;01E8
Kcedilla;0136
Kcircle;24C0
Kcommaaccent;0136
Kdotbelow;1E32
Keharmenian;0554
Kenarmenian;053F
Khacyrillic;0425
Kheicoptic;03E6
Khook;0198
Kjecyrillic;040C
Klinebelow;1E34
Kmonospace;FF2B
Koppacyrillic;0480
Koppagreek;03DE
Ksicyrillic;046E
Ksmall;F76B
L;004C
LJ;01C7
LL;F6BF
Lacute;0139
Lambda;039B
Lcaron;013D
Lcedilla;013B
Lcircle;24C1
Lcircumflexbelow;1E3C
Lcommaaccent;013B
Ldot;013F
Ldotaccent;013F
Ldotbelow;1E36
Ldotbelowmacron;1E38
Liwnarmenian;053C
Lj;01C8
Ljecyrillic;0409
Llinebelow;1E3A
Lmonospace;FF2C
Lslash;0141
Lslashsmall;F6F9
Lsmall;F76C
M;004D
MBsquare;3386
Macron;F6D0
Macronsmall;F7AF
Macute;1E3E
Mcircle;24C2
Mdotaccent;1E40
Mdotbelow;1E42
Menarmenian;0544
Mmonospace;FF2D
Msmall;F76D
Mturned;019C
Mu;039C
N;004E
NJ;01CA
Nacute;0143
Ncaron;0147
Ncedilla;0145
Ncircle;24C3
Ncircumflexbelow;1E4A
Ncommaaccent;0145
Ndotaccent;1E44
Ndotbelow;1E46
Nhookleft;019D
Nineroman;2168
Nj;01CB
Njecyrillic;040A
Nlinebelow;1E48
Nmonospace;FF2E
Nowarmenian;0546
Nsmall;F76E
Ntilde;00D1
Ntildesmall;F7F1
Nu;039D
O;004F
OE;0152
OEsmall;F6FA
Oacute;00D3
Oacutesmall;F7F3
Obarredcyrillic;04E8
Obarreddieresiscyrillic;04EA
Obreve;014E
Ocaron;01D1
Ocenteredtilde;019F
Ocircle;24C4
Ocircumflex;00D4
Ocircumflexacute;1ED0
Ocircumflexdotbelow;1ED8
Ocircumflexgrave;1ED2
Ocircumflexhookabove;1ED4
Ocircumflexsmall;F7F4
Ocircumflextilde;1ED6
Ocyrillic;041E
Odblacute;0150
Odblgrave;020C
Odieresis;00D6
Odieresiscyrillic;04E6
Odieresissmall;F7F6
Odotbelow;1ECC
Ogoneksmall;F6FB
Ograve;00D2
Ogravesmall;F7F2
Oharmenian;0555
Ohm;2126
Ohookabove;1ECE
Ohorn;01A0
Ohornacute;1EDA
Ohorndotbelow;1EE2
Ohorngrave;1EDC
Ohornhookabove;1EDE
Ohorntilde;1EE0
Ohungarumlaut;0150
Oi;01A2
Oinvertedbreve;020E
Omacron;014C
Omacronacute;1E52
Omacrongrave;1E50
Omega;2126
Omegacyrillic;0460
Omegagreek;03A9
Omegaroundcyrillic;047A
Omegatitlocyrillic;047C
Omegatonos;038F
Omicron;039F
Omicrontonos;038C
Omonospace;FF2F
Oneroman;2160
Oogonek;01EA
Oogonekmacron;01EC
Oopen;0186
Oslash;00D8
Oslashacute;01FE
Oslashsmall;F7F8
Osmall;F76F
Ostrokeacute;01FE
Otcyrillic;047E
Otilde;00D5
Otildeacute;1E4C
Otildedieresis;1E4E
Otildesmall;F7F5
P;0050
Pacute;1E54
Pcircle;24C5
Pdotaccent;1E56
Pecyrillic;041F
Peharmenian;054A
Pemiddlehookcyrillic;04A6
Phi;03A6
Phook;01A4
Pi;03A0
Piwrarmenian;0553
Pmonospace;FF30
Psi;03A8
Psicyrillic;0470
Psmall;F770
Q;0051
Qcircle;24C6
Qmonospace;FF31
Qsmall;F771
R;0052
Raarmenian;054C
Racute;0154
Rcaron;0158
Rcedilla;0156
Rcircle;24C7
Rcommaaccent;0156
Rdblgrave;0210
Rdotaccent;1E58
Rdotbelow;1E5A
Rdotbelowmacron;1E5C
Reharmenian;0550
Rfraktur;211C
Rho;03A1
Ringsmall;F6FC
Rinvertedbreve;0212
Rlinebelow;1E5E
Rmonospace;FF32
Rsmall;F772
Rsmallinverted;0281
Rsmallinvertedsuperior;02B6
S;0053
SF010000;250C
SF020000;2514
SF030000;2510
SF040000;2518
SF050000;253C
SF060000;252C
SF070000;2534
SF080000;251C
SF090000;2524
SF100000;2500
SF110000;2502
SF190000;2561
SF200000;2562
SF210000;2556
SF220000;2555
SF230000;2563
SF240000;2551
SF250000;2557
SF260000;255D
SF270000;255C
SF280000;255B
SF360000;255E
SF370000;255F
SF380000;255A
SF390000;2554
SF400000;2569
SF410000;2566
SF420000;2560
SF430000;2550
SF440000;256C
SF450000;2567
SF460000;2568
SF470000;2564
SF480000;2565
SF490000;2559
SF500000;2558
SF510000;2552
SF520000;2553
SF530000;256B
SF540000;256A
Sacute;015A
Sacutedotaccent;1E64
Sampigreek;03E0
Scaron;0160
Scarondotaccent;1E66
Scaronsmall;F6FD
Scedilla;015E
Schwa;018F
Schwacyrillic;04D8
Schwadieresiscyrillic;04DA
Scircle;24C8
Scircumflex;015C
Scommaaccent;0218
Sdotaccent;1E60
Sdotbelow;1E62
Sdotbelowdotaccent;1E68
Seharmenian;054D
Sevenroman;2166
Shaarmenian;0547
Shacyrillic;0428
Shchacyrillic;0429
Sheicoptic;03E2
Shhacyrillic;04BA
Shimacoptic;03EC
Sigma;03A3
Sixroman;2165
Smonospace;FF33
Softsigncyrillic;042C
Ssmall;F773
Stigmagreek;03DA
T;0054
Tau;03A4
Tbar;0166
Tcaron;0164
Tcedilla;0162
Tcircle;24C9
Tcircumflexbelow;1E70
Tcommaaccent;0162
Tdotaccent;1E6A
Tdotbelow;1E6C
Tecyrillic;0422
Tedescendercyrillic;04AC
Tenroman;2169
Tetsecyrillic;04B4
Theta;0398
Thook;01AC
Thorn;00DE
Thornsmall;F7FE
Threeroman;2162
Tildesmall;F6FE
Tiwnarmenian;054F
Tlinebelow;1E6E
Tmonospace;FF34
Toarmenian;0539
Tonefive;01BC
Tonesix;0184
Tonetwo;01A7
Tretroflexhook;01AE
Tsecyrillic;0426
Tshecyrillic;040B
Tsmall;F774
Twelveroman;216B
Tworoman;2161
U;0055
Uacute;00DA
Uacutesmall;F7FA
Ubreve;016C
Ucaron;01D3
Ucircle;24CA
Ucircumflex;00DB
Ucircumflexbelow;1E76
Ucircumflexsmall;F7FB
Ucyrillic;0423
Udblacute;0170
Udblgrave;0214
Udieresis;00DC
Udieresisacute;01D7
Udieresisbelow;1E72
Udieresiscaron;01D9
Udieresiscyrillic;04F0
Udieresisgrave;01DB
Udieresismacron;01D5
Udieresissmall;F7FC
Udotbelow;1EE4
Ugrave;00D9
Ugravesmall;F7F9
Uhookabove;1EE6
Uhorn;01AF
Uhornacute;1EE8
Uhorndotbelow;1EF0
Uhorngrave;1EEA
Uhornhookabove;1EEC
Uhorntilde;1EEE
Uhungarumlaut;0170
Uhungarumlautcyrillic;04F2
Uinvertedbreve;0216
Ukcyrillic;0478
Umacron;016A
Umacroncyrillic;04EE
Umacrondieresis;1E7A
Umonospace;FF35
Uogonek;0172
Upsilon;03A5
Upsilon1;03D2
Upsilonacutehooksymbolgreek;03D3
Upsilonafrican;01B1
Upsilondieresis;03AB
Upsilondieresishooksymbolgreek;03D4
Upsilonhooksymbol;03D2
Upsilontonos;038E
Uring;016E
Ushortcyrillic;040E
Usmall;F775
Ustraightcyrillic;04AE
Ustraightstrokecyrillic;04B0
Utilde;0168
Utildeacute;1E78
Utildebelow;1E74
V;0056
Vcircle;24CB
Vdotbelow;1E7E
Vecyrillic;0412
Vewarmenian;054E
Vhook;01B2
Vmonospace;FF36
Voarmenian;0548
Vsmall;F776
Vtilde;1E7C
W;0057
Wacute;1E82
Wcircle;24CC
Wcircumflex;0174
Wdieresis;1E84
Wdotaccent;1E86
Wdotbelow;1E88
Wgrave;1E80
Wmonospace;FF37
Wsmall;F777
X;0058
Xcircle;24CD
Xdieresis;1E8C
Xdotaccent;1E8A
Xeharmenian;053D
Xi;039E
Xmonospace;FF38
Xsmall;F778
Y;0059
Yacute;00DD
Yacutesmall;F7FD
Yatcyrillic;0462
Ycircle;24CE
Ycircumflex;0176
Ydieresis;0178
Ydieresissmall;F7FF
Ydotaccent;1E8E
Ydotbelow;1EF4
Yericyrillic;042B
Yerudieresiscyrillic;04F8
Ygrave;1EF2
Yhook;01B3
Yhookabove;1EF6
Yiarmenian;0545
Yicyrillic;0407
Yiwnarmenian;0552
Ymonospace;FF39
Ysmall;F779
Ytilde;1EF8
Yusbigcyrillic;046A
Yusbigiotifiedcyrillic;046C
Yuslittlecyrillic;0466
Yuslittleiotifiedcyrillic;0468
Z;005A
Zaarmenian;0536
Zacute;0179
Zcaron;017D
Zcaronsmall;F6FF
Zcircle;24CF
Zcircumflex;1E90
Zdot;017B
Zdotaccent;017B
Zdotbelow;1E92
Zecyrillic;0417
Zedescendercyrillic;0498
Zedieresiscyrillic;04DE
Zeta;0396
Zhearmenian;053A
Zhebrevecyrillic;04C1
Zhecyrillic;0416
Zhedescendercyrillic;0496
Zhedieresiscyrillic;04DC
Zlinebelow;1E94
Zmonospace;FF3A
Zsmall;F77A
Zstroke;01B5
a;0061
aabengali;0986
aacute;00E1
aadeva;0906
aagujarati;0A86
aagurmukhi;0A06
aamatragurmukhi;0A3E
aarusquare;3303
aavowelsignbengali;09BE
aavowelsigndeva;093E
aavowelsigngujarati;0ABE
abbreviationmarkarmenian;055F
abbreviationsigndeva;0970
abengali;0985
abopomofo;311A
abreve;0103
abreveacute;1EAF
abrevecyrillic;04D1
abrevedotbelow;1EB7
abrevegrave;1EB1
abrevehookabove;1EB3
abrevetilde;1EB5
acaron;01CE
acircle;24D0
acircumflex;00E2
acircumflexacute;1EA5
acircumflexdotbelow;1EAD
acircumflexgrave;1EA7
acircumflexhookabove;1EA9
acircumflextilde;1EAB
acute;00B4
acutebelowcmb;0317
acutecmb;0301
acutecomb;0301
acutedeva;0954
acutelowmod;02CF
acutetonecmb;0341
acyrillic;0430
adblgrave;0201
addakgurmukhi;0A71
adeva;0905
adieresis;00E4
adieresiscyrillic;04D3
adieresismacron;01DF
adotbelow;1EA1
adotmacron;01E1
ae;00E6
aeacute;01FD
aekorean;3150
aemacron;01E3
afii00208;2015
afii08941;20A4
afii10017;0410
afii10018;0411
afii10019;0412
afii10020;0413
afii10021;0414
afii10022;0415
afii10023;0401
afii10024;0416
afii10025;0417
afii10026;0418
afii10027;0419
afii10028;041A
afii10029;041B
afii10030;041C
afii10031;041D
afii10032;041E
afii10033;041F
afii10034;0420
afii10035;0421
afii10036;0422
afii10037;0423
afii10038;0424
afii10039;0425
afii10040;0426
afii10041;0427
afii10042;0428
afii10043;0429
afii10044;042A
afii10045;042B
afii10046;042C
afii10047;042D
afii10048;042E
afii10049;042F
afii10050;0490
afii10051;0402
afii10052;0403
afii10053;0404
afii10054;0405
afii10055;0406
afii10056;0407
afii10057;0408
afii10058;0409
afii10059;040A
afii10060;040B
afii10061;040C
afii10062;040E
afii10063;F6C4
afii10064;F6C5
afii10065;0430
afii10066;0431
afii10067;0432
afii10068;0433
afii10069;0434
afii10070;0435
afii10071;0451
afii10072;0436
afii10073;0437
afii10074;0438
afii10075;0439
afii10076;043A
afii10077;043B
afii10078;043C
afii10079;043D
afii10080;043E
afii10081;043F
afii10082;0440
afii10083;0441
afii10084;0442
afii10085;0443
afii10086;0444
afii10087;0445
afii10088;0446
afii10089;0447
afii10090;0448
afii10091;0449
afii10092;044A
afii10093;044B
afii10094;044C
afii10095;044D
afii10096;044E
afii10097;044F
afii10098;0491
afii10099;0452
afii10100;0453
afii10101;0454
afii10102;0455
afii10103;0456
afii10104;0457
afii10105;0458
afii10106;0459
afii10107;045A
afii10108;045B
afii10109;045C
afii10110;045E
afii10145;040F
afii10146;0462
afii10147;0472
afii10148;0474
afii10192;F6C6
afii10193;045F
afii10194;0463
afii10195;0473
afii10196;0475
afii10831;F6C7
afii10832;F6C8
afii10846;04D9
afii299;200E
afii300;200F
afii301;200D
afii57381;066A
afii57388;060C
afii57392;0660
afii57393;0661
afii57394;0662
afii57395;0663
afii57396;0664
afii57397;0665
afii57398;0666
afii57399;0667
afii57400;0668
afii57401;0669
afii57403;061B
afii57407;061F
afii57409;0621
afii57410;0622
afii57411;0623
afii57412;0624
afii57413;0625
afii57414;0626
afii57415;0627
afii57416;0628
afii57417;0629
afii57418;062A
afii57419;062B
afii57420;062C
afii57421;062D
afii57422;062E
afii57423;062F
afii57424;0630
afii57425;0631
afii57426;0632
afii57427;0633
afii57428;0634
afii57429;0635
afii57430;0636
afii57431;0637
afii57432;0638
afii57433;0639
afii57434;063A
afii57440;0640
afii57441;0641
afii57442;0642
afii57443;0643
afii57444;0644
afii57445;0645
afii57446;0646
afii57448;0648
afii57449;0649
afii57450;064A
afii57451;064B
afii57452;064C
afii57453;064D
afii57454;064E
afii57455;064F
afii57456;0650
afii57457;0651
afii57458;0652
afii57470;0647
afii57505;06A4
afii57506;067E
afii57507;0686
afii57508;0698
afii57509;06AF
afii57511;0679
afii57512;0688
afii57513;0691
afii57514;06BA
afii57519;06D2
afii57534;06D5
afii57636;20AA
afii57645;05BE
afii57658;05C3
afii57664;05D0
afii57665;05D1
afii57666;05D2
afii57667;05D3
afii57668;05D4
afii57669;05D5
afii57670;05D6
afii57671;05D7
afii57672;05D8
afii57673;05D9
afii57674;05DA
afii57675;05DB
afii57676;05DC
afii57677;05DD
afii57678;05DE
afii57679;05DF
afii57680;05E0
afii57681;05E1
afii57682;05E2
afii57683;05E3
afii57684;05E4
afii57685;05E5
afii57686;05E6
afii57687;05E7
afii57688;05E8
afii57689;05E9
afii57690;05EA
afii57694;FB2A
afii57695;FB2B
afii57700;FB4B
afii57705;FB1F
afii57716;05F0
afii57717;05F1
afii57718;05F2
afii57723;FB35
afii57793;05B4
afii57794;05B5
afii57795;05B6
afii57796;05BB
afii57797;05B8
afii57798;05B7
afii57799;05B0
afii57800;05B2
afii57801;05B1
afii57802;05B3
afii57803;05C2
afii57804;05C1
afii57806;05B9
afii57807;05BC
afii57839;05BD
afii57841;05BF
afii57842;05C0
afii57929;02BC
afii61248;2105
afii61289;2113
afii61352;2116
afii61573;202C
afii61574;202D
afii61575;202E
afii61664;200C
afii63167;066D
afii64937;02BD
agrave;00E0
agujarati;0A85
agurmukhi;0A05
ahiragana;3042
ahookabove;1EA3
aibengali;0990
aibopomofo;311E
aideva;0910
aiecyrillic;04D5
aigujarati;0A90
aigurmukhi;0A10
aimatragurmukhi;0A48
ainarabic;0639
ainfinalarabic;FECA
aininitialarabic;FECB
ainmedialarabic;FECC
ainvertedbreve;0203
aivowelsignbengali;09C8
aivowelsigndeva;0948
aivowelsigngujarati;0AC8
akatakana;30A2
akatakanahalfwidth;FF71
akorean;314F
alef;05D0
alefarabic;0627
alefdageshhebrew;FB30
aleffinalarabic;FE8E
alefhamzaabovearabic;0623
alefhamzaabovefinalarabic;FE84
alefhamzabelowarabic;0625
alefhamzabelowfinalarabic;FE88
alefhebrew;05D0
aleflamedhebrew;FB4F
alefmaddaabovearabic;0622
alefmaddaabovefinalarabic;FE82
alefmaksuraarabic;0649
alefmaksurafinalarabic;FEF0
alefmaksurainitialarabic;FEF3
alefmaksuramedialarabic;FEF4
alefpatahhebrew;FB2E
alefqamatshebrew;FB2F
aleph;2135
allequal;224C
alpha;03B1
alphatonos;03AC
amacron;0101
amonospace;FF41
ampersand;0026
ampersandmonospace;FF06
ampersandsmall;F726
amsquare;33C2
anbopomofo;3122
angbopomofo;3124
angkhankhuthai;0E5A
angle;2220
anglebracketleft;3008
anglebracketleftvertical;FE3F
anglebracketright;3009
anglebracketrightvertical;FE40
angleleft;2329
angleright;232A
angstrom;212B
anoteleia;0387
anudattadeva;0952
anusvarabengali;0982
anusvaradeva;0902
anusvaragujarati;0A82
aogonek;0105
apaatosquare;3300
aparen;249C
apostrophearmenian;055A
apostrophemod;02BC
apple;F8FF
approaches;2250
approxequal;2248
approxequalorimage;2252
approximatelyequal;2245
araeaekorean;318E
araeakorean;318D
arc;2312
arighthalfring;1E9A
aring;00E5
aringacute;01FB
aringbelow;1E01
arrowboth;2194
arrowdashdown;21E3
arrowdashleft;21E0
arrowdashright;21E2
arrowdashup;21E1
arrowdblboth;21D4
arrowdbldown;21D3
arrowdblleft;21D0
arrowdblright;21D2
arrowdblup;21D1
arrowdown;2193
arrowdownleft;2199
arrowdownright;2198
arrowdownwhite;21E9
arrowheaddownmod;02C5
arrowheadleftmod;02C2
arrowheadrightmod;02C3
arrowheadupmod;02C4
arrowhorizex;F8E7
arrowleft;2190
arrowleftdbl;21D0
arrowleftdblstroke;21CD
arrowleftoverright;21C6
arrowleftwhite;21E6
arrowright;2192
arrowrightdblstroke;21CF
arrowrightheavy;279E
arrowrightoverleft;21C4
arrowrightwhite;21E8
arrowtableft;21E4
arrowtabright;21E5
arrowup;2191
arrowupdn;2195
arrowupdnbse;21A8
arrowupdownbase;21A8
arrowupleft;2196
arrowupleftofdown;21C5
arrowupright;2197
arrowupwhite;21E7
arrowvertex;F8E6
asciicircum;005E
asciicircummonospace;FF3E
asciitilde;007E
asciitildemonospace;FF5E
ascript;0251
ascriptturned;0252
asmallhiragana;3041
asmallkatakana;30A1
asmallkatakanahalfwidth;FF67
asterisk;002A
asteriskaltonearabic;066D
asteriskarabic;066D
asteriskmath;2217
asteriskmonospace;FF0A
asterisksmall;FE61
asterism;2042
asuperior;F6E9
asymptoticallyequal;2243
at;0040
atilde;00E3
atmonospace;FF20
atsmall;FE6B
aturned;0250
aubengali;0994
aubopomofo;3120
audeva;0914
augujarati;0A94
augurmukhi;0A14
aulengthmarkbengali;09D7
aumatragurmukhi;0A4C
auvowelsignbengali;09CC
auvowelsigndeva;094C
auvowelsigngujarati;0ACC
avagrahadeva;093D
aybarmenian;0561
ayin;05E2
ayinaltonehebrew;FB20
ayinhebrew;05E2
b;0062
babengali;09AC
backslash;005C
backslashmonospace;FF3C
badeva;092C
bagujarati;0AAC
bagurmukhi;0A2C
bahiragana;3070
bahtthai;0E3F
bakatakana;30D0
bar;007C
barmonospace;FF5C
bbopomofo;3105
bcircle;24D1
bdotaccent;1E03
bdotbelow;1E05
beamedsixteenthnotes;266C
because;2235
becyrillic;0431
beharabic;0628
behfinalarabic;FE90
behinitialarabic;FE91
behiragana;3079
behmedialarabic;FE92
behmeeminitialarabic;FC9F
behmeemisolatedarabic;FC08
behnoonfinalarabic;FC6D
bekatakana;30D9
benarmenian;0562
bet;05D1
beta;03B2
betasymbolgreek;03D0
betdagesh;FB31
betdageshhebrew;FB31
bethebrew;05D1
betrafehebrew;FB4C
bhabengali;09AD
bhadeva;092D
bhagujarati;0AAD
bhagurmukhi;0A2D
bhook;0253
bihiragana;3073
bikatakana;30D3
bilabialclick;0298
bindigurmukhi;0A02
birusquare;3331
blackcircle;25CF
blackdiamond;25C6
blackdownpointingtriangle;25BC
blackleftpointingpointer;25C4
blackleftpointingtriangle;25C0
blacklenticularbracketleft;3010
blacklenticularbracketleftvertical;FE3B
blacklenticularbracketright;3011
blacklenticularbracketrightvertical;FE3C
blacklowerlefttriangle;25E3
blacklowerrighttriangle;25E2
blackrectangle;25AC
blackrightpointingpointer;25BA
blackrightpointingtriangle;25B6
blacksmallsquare;25AA
blacksmilingface;263B
blacksquare;25A0
blackstar;2605
blackupperlefttriangle;25E4
blackupperrighttriangle;25E5
blackuppointingsmalltriangle;25B4
blackuppointingtriangle;25B2
blank;2423
blinebelow;1E07
block;2588
bmonospace;FF42
bobaimaithai;0E1A
bohiragana;307C
bokatakana;30DC
bparen;249D
bqsquare;33C3
braceex;F8F4
braceleft;007B
braceleftbt;F8F3
braceleftmid;F8F2
braceleftmonospace;FF5B
braceleftsmall;FE5B
bracelefttp;F8F1
braceleftvertical;FE37
braceright;007D
bracerightbt;F8FE
bracerightmid;F8FD
bracerightmonospace;FF5D
bracerightsmall;FE5C
bracerighttp;F8FC
bracerightvertical;FE38
bracketleft;005B
bracketleftbt;F8F0
bracketleftex;F8EF
bracketleftmonospace;FF3B
bracketlefttp;F8EE
bracketright;005D
bracketrightbt;F8FB
bracketrightex;F8FA
bracketrightmonospace;FF3D
bracketrighttp;F8F9
breve;02D8
brevebelowcmb;032E
brevecmb;0306
breveinvertedbelowcmb;032F
breveinvertedcmb;0311
breveinverteddoublecmb;0361
bridgebelowcmb;032A
bridgeinvertedbelowcmb;033A
brokenbar;00A6
bstroke;0180
bsuperior;F6EA
btopbar;0183
buhiragana;3076
bukatakana;30D6
bullet;2022
bulletinverse;25D8
bulletoperator;2219
bullseye;25CE
c;0063
caarmenian;056E
cabengali;099A
cacute;0107
cadeva;091A
cagujarati;0A9A
cagurmukhi;0A1A
calsquare;3388
candrabindubengali;0981
candrabinducmb;0310
candrabindudeva;0901
candrabindugujarati;0A81
capslock;21EA
careof;2105
caron;02C7
caronbelowcmb;032C
caroncmb;030C
carriagereturn;21B5
cbopomofo;3118
ccaron;010D
ccedilla;00E7
ccedillaacute;1E09
ccircle;24D2
ccircumflex;0109
ccurl;0255
cdot;010B
cdotaccent;010B
cdsquare;33C5
cedilla;00B8
cedillacmb;0327
cent;00A2
centigrade;2103
centinferior;F6DF
centmonospace;FFE0
centoldstyle;F7A2
centsuperior;F6E0
chaarmenian;0579
chabengali;099B
chadeva;091B
chagujarati;0A9B
chagurmukhi;0A1B
chbopomofo;3114
cheabkhasiancyrillic;04BD
checkmark;2713
checyrillic;0447
chedescenderabkhasiancyrillic;04BF
chedescendercyrillic;04B7
chedieresiscyrillic;04F5
cheharmenian;0573
chekhakassiancyrillic;04CC
cheverticalstrokecyrillic;04B9
chi;03C7
chieuchacirclekorean;3277
chieuchaparenkorean;3217
chieuchcirclekorean;3269
chieuchkorean;314A
chieuchparenkorean;3209
chochangthai;0E0A
chochanthai;0E08
chochingthai;0E09
chochoethai;0E0C
chook;0188
cieucacirclekorean;3276
cieucaparenkorean;3216
cieuccirclekorean;3268
cieuckorean;3148
cieucparenkorean;3208
cieucuparenkorean;321C
circle;25CB
circlemultiply;2297
circleot;2299
circleplus;2295
circlepostalmark;3036
circlewithlefthalfblack;25D0
circlewithrighthalfblack;25D1
circumflex;02C6
circumflexbelowcmb;032D
circumflexcmb;0302
clear;2327
clickalveolar;01C2
clickdental;01C0
clicklateral;01C1
clickretroflex;01C3
club;2663
clubsuitblack;2663
clubsuitwhite;2667
cmcubedsquare;33A4
cmonospace;FF43
cmsquaredsquare;33A0
coarmenian;0581
colon;003A
colonmonetary;20A1
colonmonospace;FF1A
colonsign;20A1
colonsmall;FE55
colontriangularhalfmod;02D1
colontriangularmod;02D0
comma;002C
commaabovecmb;0313
commaaboverightcmb;0315
commaaccent;F6C3
commaarabic;060C
commaarmenian;055D
commainferior;F6E1
commamonospace;FF0C
commareversedabovecmb;0314
commareversedmod;02BD
commasmall;FE50
commasuperior;F6E2
commaturnedabovecmb;0312
commaturnedmod;02BB
compass;263C
congruent;2245
contourintegral;222E
control;2303
controlACK;0006
controlBEL;0007
controlBS;0008
controlCAN;0018
controlCR;000D
controlDC1;0011
controlDC2;0012
controlDC3;0013
controlDC4;0014
controlDEL;007F
controlDLE;0010
controlEM;0019
controlENQ;0005
controlEOT;0004
controlESC;001B
controlETB;0017
controlETX;0003
controlFF;000C
controlFS;001C
controlGS;001D
controlHT;0009
controlLF;000A
controlNAK;0015
controlRS;001E
controlSI;000F
controlSO;000E
controlSOT;0002
controlSTX;0001
controlSUB;001A
controlSYN;0016
controlUS;001F
controlVT;000B
copyright;00A9
copyrightsans;F8E9
copyrightserif;F6D9
cornerbracketleft;300C
cornerbracketlefthalfwidth;FF62
cornerbracketleftvertical;FE41
cornerbracketright;300D
cornerbracketrighthalfwidth;FF63
cornerbracketrightvertical;FE42
corporationsquare;337F
cosquare;33C7
coverkgsquare;33C6
cparen;249E
cruzeiro;20A2
cstretched;0297
curlyand;22CF
curlyor;22CE
currency;00A4
cyrBreve;F6D1
cyrFlex;F6D2
cyrbreve;F6D4
cyrflex;F6D5
d;0064
daarmenian;0564
dabengali;09A6
dadarabic;0636
dadeva;0926
dadfinalarabic;FEBE
dadinitialarabic;FEBF
dadmedialarabic;FEC0
dagesh;05BC
dageshhebrew;05BC
dagger;2020
daggerdbl;2021
dagujarati;0AA6
dagurmukhi;0A26
dahiragana;3060
dakatakana;30C0
dalarabic;062F
dalet;05D3
daletdagesh;FB33
daletdageshhebrew;FB33
dalethatafpatah;05D3 05B2
dalethatafpatahhebrew;05D3 05B2
dalethatafsegol;05D3 05B1
dalethatafsegolhebrew;05D3 05B1
dalethebrew;05D3
dalethiriq;05D3 05B4
dalethiriqhebrew;05D3 05B4
daletholam;05D3 05B9
daletholamhebrew;05D3 05B9
daletpatah;05D3 05B7
daletpatahhebrew;05D3 05B7
daletqamats;05D3 05B8
daletqamatshebrew;05D3 05B8
daletqubuts;05D3 05BB
daletqubutshebrew;05D3 05BB
daletsegol;05D3 05B6
daletsegolhebrew;05D3 05B6
daletsheva;05D3 05B0
daletshevahebrew;05D3 05B0
dalettsere;05D3 05B5
dalettserehebrew;05D3 05B5
dalfinalarabic;FEAA
dammaarabic;064F
dammalowarabic;064F
dammatanaltonearabic;064C
dammatanarabic;064C
danda;0964
dargahebrew;05A7
dargalefthebrew;05A7
dasiapneumatacyrilliccmb;0485
dblGrave;F6D3
dblanglebracketleft;300A
dblanglebracketleftvertical;FE3D
dblanglebracketright;300B
dblanglebracketrightvertical;FE3E
dblarchinvertedbelowcmb;032B
dblarrowleft;21D4
dblarrowright;21D2
dbldanda;0965
dblgrave;F6D6
dblgravecmb;030F
dblintegral;222C
dbllowline;2017
dbllowlinecmb;0333
dbloverlinecmb;033F
dblprimemod;02BA
dblverticalbar;2016
dblverticallineabovecmb;030E
dbopomofo;3109
dbsquare;33C8
dcaron;010F
dcedilla;1E11
dcircle;24D3
dcircumflexbelow;1E13
dcroat;0111
ddabengali;09A1
ddadeva;0921
ddagujarati;0AA1
ddagurmukhi;0A21
ddalarabic;0688
ddalfinalarabic;FB89
dddhadeva;095C
ddhabengali;09A2
ddhadeva;0922
ddhagujarati;0AA2
ddhagurmukhi;0A22
ddotaccent;1E0B
ddotbelow;1E0D
decimalseparatorarabic;066B
decimalseparatorpersian;066B
decyrillic;0434
degree;00B0
dehihebrew;05AD
dehiragana;3067
deicoptic;03EF
dekatakana;30C7
deleteleft;232B
deleteright;2326
delta;03B4
deltaturned;018D
denominatorminusonenumeratorbengali;09F8
dezh;02A4
dhabengali;09A7
dhadeva;0927
dhagujarati;0AA7
dhagurmukhi;0A27
dhook;0257
dialytikatonos;0385
dialytikatonoscmb;0344
diamond;2666
diamondsuitwhite;2662
dieresis;00A8
dieresisacute;F6D7
dieresisbelowcmb;0324
dieresiscmb;0308
dieresisgrave;F6D8
dieresistonos;0385
dihiragana;3062
dikatakana;30C2
dittomark;3003
divide;00F7
divides;2223
divisionslash;2215
djecyrillic;0452
dkshade;2593
dlinebelow;1E0F
dlsquare;3397
dmacron;0111
dmonospace;FF44
dnblock;2584
dochadathai;0E0E
dodekthai;0E14
dohiragana;3069
dokatakana;30C9
dollar;0024
dollarinferior;F6E3
dollarmonospace;FF04
dollaroldstyle;F724
dollarsmall;FE69
dollarsuperior;F6E4
dong;20AB
dorusquare;3326
dotaccent;02D9
dotaccentcmb;0307
dotbelowcmb;0323
dotbelowcomb;0323
dotkatakana;30FB
dotlessi;0131
dotlessj;F6BE
dotlessjstrokehook;0284
dotmath;22C5
dottedcircle;25CC
doubleyodpatah;FB1F
doubleyodpatahhebrew;FB1F
downtackbelowcmb;031E
downtackmod;02D5
dparen;249F
dsuperior;F6EB
dtail;0256
dtopbar;018C
duhiragana;3065
dukatakana;30C5
dz;01F3
dzaltone;02A3
dzcaron;01C6
dzcurl;02A5
dzeabkhasiancyrillic;04E1
dzecyrillic;0455
dzhecyrillic;045F
e;0065
eacute;00E9
earth;2641
ebengali;098F
ebopomofo;311C
ebreve;0115
ecandradeva;090D
ecandragujarati;0A8D
ecandravowelsigndeva;0945
ecandravowelsigngujarati;0AC5
ecaron;011B
ecedillabreve;1E1D
echarmenian;0565
echyiwnarmenian;0587
ecircle;24D4
ecircumflex;00EA
ecircumflexacute;1EBF
ecircumflexbelow;1E19
ecircumflexdotbelow;1EC7
ecircumflexgrave;1EC1
ecircumflexhookabove;1EC3
ecircumflextilde;1EC5
ecyrillic;0454
edblgrave;0205
edeva;090F
edieresis;00EB
edot;0117
edotaccent;0117
edotbelow;1EB9
eegurmukhi;0A0F
eematragurmukhi;0A47
efcyrillic;0444
egrave;00E8
egujarati;0A8F
eharmenian;0567
ehbopomofo;311D
ehiragana;3048
ehookabove;1EBB
eibopomofo;311F
eight;0038
eightarabic;0668
eightbengali;09EE
eightcircle;2467
eightcircleinversesansserif;2791
eightdeva;096E
eighteencircle;2471
eighteenparen;2485
eighteenperiod;2499
eightgujarati;0AEE
eightgurmukhi;0A6E
eighthackarabic;0668
eighthangzhou;3028
eighthnotebeamed;266B
eightideographicparen;3227
eightinferior;2088
eightmonospace;FF18
eightoldstyle;F738
eightparen;247B
eightperiod;248F
eightpersian;06F8
eightroman;2177
eightsuperior;2078
eightthai;0E58
einvertedbreve;0207
eiotifiedcyrillic;0465
ekatakana;30A8
ekatakanahalfwidth;FF74
ekonkargurmukhi;0A74
ekorean;3154
elcyrillic;043B
element;2208
elevencircle;246A
elevenparen;247E
elevenperiod;2492
elevenroman;217A
ellipsis;2026
ellipsisvertical;22EE
emacron;0113
emacronacute;1E17
emacrongrave;1E15
emcyrillic;043C
emdash;2014
emdashvertical;FE31
emonospace;FF45
emphasismarkarmenian;055B
emptyset;2205
enbopomofo;3123
encyrillic;043D
endash;2013
endashvertical;FE32
endescendercyrillic;04A3
eng;014B
engbopomofo;3125
enghecyrillic;04A5
enhookcyrillic;04C8
enspace;2002
eogonek;0119
eokorean;3153
eopen;025B
eopenclosed;029A
eopenreversed;025C
eopenreversedclosed;025E
eopenreversedhook;025D
eparen;24A0
epsilon;03B5
epsilontonos;03AD
equal;003D
equalmonospace;FF1D
equalsmall;FE66
equalsuperior;207C
equivalence;2261
erbopomofo;3126
ercyrillic;0440
ereversed;0258
ereversedcyrillic;044D
escyrillic;0441
esdescendercyrillic;04AB
esh;0283
eshcurl;0286
eshortdeva;090E
eshortvowelsigndeva;0946
eshreversedloop;01AA
eshsquatreversed;0285
esmallhiragana;3047
esmallkatakana;30A7
esmallkatakanahalfwidth;FF6A
estimated;212E
esuperior;F6EC
eta;03B7
etarmenian;0568
etatonos;03AE
eth;00F0
etilde;1EBD
etildebelow;1E1B
etnahtafoukhhebrew;0591
etnahtafoukhlefthebrew;0591
etnahtahebrew;0591
etnahtalefthebrew;0591
eturned;01DD
eukorean;3161
euro;20AC
evowelsignbengali;09C7
evowelsigndeva;0947
evowelsigngujarati;0AC7
exclam;0021
exclamarmenian;055C
exclamdbl;203C
exclamdown;00A1
exclamdownsmall;F7A1
exclammonospace;FF01
exclamsmall;F721
existential;2203
ezh;0292
ezhcaron;01EF
ezhcurl;0293
ezhreversed;01B9
ezhtail;01BA
f;0066
fadeva;095E
fagurmukhi;0A5E
fahrenheit;2109
fathaarabic;064E
fathalowarabic;064E
fathatanarabic;064B
fbopomofo;3108
fcircle;24D5
fdotaccent;1E1F
feharabic;0641
feharmenian;0586
fehfinalarabic;FED2
fehinitialarabic;FED3
fehmedialarabic;FED4
feicoptic;03E5
female;2640
ff;FB00
ffi;FB03
ffl;FB04
fi;FB01
fifteencircle;246E
fifteenparen;2482
fifteenperiod;2496
figuredash;2012
filledbox;25A0
filledrect;25AC
finalkaf;05DA
finalkafdagesh;FB3A
finalkafdageshhebrew;FB3A
finalkafhebrew;05DA
finalkafqamats;05DA 05B8
finalkafqamatshebrew;05DA 05B8
finalkafsheva;05DA 05B0
finalkafshevahebrew;05DA 05B0
finalmem;05DD
finalmemhebrew;05DD
finalnun;05DF
finalnunhebrew;05DF
finalpe;05E3
finalpehebrew;05E3
finaltsadi;05E5
finaltsadihebrew;05E5
firsttonechinese;02C9
fisheye;25C9
fitacyrillic;0473
five;0035
fivearabic;0665
fivebengali;09EB
fivecircle;2464
fivecircleinversesansserif;278E
fivedeva;096B
fiveeighths;215D
fivegujarati;0AEB
fivegurmukhi;0A6B
fivehackarabic;0665
fivehangzhou;3025
fiveideographicparen;3224
fiveinferior;2085
fivemonospace;FF15
fiveoldstyle;F735
fiveparen;2478
fiveperiod;248C
fivepersian;06F5
fiveroman;2174
fivesuperior;2075
fivethai;0E55
fl;FB02
florin;0192
fmonospace;FF46
fmsquare;3399
fofanthai;0E1F
fofathai;0E1D
fongmanthai;0E4F
forall;2200
four;0034
fourarabic;0664
fourbengali;09EA
fourcircle;2463
fourcircleinversesansserif;278D
fourdeva;096A
fourgujarati;0AEA
fourgurmukhi;0A6A
fourhackarabic;0664
fourhangzhou;3024
fourideographicparen;3223
fourinferior;2084
fourmonospace;FF14
fournumeratorbengali;09F7
fouroldstyle;F734
fourparen;2477
fourperiod;248B
fourpersian;06F4
fourroman;2173
foursuperior;2074
fourteencircle;246D
fourteenparen;2481
fourteenperiod;2495
fourthai;0E54
fourthtonechinese;02CB
fparen;24A1
fraction;2044
franc;20A3
g;0067
gabengali;0997
gacute;01F5
gadeva;0917
gafarabic;06AF
gaffinalarabic;FB93
gafinitialarabic;FB94
gafmedialarabic;FB95
gagujarati;0A97
gagurmukhi;0A17
gahiragana;304C
gakatakana;30AC
gamma;03B3
gammalatinsmall;0263
gammasuperior;02E0
gangiacoptic;03EB
gbopomofo;310D
gbreve;011F
gcaron;01E7
gcedilla;0123
gcircle;24D6
gcircumflex;011D
gcommaaccent;0123
gdot;0121
gdotaccent;0121
gecyrillic;0433
gehiragana;3052
gekatakana;30B2
geometricallyequal;2251
gereshaccenthebrew;059C
gereshhebrew;05F3
gereshmuqdamhebrew;059D
germandbls;00DF
gershayimaccenthebrew;059E
gershayimhebrew;05F4
getamark;3013
ghabengali;0998
ghadarmenian;0572
ghadeva;0918
ghagujarati;0A98
ghagurmukhi;0A18
ghainarabic;063A
ghainfinalarabic;FECE
ghaininitialarabic;FECF
ghainmedialarabic;FED0
ghemiddlehookcyrillic;0495
ghestrokecyrillic;0493
gheupturncyrillic;0491
ghhadeva;095A
ghhagurmukhi;0A5A
ghook;0260
ghzsquare;3393
gihiragana;304E
gikatakana;30AE
gimarmenian;0563
gimel;05D2
gimeldagesh;FB32
gimeldageshhebrew;FB32
gimelhebrew;05D2
gjecyrillic;0453
glottalinvertedstroke;01BE
glottalstop;0294
glottalstopinverted;0296
glottalstopmod;02C0
glottalstopreversed;0295
glottalstopreversedmod;02C1
glottalstopreversedsuperior;02E4
glottalstopstroke;02A1
glottalstopstrokereversed;02A2
gmacron;1E21
gmonospace;FF47
gohiragana;3054
gokatakana;30B4
gparen;24A2
gpasquare;33AC
gradient;2207
grave;0060
gravebelowcmb;0316
gravecmb;0300
gravecomb;0300
gravedeva;0953
gravelowmod;02CE
gravemonospace;FF40
gravetonecmb;0340
greater;003E
greaterequal;2265
greaterequalorless;22DB
greatermonospace;FF1E
greaterorequivalent;2273
greaterorless;2277
greateroverequal;2267
greatersmall;FE65
gscript;0261
gstroke;01E5
guhiragana;3050
guillemotleft;00AB
guillemotright;00BB
guilsinglleft;2039
guilsinglright;203A
gukatakana;30B0
guramusquare;3318
gysquare;33C9
h;0068
haabkhasiancyrillic;04A9
haaltonearabic;06C1
habengali;09B9
hadescendercyrillic;04B3
hadeva;0939
hagujarati;0AB9
hagurmukhi;0A39
haharabic;062D
hahfinalarabic;FEA2
hahinitialarabic;FEA3
hahiragana;306F
hahmedialarabic;FEA4
haitusquare;332A
hakatakana;30CF
hakatakanahalfwidth;FF8A
halantgurmukhi;0A4D
hamzaarabic;0621
hamzadammaarabic;0621 064F
hamzadammatanarabic;0621 064C
hamzafathaarabic;0621 064E
hamzafathatanarabic;0621 064B
hamzalowarabic;0621
hamzalowkasraarabic;0621 0650
hamzalowkasratanarabic;0621 064D
hamzasukunarabic;0621 0652
hangulfiller;3164
hardsigncyrillic;044A
harpoonleftbarbup;21BC
harpoonrightbarbup;21C0
hasquare;33CA
hatafpatah;05B2
hatafpatah16;05B2
hatafpatah23;05B2
hatafpatah2f;05B2
hatafpatahhebrew;05B2
hatafpatahnarrowhebrew;05B2
hatafpatahquarterhebrew;05B2
hatafpatahwidehebrew;05B2
hatafqamats;05B3
hatafqamats1b;05B3
hatafqamats28;05B3
hatafqamats34;05B3
hatafqamatshebrew;05B3
hatafqamatsnarrowhebrew;05B3
hatafqamatsquarterhebrew;05B3
hatafqamatswidehebrew;05B3
hatafsegol;05B1
hatafsegol17;05B1
hatafsegol24;05B1
hatafsegol30;05B1
hatafsegolhebrew;05B1
hatafsegolnarrowhebrew;05B1
hatafsegolquarterhebrew;05B1
hatafsegolwidehebrew;05B1
hbar;0127
hbopomofo;310F
hbrevebelow;1E2B
hcedilla;1E29
hcircle;24D7
hcircumflex;0125
hdieresis;1E27
hdotaccent;1E23
hdotbelow;1E25
he;05D4
heart;2665
heartsuitblack;2665
heartsuitwhite;2661
hedagesh;FB34
hedageshhebrew;FB34
hehaltonearabic;06C1
heharabic;0647
hehebrew;05D4
hehfinalaltonearabic;FBA7
hehfinalalttwoarabic;FEEA
hehfinalarabic;FEEA
hehhamzaabovefinalarabic;FBA5
hehhamzaaboveisolatedarabic;FBA4
hehinitialaltonearabic;FBA8
hehinitialarabic;FEEB
hehiragana;3078
hehmedialaltonearabic;FBA9
hehmedialarabic;FEEC
heiseierasquare;337B
hekatakana;30D8
hekatakanahalfwidth;FF8D
hekutaarusquare;3336
henghook;0267
herutusquare;3339
het;05D7
hethebrew;05D7
hhook;0266
hhooksuperior;02B1
hieuhacirclekorean;327B
hieuhaparenkorean;321B
hieuhcirclekorean;326D
hieuhkorean;314E
hieuhparenkorean;320D
hihiragana;3072
hikatakana;30D2
hikatakanahalfwidth;FF8B
hiriq;05B4
hiriq14;05B4
hiriq21;05B4
hiriq2d;05B4
hiriqhebrew;05B4
hiriqnarrowhebrew;05B4
hiriqquarterhebrew;05B4
hiriqwidehebrew;05B4
hlinebelow;1E96
hmonospace;FF48
hoarmenian;0570
hohipthai;0E2B
hohiragana;307B
hokatakana;30DB
hokatakanahalfwidth;FF8E
holam;05B9
holam19;05B9
holam26;05B9
holam32;05B9
holamhebrew;05B9
holamnarrowhebrew;05B9
holamquarterhebrew;05B9
holamwidehebrew;05B9
honokhukthai;0E2E
hookabovecomb;0309
hookcmb;0309
hookpalatalizedbelowcmb;0321
hookretroflexbelowcmb;0322
hoonsquare;3342
horicoptic;03E9
horizontalbar;2015
horncmb;031B
hotsprings;2668
house;2302
hparen;24A3
hsuperior;02B0
hturned;0265
huhiragana;3075
huiitosquare;3333
hukatakana;30D5
hukatakanahalfwidth;FF8C
hungarumlaut;02DD
hungarumlautcmb;030B
hv;0195
hyphen;002D
hypheninferior;F6E5
hyphenmonospace;FF0D
hyphensmall;FE63
hyphensuperior;F6E6
hyphentwo;2010
i;0069
iacute;00ED
iacyrillic;044F
ibengali;0987
ibopomofo;3127
ibreve;012D
icaron;01D0
icircle;24D8
icircumflex;00EE
icyrillic;0456
idblgrave;0209
ideographearthcircle;328F
ideographfirecircle;328B
ideographicallianceparen;323F
ideographiccallparen;323A
ideographiccentrecircle;32A5
ideographicclose;3006
ideographiccomma;3001
ideographiccommaleft;FF64
ideographiccongratulationparen;3237
ideographiccorrectcircle;32A3
ideographicearthparen;322F
ideographicenterpriseparen;323D
ideographicexcellentcircle;329D
ideographicfestivalparen;3240
ideographicfinancialcircle;3296
ideographicfinancialparen;3236
ideographicfireparen;322B
ideographichaveparen;3232
ideographichighcircle;32A4
ideographiciterationmark;3005
ideographiclaborcircle;3298
ideographiclaborparen;3238
ideographicleftcircle;32A7
ideographiclowcircle;32A6
ideographicmedicinecircle;32A9
ideographicmetalparen;322E
ideographicmoonparen;322A
ideographicnameparen;3234
ideographicperiod;3002
ideographicprintcircle;329E
ideographicreachparen;3243
ideographicrepresentparen;3239
ideographicresourceparen;323E
ideographicrightcircle;32A8
ideographicsecretcircle;3299
ideographicselfparen;3242
ideographicsocietyparen;3233
ideographicspace;3000
ideographicspecialparen;3235
ideographicstockparen;3231
ideographicstudyparen;323B
ideographicsunparen;3230
ideographicsuperviseparen;323C
ideographicwaterparen;322C
ideographicwoodparen;322D
ideographiczero;3007
ideographmetalcircle;328E
ideographmooncircle;328A
ideographnamecircle;3294
ideographsuncircle;3290
ideographwatercircle;328C
ideographwoodcircle;328D
ideva;0907
idieresis;00EF
idieresisacute;1E2F
idieresiscyrillic;04E5
idotbelow;1ECB
iebrevecyrillic;04D7
iecyrillic;0435
ieungacirclekorean;3275
ieungaparenkorean;3215
ieungcirclekorean;3267
ieungkorean;3147
ieungparenkorean;3207
igrave;00EC
igujarati;0A87
igurmukhi;0A07
ihiragana;3044
ihookabove;1EC9
iibengali;0988
iicyrillic;0438
iideva;0908
iigujarati;0A88
iigurmukhi;0A08
iimatragurmukhi;0A40
iinvertedbreve;020B
iishortcyrillic;0439
iivowelsignbengali;09C0
iivowelsigndeva;0940
iivowelsigngujarati;0AC0
ij;0133
ikatakana;30A4
ikatakanahalfwidth;FF72
ikorean;3163
ilde;02DC
iluyhebrew;05AC
imacron;012B
imacroncyrillic;04E3
imageorapproximatelyequal;2253
imatragurmukhi;0A3F
imonospace;FF49
increment;2206
infinity;221E
iniarmenian;056B
integral;222B
integralbottom;2321
integralbt;2321
integralex;F8F5
integraltop;2320
integraltp;2320
intersection;2229
intisquare;3305
invbullet;25D8
invcircle;25D9
invsmileface;263B
iocyrillic;0451
iogonek;012F
iota;03B9
iotadieresis;03CA
iotadieresistonos;0390
iotalatin;0269
iotatonos;03AF
iparen;24A4
irigurmukhi;0A72
ismallhiragana;3043
ismallkatakana;30A3
ismallkatakanahalfwidth;FF68
issharbengali;09FA
istroke;0268
isuperior;F6ED
iterationhiragana;309D
iterationkatakana;30FD
itilde;0129
itildebelow;1E2D
iubopomofo;3129
iucyrillic;044E
ivowelsignbengali;09BF
ivowelsigndeva;093F
ivowelsigngujarati;0ABF
izhitsacyrillic;0475
izhitsadblgravecyrillic;0477
j;006A
jaarmenian;0571
jabengali;099C
jadeva;091C
jagujarati;0A9C
jagurmukhi;0A1C
jbopomofo;3110
jcaron;01F0
jcircle;24D9
jcircumflex;0135
jcrossedtail;029D
jdotlessstroke;025F
jecyrillic;0458
jeemarabic;062C
jeemfinalarabic;FE9E
jeeminitialarabic;FE9F
jeemmedialarabic;FEA0
jeharabic;0698
jehfinalarabic;FB8B
jhabengali;099D
jhadeva;091D
jhagujarati;0A9D
jhagurmukhi;0A1D
jheharmenian;057B
jis;3004
jmonospace;FF4A
jparen;24A5
jsuperior;02B2
k;006B
kabashkircyrillic;04A1
kabengali;0995
kacute;1E31
kacyrillic;043A
kadescendercyrillic;049B
kadeva;0915
kaf;05DB
kafarabic;0643
kafdagesh;FB3B
kafdageshhebrew;FB3B
kaffinalarabic;FEDA
kafhebrew;05DB
kafinitialarabic;FEDB
kafmedialarabic;FEDC
kafrafehebrew;FB4D
kagujarati;0A95
kagurmukhi;0A15
kahiragana;304B
kahookcyrillic;04C4
kakatakana;30AB
kakatakanahalfwidth;FF76
kappa;03BA
kappasymbolgreek;03F0
kapyeounmieumkorean;3171
kapyeounphieuphkorean;3184
kapyeounpieupkorean;3178
kapyeounssangpieupkorean;3179
karoriisquare;330D
kashidaautoarabic;0640
kashidaautonosidebearingarabic;0640
kasmallkatakana;30F5
kasquare;3384
kasraarabic;0650
kasratanarabic;064D
kastrokecyrillic;049F
katahiraprolongmarkhalfwidth;FF70
kaverticalstrokecyrillic;049D
kbopomofo;310E
kcalsquare;3389
kcaron;01E9
kcedilla;0137
kcircle;24DA
kcommaaccent;0137
kdotbelow;1E33
keharmenian;0584
kehiragana;3051
kekatakana;30B1
kekatakanahalfwidth;FF79
kenarmenian;056F
kesmallkatakana;30F6
kgreenlandic;0138
khabengali;0996
khacyrillic;0445
khadeva;0916
khagujarati;0A96
khagurmukhi;0A16
khaharabic;062E
khahfinalarabic;FEA6
khahinitialarabic;FEA7
khahmedialarabic;FEA8
kheicoptic;03E7
khhadeva;0959
khhagurmukhi;0A59
khieukhacirclekorean;3278
khieukhaparenkorean;3218
khieukhcirclekorean;326A
khieukhkorean;314B
khieukhparenkorean;320A
khokhaithai;0E02
khokhonthai;0E05
khokhuatthai;0E03
khokhwaithai;0E04
khomutthai;0E5B
khook;0199
khorakhangthai;0E06
khzsquare;3391
kihiragana;304D
kikatakana;30AD
kikatakanahalfwidth;FF77
kiroguramusquare;3315
kiromeetorusquare;3316
kirosquare;3314
kiyeokacirclekorean;326E
kiyeokaparenkorean;320E
kiyeokcirclekorean;3260
kiyeokkorean;3131
kiyeokparenkorean;3200
kiyeoksioskorean;3133
kjecyrillic;045C
klinebelow;1E35
klsquare;3398
kmcubedsquare;33A6
kmonospace;FF4B
kmsquaredsquare;33A2
kohiragana;3053
kohmsquare;33C0
kokaithai;0E01
kokatakana;30B3
kokatakanahalfwidth;FF7A
kooposquare;331E
koppacyrillic;0481
koreanstandardsymbol;327F
koroniscmb;0343
kparen;24A6
kpasquare;33AA
ksicyrillic;046F
ktsquare;33CF
kturned;029E
kuhiragana;304F
kukatakana;30AF
kukatakanahalfwidth;FF78
kvsquare;33B8
kwsquare;33BE
l;006C
labengali;09B2
lacute;013A
ladeva;0932
lagujarati;0AB2
lagurmukhi;0A32
lakkhangyaothai;0E45
lamaleffinalarabic;FEFC
lamalefhamzaabovefinalarabic;FEF8
lamalefhamzaaboveisolatedarabic;FEF7
lamalefhamzabelowfinalarabic;FEFA
lamalefhamzabelowisolatedarabic;FEF9
lamalefisolatedarabic;FEFB
lamalefmaddaabovefinalarabic;FEF6
lamalefmaddaaboveisolatedarabic;FEF5
lamarabic;0644
lambda;03BB
lambdastroke;019B
lamed;05DC
lameddagesh;FB3C
lameddageshhebrew;FB3C
lamedhebrew;05DC
lamedholam;05DC 05B9
lamedholamdagesh;05DC 05B9 05BC
lamedholamdageshhebrew;05DC 05B9 05BC
lamedholamhebrew;05DC 05B9
lamfinalarabic;FEDE
lamhahinitialarabic;FCCA
laminitialarabic;FEDF
lamjeeminitialarabic;FCC9
lamkhahinitialarabic;FCCB
lamlamhehisolatedarabic;FDF2
lammedialarabic;FEE0
lammeemhahinitialarabic;FD88
lammeeminitialarabic;FCCC
lammeemjeeminitialarabic;FEDF FEE4 FEA0
lammeemkhahinitialarabic;FEDF FEE4 FEA8
largecircle;25EF
lbar;019A
lbelt;026C
lbopomofo;310C
lcaron;013E
lcedilla;013C
lcircle;24DB
lcircumflexbelow;1E3D
lcommaaccent;013C
ldot;0140
ldotaccent;0140
ldotbelow;1E37
ldotbelowmacron;1E39
leftangleabovecmb;031A
lefttackbelowcmb;0318
less;003C
lessequal;2264
lessequalorgreater;22DA
lessmonospace;FF1C
lessorequivalent;2272
lessorgreater;2276
lessoverequal;2266
lesssmall;FE64
lezh;026E
lfblock;258C
lhookretroflex;026D
lira;20A4
liwnarmenian;056C
lj;01C9
ljecyrillic;0459
ll;F6C0
lladeva;0933
llagujarati;0AB3
llinebelow;1E3B
llladeva;0934
llvocalicbengali;09E1
llvocalicdeva;0961
llvocalicvowelsignbengali;09E3
llvocalicvowelsigndeva;0963
lmiddletilde;026B
lmonospace;FF4C
lmsquare;33D0
lochulathai;0E2C
logicaland;2227
logicalnot;00AC
logicalnotreversed;2310
logicalor;2228
lolingthai;0E25
longs;017F
lowlinecenterline;FE4E
lowlinecmb;0332
lowlinedashed;FE4D
lozenge;25CA
lparen;24A7
lslash;0142
lsquare;2113
lsuperior;F6EE
ltshade;2591
luthai;0E26
lvocalicbengali;098C
lvocalicdeva;090C
lvocalicvowelsignbengali;09E2
lvocalicvowelsigndeva;0962
lxsquare;33D3
m;006D
mabengali;09AE
macron;00AF
macronbelowcmb;0331
macroncmb;0304
macronlowmod;02CD
macronmonospace;FFE3
macute;1E3F
madeva;092E
magujarati;0AAE
magurmukhi;0A2E
mahapakhhebrew;05A4
mahapakhlefthebrew;05A4
mahiragana;307E
maichattawalowleftthai;F895
maichattawalowrightthai;F894
maichattawathai;0E4B
maichattawaupperleftthai;F893
maieklowleftthai;F88C
maieklowrightthai;F88B
maiekthai;0E48
maiekupperleftthai;F88A
maihanakatleftthai;F884
maihanakatthai;0E31
maitaikhuleftthai;F889
maitaikhuthai;0E47
maitholowleftthai;F88F
maitholowrightthai;F88E
maithothai;0E49
maithoupperleftthai;F88D
maitrilowleftthai;F892
maitrilowrightthai;F891
maitrithai;0E4A
maitriupperleftthai;F890
maiyamokthai;0E46
makatakana;30DE
makatakanahalfwidth;FF8F
male;2642
mansyonsquare;3347
maqafhebrew;05BE
mars;2642
masoracirclehebrew;05AF
masquare;3383
mbopomofo;3107
mbsquare;33D4
mcircle;24DC
mcubedsquare;33A5
mdotaccent;1E41
mdotbelow;1E43
meemarabic;0645
meemfinalarabic;FEE2
meeminitialarabic;FEE3
meemmedialarabic;FEE4
meemmeeminitialarabic;FCD1
meemmeemisolatedarabic;FC48
meetorusquare;334D
mehiragana;3081
meizierasquare;337E
mekatakana;30E1
mekatakanahalfwidth;FF92
mem;05DE
memdagesh;FB3E
memdageshhebrew;FB3E
memhebrew;05DE
menarmenian;0574
merkhahebrew;05A5
merkhakefulahebrew;05A6
merkhakefulalefthebrew;05A6
merkhalefthebrew;05A5
mhook;0271
mhzsquare;3392
middledotkatakanahalfwidth;FF65
middot;00B7
mieumacirclekorean;3272
mieumaparenkorean;3212
mieumcirclekorean;3264
mieumkorean;3141
mieumpansioskorean;3170
mieumparenkorean;3204
mieumpieupkorean;316E
mieumsioskorean;316F
mihiragana;307F
mikatakana;30DF
mikatakanahalfwidth;FF90
minus;2212
minusbelowcmb;0320
minuscircle;2296
minusmod;02D7
minusplus;2213
minute;2032
miribaarusquare;334A
mirisquare;3349
mlonglegturned;0270
mlsquare;3396
mmcubedsquare;33A3
mmonospace;FF4D
mmsquaredsquare;339F
mohiragana;3082
mohmsquare;33C1
mokatakana;30E2
mokatakanahalfwidth;FF93
molsquare;33D6
momathai;0E21
moverssquare;33A7
moverssquaredsquare;33A8
mparen;24A8
mpasquare;33AB
mssquare;33B3
msuperior;F6EF
mturned;026F
mu;00B5
mu1;00B5
muasquare;3382
muchgreater;226B
muchless;226A
mufsquare;338C
mugreek;03BC
mugsquare;338D
muhiragana;3080
mukatakana;30E0
mukatakanahalfwidth;FF91
mulsquare;3395
multiply;00D7
mumsquare;339B
munahhebrew;05A3
munahlefthebrew;05A3
musicalnote;266A
musicalnotedbl;266B
musicflatsign;266D
musicsharpsign;266F
mussquare;33B2
muvsquare;33B6
muwsquare;33BC
mvmegasquare;33B9
mvsquare;33B7
mwmegasquare;33BF
mwsquare;33BD
n;006E
nabengali;09A8
nabla;2207
nacute;0144
nadeva;0928
nagujarati;0AA8
nagurmukhi;0A28
nahiragana;306A
nakatakana;30CA
nakatakanahalfwidth;FF85
napostrophe;0149
nasquare;3381
nbopomofo;310B
nbspace;00A0
ncaron;0148
ncedilla;0146
ncircle;24DD
ncircumflexbelow;1E4B
ncommaaccent;0146
ndotaccent;1E45
ndotbelow;1E47
nehiragana;306D
nekatakana;30CD
nekatakanahalfwidth;FF88
newsheqelsign;20AA
nfsquare;338B
ngabengali;0999
ngadeva;0919
ngagujarati;0A99
ngagurmukhi;0A19
ngonguthai;0E07
nhiragana;3093
nhookleft;0272
nhookretroflex;0273
nieunacirclekorean;326F
nieunaparenkorean;320F
nieuncieuckorean;3135
nieuncirclekorean;3261
nieunhieuhkorean;3136
nieunkorean;3134
nieunpansioskorean;3168
nieunparenkorean;3201
nieunsioskorean;3167
nieuntikeutkorean;3166
nihiragana;306B
nikatakana;30CB
nikatakanahalfwidth;FF86
nikhahitleftthai;F899
nikhahitthai;0E4D
nine;0039
ninearabic;0669
ninebengali;09EF
ninecircle;2468
ninecircleinversesansserif;2792
ninedeva;096F
ninegujarati;0AEF
ninegurmukhi;0A6F
ninehackarabic;0669
ninehangzhou;3029
nineideographicparen;3228
nineinferior;2089
ninemonospace;FF19
nineoldstyle;F739
nineparen;247C
nineperiod;2490
ninepersian;06F9
nineroman;2178
ninesuperior;2079
nineteencircle;2472
nineteenparen;2486
nineteenperiod;249A
ninethai;0E59
nj;01CC
njecyrillic;045A
nkatakana;30F3
nkatakanahalfwidth;FF9D
nlegrightlong;019E
nlinebelow;1E49
nmonospace;FF4E
nmsquare;339A
nnabengali;09A3
nnadeva;0923
nnagujarati;0AA3
nnagurmukhi;0A23
nnnadeva;0929
nohiragana;306E
nokatakana;30CE
nokatakanahalfwidth;FF89
nonbreakingspace;00A0
nonenthai;0E13
nonuthai;0E19
noonarabic;0646
noonfinalarabic;FEE6
noonghunnaarabic;06BA
noonghunnafinalarabic;FB9F
noonhehinitialarabic;FEE7 FEEC
nooninitialarabic;FEE7
noonjeeminitialarabic;FCD2
noonjeemisolatedarabic;FC4B
noonmedialarabic;FEE8
noonmeeminitialarabic;FCD5
noonmeemisolatedarabic;FC4E
noonnoonfinalarabic;FC8D
notcontains;220C
notelement;2209
notelementof;2209
notequal;2260
notgreater;226F
notgreaternorequal;2271
notgreaternorless;2279
notidentical;2262
notless;226E
notlessnorequal;2270
notparallel;2226
notprecedes;2280
notsubset;2284
notsucceeds;2281
notsuperset;2285
nowarmenian;0576
nparen;24A9
nssquare;33B1
nsuperior;207F
ntilde;00F1
nu;03BD
nuhiragana;306C
nukatakana;30CC
nukatakanahalfwidth;FF87
nuktabengali;09BC
nuktadeva;093C
nuktagujarati;0ABC
nuktagurmukhi;0A3C
numbersign;0023
numbersignmonospace;FF03
numbersignsmall;FE5F
numeralsigngreek;0374
numeralsignlowergreek;0375
numero;2116
nun;05E0
nundagesh;FB40
nundageshhebrew;FB40
nunhebrew;05E0
nvsquare;33B5
nwsquare;33BB
nyabengali;099E
nyadeva;091E
nyagujarati;0A9E
nyagurmukhi;0A1E
o;006F
oacute;00F3
oangthai;0E2D
obarred;0275
obarredcyrillic;04E9
obarreddieresiscyrillic;04EB
obengali;0993
obopomofo;311B
obreve;014F
ocandradeva;0911
ocandragujarati;0A91
ocandravowelsigndeva;0949
ocandravowelsigngujarati;0AC9
ocaron;01D2
ocircle;24DE
ocircumflex;00F4
ocircumflexacute;1ED1
ocircumflexdotbelow;1ED9
ocircumflexgrave;1ED3
ocircumflexhookabove;1ED5
ocircumflextilde;1ED7
ocyrillic;043E
odblacute;0151
odblgrave;020D
odeva;0913
odieresis;00F6
odieresiscyrillic;04E7
odotbelow;1ECD
oe;0153
oekorean;315A
ogonek;02DB
ogonekcmb;0328
ograve;00F2
ogujarati;0A93
oharmenian;0585
ohiragana;304A
ohookabove;1ECF
ohorn;01A1
ohornacute;1EDB
ohorndotbelow;1EE3
ohorngrave;1EDD
ohornhookabove;1EDF
ohorntilde;1EE1
ohungarumlaut;0151
oi;01A3
oinvertedbreve;020F
okatakana;30AA
okatakanahalfwidth;FF75
okorean;3157
olehebrew;05AB
omacron;014D
omacronacute;1E53
omacrongrave;1E51
omdeva;0950
omega;03C9
omega1;03D6
omegacyrillic;0461
omegalatinclosed;0277
omegaroundcyrillic;047B
omegatitlocyrillic;047D
omegatonos;03CE
omgujarati;0AD0
omicron;03BF
omicrontonos;03CC
omonospace;FF4F
one;0031
onearabic;0661
onebengali;09E7
onecircle;2460
onecircleinversesansserif;278A
onedeva;0967
onedotenleader;2024
oneeighth;215B
onefitted;F6DC
onegujarati;0AE7
onegurmukhi;0A67
onehackarabic;0661
onehalf;00BD
onehangzhou;3021
oneideographicparen;3220
oneinferior;2081
onemonospace;FF11
onenumeratorbengali;09F4
oneoldstyle;F731
oneparen;2474
oneperiod;2488
onepersian;06F1
onequarter;00BC
oneroman;2170
onesuperior;00B9
onethai;0E51
onethird;2153
oogonek;01EB
oogonekmacron;01ED
oogurmukhi;0A13
oomatragurmukhi;0A4B
oopen;0254
oparen;24AA
openbullet;25E6
option;2325
ordfeminine;00AA
ordmasculine;00BA
orthogonal;221F
oshortdeva;0912
oshortvowelsigndeva;094A
oslash;00F8
oslashacute;01FF
osmallhiragana;3049
osmallkatakana;30A9
osmallkatakanahalfwidth;FF6B
ostrokeacute;01FF
osuperior;F6F0
otcyrillic;047F
otilde;00F5
otildeacute;1E4D
otildedieresis;1E4F
oubopomofo;3121
overline;203E
overlinecenterline;FE4A
overlinecmb;0305
overlinedashed;FE49
overlinedblwavy;FE4C
overlinewavy;FE4B
overscore;00AF
ovowelsignbengali;09CB
ovowelsigndeva;094B
ovowelsigngujarati;0ACB
p;0070
paampssquare;3380
paasentosquare;332B
pabengali;09AA
pacute;1E55
padeva;092A
pagedown;21DF
pageup;21DE
pagujarati;0AAA
pagurmukhi;0A2A
pahiragana;3071
paiyannoithai;0E2F
pakatakana;30D1
palatalizationcyrilliccmb;0484
palochkacyrillic;04C0
pansioskorean;317F
paragraph;00B6
parallel;2225
parenleft;0028
parenleftaltonearabic;FD3E
parenleftbt;F8ED
parenleftex;F8EC
parenleftinferior;208D
parenleftmonospace;FF08
parenleftsmall;FE59
parenleftsuperior;207D
parenlefttp;F8EB
parenleftvertical;FE35
parenright;0029
parenrightaltonearabic;FD3F
parenrightbt;F8F8
parenrightex;F8F7
parenrightinferior;208E
parenrightmonospace;FF09
parenrightsmall;FE5A
parenrightsuperior;207E
parenrighttp;F8F6
parenrightvertical;FE36
partialdiff;2202
paseqhebrew;05C0
pashtahebrew;0599
pasquare;33A9
patah;05B7
patah11;05B7
patah1d;05B7
patah2a;05B7
patahhebrew;05B7
patahnarrowhebrew;05B7
patahquarterhebrew;05B7
patahwidehebrew;05B7
pazerhebrew;05A1
pbopomofo;3106
pcircle;24DF
pdotaccent;1E57
pe;05E4
pecyrillic;043F
pedagesh;FB44
pedageshhebrew;FB44
peezisquare;333B
pefinaldageshhebrew;FB43
peharabic;067E
peharmenian;057A
pehebrew;05E4
pehfinalarabic;FB57
pehinitialarabic;FB58
pehiragana;307A
pehmedialarabic;FB59
pekatakana;30DA
pemiddlehookcyrillic;04A7
perafehebrew;FB4E
percent;0025
percentarabic;066A
percentmonospace;FF05
percentsmall;FE6A
period;002E
periodarmenian;0589
periodcentered;00B7
periodhalfwidth;FF61
periodinferior;F6E7
periodmonospace;FF0E
periodsmall;FE52
periodsuperior;F6E8
perispomenigreekcmb;0342
perpendicular;22A5
perthousand;2030
peseta;20A7
pfsquare;338A
phabengali;09AB
phadeva;092B
phagujarati;0AAB
phagurmukhi;0A2B
phi;03C6
phi1;03D5
phieuphacirclekorean;327A
phieuphaparenkorean;321A
phieuphcirclekorean;326C
phieuphkorean;314D
phieuphparenkorean;320C
philatin;0278
phinthuthai;0E3A
phisymbolgreek;03D5
phook;01A5
phophanthai;0E1E
phophungthai;0E1C
phosamphaothai;0E20
pi;03C0
pieupacirclekorean;3273
pieupaparenkorean;3213
pieupcieuckorean;3176
pieupcirclekorean;3265
pieupkiyeokkorean;3172
pieupkorean;3142
pieupparenkorean;3205
pieupsioskiyeokkorean;3174
pieupsioskorean;3144
pieupsiostikeutkorean;3175
pieupthieuthkorean;3177
pieuptikeutkorean;3173
pihiragana;3074
pikatakana;30D4
pisymbolgreek;03D6
piwrarmenian;0583
plus;002B
plusbelowcmb;031F
pluscircle;2295
plusminus;00B1
plusmod;02D6
plusmonospace;FF0B
plussmall;FE62
plussuperior;207A
pmonospace;FF50
pmsquare;33D8
pohiragana;307D
pointingindexdownwhite;261F
pointingindexleftwhite;261C
pointingindexrightwhite;261E
pointingindexupwhite;261D
pokatakana;30DD
poplathai;0E1B
postalmark;3012
postalmarkface;3020
pparen;24AB
precedes;227A
prescription;211E
primemod;02B9
primereversed;2035
product;220F
projective;2305
prolongedkana;30FC
propellor;2318
propersubset;2282
propersuperset;2283
proportion;2237
proportional;221D
psi;03C8
psicyrillic;0471
psilipneumatacyrilliccmb;0486
pssquare;33B0
puhiragana;3077
pukatakana;30D7
pvsquare;33B4
pwsquare;33BA
q;0071
qadeva;0958
qadmahebrew;05A8
qafarabic;0642
qaffinalarabic;FED6
qafinitialarabic;FED7
qafmedialarabic;FED8
qamats;05B8
qamats10;05B8
qamats1a;05B8
qamats1c;05B8
qamats27;05B8
qamats29;05B8
qamats33;05B8
qamatsde;05B8
qamatshebrew;05B8
qamatsnarrowhebrew;05B8
qamatsqatanhebrew;05B8
qamatsqatannarrowhebrew;05B8
qamatsqatanquarterhebrew;05B8
qamatsqatanwidehebrew;05B8
qamatsquarterhebrew;05B8
qamatswidehebrew;05B8
qarneyparahebrew;059F
qbopomofo;3111
qcircle;24E0
qhook;02A0
qmonospace;FF51
qof;05E7
qofdagesh;FB47
qofdageshhebrew;FB47
qofhatafpatah;05E7 05B2
qofhatafpatahhebrew;05E7 05B2
qofhatafsegol;05E7 05B1
qofhatafsegolhebrew;05E7 05B1
qofhebrew;05E7
qofhiriq;05E7 05B4
qofhiriqhebrew;05E7 05B4
qofholam;05E7 05B9
qofholamhebrew;05E7 05B9
qofpatah;05E7 05B7
qofpatahhebrew;05E7 05B7
qofqamats;05E7 05B8
qofqamatshebrew;05E7 05B8
qofqubuts;05E7 05BB
qofqubutshebrew;05E7 05BB
qofsegol;05E7 05B6
qofsegolhebrew;05E7 05B6
qofsheva;05E7 05B0
qofshevahebrew;05E7 05B0
qoftsere;05E7 05B5
qoftserehebrew;05E7 05B5
qparen;24AC
quarternote;2669
qubuts;05BB
qubuts18;05BB
qubuts25;05BB
qubuts31;05BB
qubutshebrew;05BB
qubutsnarrowhebrew;05BB
qubutsquarterhebrew;05BB
qubutswidehebrew;05BB
question;003F
questionarabic;061F
questionarmenian;055E
questiondown;00BF
questiondownsmall;F7BF
questiongreek;037E
questionmonospace;FF1F
questionsmall;F73F
quotedbl;0022
quotedblbase;201E
quotedblleft;201C
quotedblmonospace;FF02
quotedblprime;301E
quotedblprimereversed;301D
quotedblright;201D
quoteleft;2018
quoteleftreversed;201B
quotereversed;201B
quoteright;2019
quoterightn;0149
quotesinglbase;201A
quotesingle;0027
quotesinglemonospace;FF07
r;0072
raarmenian;057C
rabengali;09B0
racute;0155
radeva;0930
radical;221A
radicalex;F8E5
radoverssquare;33AE
radoverssquaredsquare;33AF
radsquare;33AD
rafe;05BF
rafehebrew;05BF
ragujarati;0AB0
ragurmukhi;0A30
rahiragana;3089
rakatakana;30E9
rakatakanahalfwidth;FF97
ralowerdiagonalbengali;09F1
ramiddlediagonalbengali;09F0
ramshorn;0264
ratio;2236
rbopomofo;3116
rcaron;0159
rcedilla;0157
rcircle;24E1
rcommaaccent;0157
rdblgrave;0211
rdotaccent;1E59
rdotbelow;1E5B
rdotbelowmacron;1E5D
referencemark;203B
reflexsubset;2286
reflexsuperset;2287
registered;00AE
registersans;F8E8
registerserif;F6DA
reharabic;0631
reharmenian;0580
rehfinalarabic;FEAE
rehiragana;308C
rehyehaleflamarabic;0631 FEF3 FE8E 0644
rekatakana;30EC
rekatakanahalfwidth;FF9A
resh;05E8
reshdageshhebrew;FB48
reshhatafpatah;05E8 05B2
reshhatafpatahhebrew;05E8 05B2
reshhatafsegol;05E8 05B1
reshhatafsegolhebrew;05E8 05B1
reshhebrew;05E8
reshhiriq;05E8 05B4
reshhiriqhebrew;05E8 05B4
reshholam;05E8 05B9
reshholamhebrew;05E8 05B9
reshpatah;05E8 05B7
reshpatahhebrew;05E8 05B7
reshqamats;05E8 05B8
reshqamatshebrew;05E8 05B8
reshqubuts;05E8 05BB
reshqubutshebrew;05E8 05BB
reshsegol;05E8 05B6
reshsegolhebrew;05E8 05B6
reshsheva;05E8 05B0
reshshevahebrew;05E8 05B0
reshtsere;05E8 05B5
reshtserehebrew;05E8 05B5
reversedtilde;223D
reviahebrew;0597
reviamugrashhebrew;0597
revlogicalnot;2310
rfishhook;027E
rfishhookreversed;027F
rhabengali;09DD
rhadeva;095D
rho;03C1
rhook;027D
rhookturned;027B
rhookturnedsuperior;02B5
rhosymbolgreek;03F1
rhotichookmod;02DE
rieulacirclekorean;3271
rieulaparenkorean;3211
rieulcirclekorean;3263
rieulhieuhkorean;3140
rieulkiyeokkorean;313A
rieulkiyeoksioskorean;3169
rieulkorean;3139
rieulmieumkorean;313B
rieulpansioskorean;316C
rieulparenkorean;3203
rieulphieuphkorean;313F
rieulpieupkorean;313C
rieulpieupsioskorean;316B
rieulsioskorean;313D
rieulthieuthkorean;313E
rieultikeutkorean;316A
rieulyeorinhieuhkorean;316D
rightangle;221F
righttackbelowcmb;0319
righttriangle;22BF
rihiragana;308A
rikatakana;30EA
rikatakanahalfwidth;FF98
ring;02DA
ringbelowcmb;0325
ringcmb;030A
ringhalfleft;02BF
ringhalfleftarmenian;0559
ringhalfleftbelowcmb;031C
ringhalfleftcentered;02D3
ringhalfright;02BE
ringhalfrightbelowcmb;0339
ringhalfrightcentered;02D2
rinvertedbreve;0213
rittorusquare;3351
rlinebelow;1E5F
rlongleg;027C
rlonglegturned;027A
rmonospace;FF52
rohiragana;308D
rokatakana;30ED
rokatakanahalfwidth;FF9B
roruathai;0E23
rparen;24AD
rrabengali;09DC
rradeva;0931
rragurmukhi;0A5C
rreharabic;0691
rrehfinalarabic;FB8D
rrvocalicbengali;09E0
rrvocalicdeva;0960
rrvocalicgujarati;0AE0
rrvocalicvowelsignbengali;09C4
rrvocalicvowelsigndeva;0944
rrvocalicvowelsigngujarati;0AC4
rsuperior;F6F1
rtblock;2590
rturned;0279
rturnedsuperior;02B4
ruhiragana;308B
rukatakana;30EB
rukatakanahalfwidth;FF99
rupeemarkbengali;09F2
rupeesignbengali;09F3
rupiah;F6DD
ruthai;0E24
rvocalicbengali;098B
rvocalicdeva;090B
rvocalicgujarati;0A8B
rvocalicvowelsignbengali;09C3
rvocalicvowelsigndeva;0943
rvocalicvowelsigngujarati;0AC3
s;0073
sabengali;09B8
sacute;015B
sacutedotaccent;1E65
sadarabic;0635
sadeva;0938
sadfinalarabic;FEBA
sadinitialarabic;FEBB
sadmedialarabic;FEBC
sagujarati;0AB8
sagurmukhi;0A38
sahiragana;3055
sakatakana;30B5
sakatakanahalfwidth;FF7B
sallallahoualayhewasallamarabic;FDFA
samekh;05E1
samekhdagesh;FB41
samekhdageshhebrew;FB41
samekhhebrew;05E1
saraaathai;0E32
saraaethai;0E41
saraaimaimalaithai;0E44
saraaimaimuanthai;0E43
saraamthai;0E33
saraathai;0E30
saraethai;0E40
saraiileftthai;F886
saraiithai;0E35
saraileftthai;F885
saraithai;0E34
saraothai;0E42
saraueeleftthai;F888
saraueethai;0E37
saraueleftthai;F887
sarauethai;0E36
sarauthai;0E38
sarauuthai;0E39
sbopomofo;3119
scaron;0161
scarondotaccent;1E67
scedilla;015F
schwa;0259
schwacyrillic;04D9
schwadieresiscyrillic;04DB
schwahook;025A
scircle;24E2
scircumflex;015D
scommaaccent;0219
sdotaccent;1E61
sdotbelow;1E63
sdotbelowdotaccent;1E69
seagullbelowcmb;033C
second;2033
secondtonechinese;02CA
section;00A7
seenarabic;0633
seenfinalarabic;FEB2
seeninitialarabic;FEB3
seenmedialarabic;FEB4
segol;05B6
segol13;05B6
segol1f;05B6
segol2c;05B6
segolhebrew;05B6
segolnarrowhebrew;05B6
segolquarterhebrew;05B6
segoltahebrew;0592
segolwidehebrew;05B6
seharmenian;057D
sehiragana;305B
sekatakana;30BB
sekatakanahalfwidth;FF7E
semicolon;003B
semicolonarabic;061B
semicolonmonospace;FF1B
semicolonsmall;FE54
semivoicedmarkkana;309C
semivoicedmarkkanahalfwidth;FF9F
sentisquare;3322
sentosquare;3323
seven;0037
sevenarabic;0667
sevenbengali;09ED
sevencircle;2466
sevencircleinversesansserif;2790
sevendeva;096D
seveneighths;215E
sevengujarati;0AED
sevengurmukhi;0A6D
sevenhackarabic;0667
sevenhangzhou;3027
sevenideographicparen;3226
seveninferior;2087
sevenmonospace;FF17
sevenoldstyle;F737
sevenparen;247A
sevenperiod;248E
sevenpersian;06F7
sevenroman;2176
sevensuperior;2077
seventeencircle;2470
seventeenparen;2484
seventeenperiod;2498
seventhai;0E57
sfthyphen;00AD
shaarmenian;0577
shabengali;09B6
shacyrillic;0448
shaddaarabic;0651
shaddadammaarabic;FC61
shaddadammatanarabic;FC5E
shaddafathaarabic;FC60
shaddafathatanarabic;0651 064B
shaddakasraarabic;FC62
shaddakasratanarabic;FC5F
shade;2592
shadedark;2593
shadelight;2591
shademedium;2592
shadeva;0936
shagujarati;0AB6
shagurmukhi;0A36
shalshelethebrew;0593
shbopomofo;3115
shchacyrillic;0449
sheenarabic;0634
sheenfinalarabic;FEB6
sheeninitialarabic;FEB7
sheenmedialarabic;FEB8
sheicoptic;03E3
sheqel;20AA
sheqelhebrew;20AA
sheva;05B0
sheva115;05B0
sheva15;05B0
sheva22;05B0
sheva2e;05B0
shevahebrew;05B0
shevanarrowhebrew;05B0
shevaquarterhebrew;05B0
shevawidehebrew;05B0
shhacyrillic;04BB
shimacoptic;03ED
shin;05E9
shindagesh;FB49
shindageshhebrew;FB49
shindageshshindot;FB2C
shindageshshindothebrew;FB2C
shindageshsindot;FB2D
shindageshsindothebrew;FB2D
shindothebrew;05C1
shinhebrew;05E9
shinshindot;FB2A
shinshindothebrew;FB2A
shinsindot;FB2B
shinsindothebrew;FB2B
shook;0282
sigma;03C3
sigma1;03C2
sigmafinal;03C2
sigmalunatesymbolgreek;03F2
sihiragana;3057
sikatakana;30B7
sikatakanahalfwidth;FF7C
siluqhebrew;05BD
siluqlefthebrew;05BD
similar;223C
sindothebrew;05C2
siosacirclekorean;3274
siosaparenkorean;3214
sioscieuckorean;317E
sioscirclekorean;3266
sioskiyeokkorean;317A
sioskorean;3145
siosnieunkorean;317B
siosparenkorean;3206
siospieupkorean;317D
siostikeutkorean;317C
six;0036
sixarabic;0666
sixbengali;09EC
sixcircle;2465
sixcircleinversesansserif;278F
sixdeva;096C
sixgujarati;0AEC
sixgurmukhi;0A6C
sixhackarabic;0666
sixhangzhou;3026
sixideographicparen;3225
sixinferior;2086
sixmonospace;FF16
sixoldstyle;F736
sixparen;2479
sixperiod;248D
sixpersian;06F6
sixroman;2175
sixsuperior;2076
sixteencircle;246F
sixteencurrencydenominatorbengali;09F9
sixteenparen;2483
sixteenperiod;2497
sixthai;0E56
slash;002F
slashmonospace;FF0F
slong;017F
slongdotaccent;1E9B
smileface;263A
smonospace;FF53
sofpasuqhebrew;05C3
softhyphen;00AD
softsigncyrillic;044C
sohiragana;305D
sokatakana;30BD
sokatakanahalfwidth;FF7F
soliduslongoverlaycmb;0338
solidusshortoverlaycmb;0337
sorusithai;0E29
sosalathai;0E28
sosothai;0E0B
sosuathai;0E2A
space;0020
spacehackarabic;0020
spade;2660
spadesuitblack;2660
spadesuitwhite;2664
sparen;24AE
squarebelowcmb;033B
squarecc;33C4
squarecm;339D
squarediagonalcrosshatchfill;25A9
squarehorizontalfill;25A4
squarekg;338F
squarekm;339E
squarekmcapital;33CE
squareln;33D1
squarelog;33D2
squaremg;338E
squaremil;33D5
squaremm;339C
squaremsquared;33A1
squareorthogonalcrosshatchfill;25A6
squareupperlefttolowerrightfill;25A7
squareupperrighttolowerleftfill;25A8
squareverticalfill;25A5
squarewhitewithsmallblack;25A3
srsquare;33DB
ssabengali;09B7
ssadeva;0937
ssagujarati;0AB7
ssangcieuckorean;3149
ssanghieuhkorean;3185
ssangieungkorean;3180
ssangkiyeokkorean;3132
ssangnieunkorean;3165
ssangpieupkorean;3143
ssangsioskorean;3146
ssangtikeutkorean;3138
ssuperior;F6F2
sterling;00A3
sterlingmonospace;FFE1
strokelongoverlaycmb;0336
strokeshortoverlaycmb;0335
subset;2282
subsetnotequal;228A
subsetorequal;2286
succeeds;227B
suchthat;220B
suhiragana;3059
sukatakana;30B9
sukatakanahalfwidth;FF7D
sukunarabic;0652
summation;2211
sun;263C
superset;2283
supersetnotequal;228B
supersetorequal;2287
svsquare;33DC
syouwaerasquare;337C
t;0074
tabengali;09A4
tackdown;22A4
tackleft;22A3
tadeva;0924
tagujarati;0AA4
tagurmukhi;0A24
taharabic;0637
tahfinalarabic;FEC2
tahinitialarabic;FEC3
tahiragana;305F
tahmedialarabic;FEC4
taisyouerasquare;337D
takatakana;30BF
takatakanahalfwidth;FF80
tatweelarabic;0640
tau;03C4
tav;05EA
tavdages;FB4A
tavdagesh;FB4A
tavdageshhebrew;FB4A
tavhebrew;05EA
tbar;0167
tbopomofo;310A
tcaron;0165
tccurl;02A8
tcedilla;0163
tcheharabic;0686
tchehfinalarabic;FB7B
tchehinitialarabic;FB7C
tchehmedialarabic;FB7D
tchehmeeminitialarabic;FB7C FEE4
tcircle;24E3
tcircumflexbelow;1E71
tcommaaccent;0163
tdieresis;1E97
tdotaccent;1E6B
tdotbelow;1E6D
tecyrillic;0442
tedescendercyrillic;04AD
teharabic;062A
tehfinalarabic;FE96
tehhahinitialarabic;FCA2
tehhahisolatedarabic;FC0C
tehinitialarabic;FE97
tehiragana;3066
tehjeeminitialarabic;FCA1
tehjeemisolatedarabic;FC0B
tehmarbutaarabic;0629
tehmarbutafinalarabic;FE94
tehmedialarabic;FE98
tehmeeminitialarabic;FCA4
tehmeemisolatedarabic;FC0E
tehnoonfinalarabic;FC73
tekatakana;30C6
tekatakanahalfwidth;FF83
telephone;2121
telephoneblack;260E
telishagedolahebrew;05A0
telishaqetanahebrew;05A9
tencircle;2469
tenideographicparen;3229
tenparen;247D
tenperiod;2491
tenroman;2179
tesh;02A7
tet;05D8
tetdagesh;FB38
tetdageshhebrew;FB38
tethebrew;05D8
tetsecyrillic;04B5
tevirhebrew;059B
tevirlefthebrew;059B
thabengali;09A5
thadeva;0925
thagujarati;0AA5
thagurmukhi;0A25
thalarabic;0630
thalfinalarabic;FEAC
thanthakhatlowleftthai;F898
thanthakhatlowrightthai;F897
thanthakhatthai;0E4C
thanthakhatupperleftthai;F896
theharabic;062B
thehfinalarabic;FE9A
thehinitialarabic;FE9B
thehmedialarabic;FE9C
thereexists;2203
therefore;2234
theta;03B8
theta1;03D1
thetasymbolgreek;03D1
thieuthacirclekorean;3279
thieuthaparenkorean;3219
thieuthcirclekorean;326B
thieuthkorean;314C
thieuthparenkorean;320B
thirteencircle;246C
thirteenparen;2480
thirteenperiod;2494
thonangmonthothai;0E11
thook;01AD
thophuthaothai;0E12
thorn;00FE
thothahanthai;0E17
thothanthai;0E10
thothongthai;0E18
thothungthai;0E16
thousandcyrillic;0482
thousandsseparatorarabic;066C
thousandsseparatorpersian;066C
three;0033
threearabic;0663
threebengali;09E9
threecircle;2462
threecircleinversesansserif;278C
threedeva;0969
threeeighths;215C
threegujarati;0AE9
threegurmukhi;0A69
threehackarabic;0663
threehangzhou;3023
threeideographicparen;3222
threeinferior;2083
threemonospace;FF13
threenumeratorbengali;09F6
threeoldstyle;F733
threeparen;2476
threeperiod;248A
threepersian;06F3
threequarters;00BE
threequartersemdash;F6DE
threeroman;2172
threesuperior;00B3
threethai;0E53
thzsquare;3394
tihiragana;3061
tikatakana;30C1
tikatakanahalfwidth;FF81
tikeutacirclekorean;3270
tikeutaparenkorean;3210
tikeutcirclekorean;3262
tikeutkorean;3137
tikeutparenkorean;3202
tilde;02DC
tildebelowcmb;0330
tildecmb;0303
tildecomb;0303
tildedoublecmb;0360
tildeoperator;223C
tildeoverlaycmb;0334
tildeverticalcmb;033E
timescircle;2297
tipehahebrew;0596
tipehalefthebrew;0596
tippigurmukhi;0A70
titlocyrilliccmb;0483
tiwnarmenian;057F
tlinebelow;1E6F
tmonospace;FF54
toarmenian;0569
tohiragana;3068
tokatakana;30C8
tokatakanahalfwidth;FF84
tonebarextrahighmod;02E5
tonebarextralowmod;02E9
tonebarhighmod;02E6
tonebarlowmod;02E8
tonebarmidmod;02E7
tonefive;01BD
tonesix;0185
tonetwo;01A8
tonos;0384
tonsquare;3327
topatakthai;0E0F
tortoiseshellbracketleft;3014
tortoiseshellbracketleftsmall;FE5D
tortoiseshellbracketleftvertical;FE39
tortoiseshellbracketright;3015
tortoiseshellbracketrightsmall;FE5E
tortoiseshellbracketrightvertical;FE3A
totaothai;0E15
tpalatalhook;01AB
tparen;24AF
trademark;2122
trademarksans;F8EA
trademarkserif;F6DB
tretroflexhook;0288
triagdn;25BC
triaglf;25C4
triagrt;25BA
triagup;25B2
ts;02A6
tsadi;05E6
tsadidagesh;FB46
tsadidageshhebrew;FB46
tsadihebrew;05E6
tsecyrillic;0446
tsere;05B5
tsere12;05B5
tsere1e;05B5
tsere2b;05B5
tserehebrew;05B5
tserenarrowhebrew;05B5
tserequarterhebrew;05B5
tserewidehebrew;05B5
tshecyrillic;045B
tsuperior;F6F3
ttabengali;099F
ttadeva;091F
ttagujarati;0A9F
ttagurmukhi;0A1F
tteharabic;0679
ttehfinalarabic;FB67
ttehinitialarabic;FB68
ttehmedialarabic;FB69
tthabengali;09A0
tthadeva;0920
tthagujarati;0AA0
tthagurmukhi;0A20
tturned;0287
tuhiragana;3064
tukatakana;30C4
tukatakanahalfwidth;FF82
tusmallhiragana;3063
tusmallkatakana;30C3
tusmallkatakanahalfwidth;FF6F
twelvecircle;246B
twelveparen;247F
twelveperiod;2493
twelveroman;217B
twentycircle;2473
twentyhangzhou;5344
twentyparen;2487
twentyperiod;249B
two;0032
twoarabic;0662
twobengali;09E8
twocircle;2461
twocircleinversesansserif;278B
twodeva;0968
twodotenleader;2025
twodotleader;2025
twodotleadervertical;FE30
twogujarati;0AE8
twogurmukhi;0A68
twohackarabic;0662
twohangzhou;3022
twoideographicparen;3221
twoinferior;2082
twomonospace;FF12
twonumeratorbengali;09F5
twooldstyle;F732
twoparen;2475
twoperiod;2489
twopersian;06F2
tworoman;2171
twostroke;01BB
twosuperior;00B2
twothai;0E52
twothirds;2154
u;0075
uacute;00FA
ubar;0289
ubengali;0989
ubopomofo;3128
ubreve;016D
ucaron;01D4
ucircle;24E4
ucircumflex;00FB
ucircumflexbelow;1E77
ucyrillic;0443
udattadeva;0951
udblacute;0171
udblgrave;0215
udeva;0909
udieresis;00FC
udieresisacute;01D8
udieresisbelow;1E73
udieresiscaron;01DA
udieresiscyrillic;04F1
udieresisgrave;01DC
udieresismacron;01D6
udotbelow;1EE5
ugrave;00F9
ugujarati;0A89
ugurmukhi;0A09
uhiragana;3046
uhookabove;1EE7
uhorn;01B0
uhornacute;1EE9
uhorndotbelow;1EF1
uhorngrave;1EEB
uhornhookabove;1EED
uhorntilde;1EEF
uhungarumlaut;0171
uhungarumlautcyrillic;04F3
uinvertedbreve;0217
ukatakana;30A6
ukatakanahalfwidth;FF73
ukcyrillic;0479
ukorean;315C
umacron;016B
umacroncyrillic;04EF
umacrondieresis;1E7B
umatragurmukhi;0A41
umonospace;FF55
underscore;005F
underscoredbl;2017
underscoremonospace;FF3F
underscorevertical;FE33
underscorewavy;FE4F
union;222A
universal;2200
uogonek;0173
uparen;24B0
upblock;2580
upperdothebrew;05C4
upsilon;03C5
upsilondieresis;03CB
upsilondieresistonos;03B0
upsilonlatin;028A
upsilontonos;03CD
uptackbelowcmb;031D
uptackmod;02D4
uragurmukhi;0A73
uring;016F
ushortcyrillic;045E
usmallhiragana;3045
usmallkatakana;30A5
usmallkatakanahalfwidth;FF69
ustraightcyrillic;04AF
ustraightstrokecyrillic;04B1
utilde;0169
utildeacute;1E79
utildebelow;1E75
uubengali;098A
uudeva;090A
uugujarati;0A8A
uugurmukhi;0A0A
uumatragurmukhi;0A42
uuvowelsignbengali;09C2
uuvowelsigndeva;0942
uuvowelsigngujarati;0AC2
uvowelsignbengali;09C1
uvowelsigndeva;0941
uvowelsigngujarati;0AC1
v;0076
vadeva;0935
vagujarati;0AB5
vagurmukhi;0A35
vakatakana;30F7
vav;05D5
vavdagesh;FB35
vavdagesh65;FB35
vavdageshhebrew;FB35
vavhebrew;05D5
vavholam;FB4B
vavholamhebrew;FB4B
vavvavhebrew;05F0
vavyodhebrew;05F1
vcircle;24E5
vdotbelow;1E7F
vecyrillic;0432
veharabic;06A4
vehfinalarabic;FB6B
vehinitialarabic;FB6C
vehmedialarabic;FB6D
vekatakana;30F9
venus;2640
verticalbar;007C
verticallineabovecmb;030D
verticallinebelowcmb;0329
verticallinelowmod;02CC
verticallinemod;02C8
vewarmenian;057E
vhook;028B
vikatakana;30F8
viramabengali;09CD
viramadeva;094D
viramagujarati;0ACD
visargabengali;0983
visargadeva;0903
visargagujarati;0A83
vmonospace;FF56
voarmenian;0578
voicediterationhiragana;309E
voicediterationkatakana;30FE
voicedmarkkana;309B
voicedmarkkanahalfwidth;FF9E
vokatakana;30FA
vparen;24B1
vtilde;1E7D
vturned;028C
vuhiragana;3094
vukatakana;30F4
w;0077
wacute;1E83
waekorean;3159
wahiragana;308F
wakatakana;30EF
wakatakanahalfwidth;FF9C
wakorean;3158
wasmallhiragana;308E
wasmallkatakana;30EE
wattosquare;3357
wavedash;301C
wavyunderscorevertical;FE34
wawarabic;0648
wawfinalarabic;FEEE
wawhamzaabovearabic;0624
wawhamzaabovefinalarabic;FE86
wbsquare;33DD
wcircle;24E6
wcircumflex;0175
wdieresis;1E85
wdotaccent;1E87
wdotbelow;1E89
wehiragana;3091
weierstrass;2118
wekatakana;30F1
wekorean;315E
weokorean;315D
wgrave;1E81
whitebullet;25E6
whitecircle;25CB
whitecircleinverse;25D9
whitecornerbracketleft;300E
whitecornerbracketleftvertical;FE43
whitecornerbracketright;300F
whitecornerbracketrightvertical;FE44
whitediamond;25C7
whitediamondcontainingblacksmalldiamond;25C8
whitedownpointingsmalltriangle;25BF
whitedownpointingtriangle;25BD
whiteleftpointingsmalltriangle;25C3
whiteleftpointingtriangle;25C1
whitelenticularbracketleft;3016
whitelenticularbracketright;3017
whiterightpointingsmalltriangle;25B9
whiterightpointingtriangle;25B7
whitesmallsquare;25AB
whitesmilingface;263A
whitesquare;25A1
whitestar;2606
whitetelephone;260F
whitetortoiseshellbracketleft;3018
whitetortoiseshellbracketright;3019
whiteuppointingsmalltriangle;25B5
whiteuppointingtriangle;25B3
wihiragana;3090
wikatakana;30F0
wikorean;315F
wmonospace;FF57
wohiragana;3092
wokatakana;30F2
wokatakanahalfwidth;FF66
won;20A9
wonmonospace;FFE6
wowaenthai;0E27
wparen;24B2
wring;1E98
wsuperior;02B7
wturned;028D
wynn;01BF
x;0078
xabovecmb;033D
xbopomofo;3112
xcircle;24E7
xdieresis;1E8D
xdotaccent;1E8B
xeharmenian;056D
xi;03BE
xmonospace;FF58
xparen;24B3
xsuperior;02E3
y;0079
yaadosquare;334E
yabengali;09AF
yacute;00FD
yadeva;092F
yaekorean;3152
yagujarati;0AAF
yagurmukhi;0A2F
yahiragana;3084
yakatakana;30E4
yakatakanahalfwidth;FF94
yakorean;3151
yamakkanthai;0E4E
yasmallhiragana;3083
yasmallkatakana;30E3
yasmallkatakanahalfwidth;FF6C
yatcyrillic;0463
ycircle;24E8
ycircumflex;0177
ydieresis;00FF
ydotaccent;1E8F
ydotbelow;1EF5
yeharabic;064A
yehbarreearabic;06D2
yehbarreefinalarabic;FBAF
yehfinalarabic;FEF2
yehhamzaabovearabic;0626
yehhamzaabovefinalarabic;FE8A
yehhamzaaboveinitialarabic;FE8B
yehhamzaabovemedialarabic;FE8C
yehinitialarabic;FEF3
yehmedialarabic;FEF4
yehmeeminitialarabic;FCDD
yehmeemisolatedarabic;FC58
yehnoonfinalarabic;FC94
yehthreedotsbelowarabic;06D1
yekorean;3156
yen;00A5
yenmonospace;FFE5
yeokorean;3155
yeorinhieuhkorean;3186
yerahbenyomohebrew;05AA
yerahbenyomolefthebrew;05AA
yericyrillic;044B
yerudieresiscyrillic;04F9
yesieungkorean;3181
yesieungpansioskorean;3183
yesieungsioskorean;3182
yetivhebrew;059A
ygrave;1EF3
yhook;01B4
yhookabove;1EF7
yiarmenian;0575
yicyrillic;0457
yikorean;3162
yinyang;262F
yiwnarmenian;0582
ymonospace;FF59
yod;05D9
yoddagesh;FB39
yoddageshhebrew;FB39
yodhebrew;05D9
yodyodhebrew;05F2
yodyodpatahhebrew;FB1F
yohiragana;3088
yoikorean;3189
yokatakana;30E8
yokatakanahalfwidth;FF96
yokorean;315B
yosmallhiragana;3087
yosmallkatakana;30E7
yosmallkatakanahalfwidth;FF6E
yotgreek;03F3
yoyaekorean;3188
yoyakorean;3187
yoyakthai;0E22
yoyingthai;0E0D
yparen;24B4
ypogegrammeni;037A
ypogegrammenigreekcmb;0345
yr;01A6
yring;1E99
ysuperior;02B8
ytilde;1EF9
yturned;028E
yuhiragana;3086
yuikorean;318C
yukatakana;30E6
yukatakanahalfwidth;FF95
yukorean;3160
yusbigcyrillic;046B
yusbigiotifiedcyrillic;046D
yuslittlecyrillic;0467
yuslittleiotifiedcyrillic;0469
yusmallhiragana;3085
yusmallkatakana;30E5
yusmallkatakanahalfwidth;FF6D
yuyekorean;318B
yuyeokorean;318A
yyabengali;09DF
yyadeva;095F
z;007A
zaarmenian;0566
zacute;017A
zadeva;095B
zagurmukhi;0A5B
zaharabic;0638
zahfinalarabic;FEC6
zahinitialarabic;FEC7
zahiragana;3056
zahmedialarabic;FEC8
zainarabic;0632
zainfinalarabic;FEB0
zakatakana;30B6
zaqefgadolhebrew;0595
zaqefqatanhebrew;0594
zarqahebrew;0598
zayin;05D6
zayindagesh;FB36
zayindageshhebrew;FB36
zayinhebrew;05D6
zbopomofo;3117
zcaron;017E
zcircle;24E9
zcircumflex;1E91
zcurl;0291
zdot;017C
zdotaccent;017C
zdotbelow;1E93
zecyrillic;0437
zedescendercyrillic;0499
zedieresiscyrillic;04DF
zehiragana;305C
zekatakana;30BC
zero;0030
zeroarabic;0660
zerobengali;09E6
zerodeva;0966
zerogujarati;0AE6
zerogurmukhi;0A66
zerohackarabic;0660
zeroinferior;2080
zeromonospace;FF10
zerooldstyle;F730
zeropersian;06F0
zerosuperior;2070
zerothai;0E50
zerowidthjoiner;FEFF
zerowidthnonjoiner;200C
zerowidthspace;200B
zeta;03B6
zhbopomofo;3113
zhearmenian;056A
zhebrevecyrillic;04C2
zhecyrillic;0436
zhedescendercyrillic;0497
zhedieresiscyrillic;04DD
zihiragana;3058
zikatakana;30B8
zinorhebrew;05AE
zlinebelow;1E95
zmonospace;FF5A
zohiragana;305E
zokatakana;30BE
zparen;24B5
zretroflexhook;0290
zstroke;01B6
zuhiragana;305A
zukatakana;30BA
a100;275E
a101;2761
a102;2762
a103;2763
a104;2764
a105;2710
a106;2765
a107;2766
a108;2767
a109;2660
a10;2721
a110;2665
a111;2666
a112;2663
a117;2709
a118;2708
a119;2707
a11;261B
a120;2460
a121;2461
a122;2462
a123;2463
a124;2464
a125;2465
a126;2466
a127;2467
a128;2468
a129;2469
a12;261E
a130;2776
a131;2777
a132;2778
a133;2779
a134;277A
a135;277B
a136;277C
a137;277D
a138;277E
a139;277F
a13;270C
a140;2780
a141;2781
a142;2782
a143;2783
a144;2784
a145;2785
a146;2786
a147;2787
a148;2788
a149;2789
a14;270D
a150;278A
a151;278B
a152;278C
a153;278D
a154;278E
a155;278F
a156;2790
a157;2791
a158;2792
a159;2793
a15;270E
a160;2794
a161;2192
a162;27A3
a163;2194
a164;2195
a165;2799
a166;279B
a167;279C
a168;279D
a169;279E
a16;270F
a170;279F
a171;27A0
a172;27A1
a173;27A2
a174;27A4
a175;27A5
a176;27A6
a177;27A7
a178;27A8
a179;27A9
a17;2711
a180;27AB
a181;27AD
a182;27AF
a183;27B2
a184;27B3
a185;27B5
a186;27B8
a187;27BA
a188;27BB
a189;27BC
a18;2712
a190;27BD
a191;27BE
a192;279A
a193;27AA
a194;27B6
a195;27B9
a196;2798
a197;27B4
a198;27B7
a199;27AC
a19;2713
a1;2701
a200;27AE
a201;27B1
a202;2703
a203;2750
a204;2752
a205;276E
a206;2770
a20;2714
a21;2715
a22;2716
a23;2717
a24;2718
a25;2719
a26;271A
a27;271B
a28;271C
a29;2722
a2;2702
a30;2723
a31;2724
a32;2725
a33;2726
a34;2727
a35;2605
a36;2729
a37;272A
a38;272B
a39;272C
a3;2704
a40;272D
a41;272E
a42;272F
a43;2730
a44;2731
a45;2732
a46;2733
a47;2734
a48;2735
a49;2736
a4;260E
a50;2737
a51;2738
a52;2739
a53;273A
a54;273B
a55;273C
a56;273D
a57;273E
a58;273F
a59;2740
a5;2706
a60;2741
a61;2742
a62;2743
a63;2744
a64;2745
a65;2746
a66;2747
a67;2748
a68;2749
a69;274A
a6;271D
a70;274B
a71;25CF
a72;274D
a73;25A0
a74;274F
a75;2751
a76;25B2
a77;25BC
a78;25C6
a79;2756
a7;271E
a81;25D7
a82;2758
a83;2759
a84;275A
a85;276F
a86;2771
a87;2772
a88;2773
a89;2768
a8;271F
a90;2769
a91;276C
a92;276D
a93;276A
a94;276B
a95;2774
a96;2775
a97;275B
a98;275C
a99;275D
a9;2720
"""
# string table management
#
class StringTable:
def __init__( self, name_list, master_table_name ):
self.names = name_list
self.master_table = master_table_name
self.indices = {}
index = 0
for name in name_list:
self.indices[name] = index
index += len( name ) + 1
self.total = index
def dump( self, file ):
write = file.write
write( " static const char " + self.master_table +
"[" + repr( self.total ) + "] =\n" )
write( " {\n" )
line = ""
for name in self.names:
line += " '"
line += string.join( ( re.findall( ".", name ) ), "','" )
line += "', 0,\n"
write( line + " };\n\n\n" )
def dump_sublist( self, file, table_name, macro_name, sublist ):
write = file.write
write( "#define " + macro_name + " " + repr( len( sublist ) ) + "\n\n" )
write( " /* Values are offsets into the `" +
self.master_table + "' table */\n\n" )
write( " static const short " + table_name +
"[" + macro_name + "] =\n" )
write( " {\n" )
line = " "
comma = ""
col = 0
for name in sublist:
line += comma
line += "%4d" % self.indices[name]
col += 1
comma = ","
if col == 14:
col = 0
comma = ",\n "
write( line + "\n };\n\n\n" )
# We now store the Adobe Glyph List in compressed form. The list is put
# into a data structure called `trie' (because it has a tree-like
# appearance). Consider, for example, that you want to store the
# following name mapping:
#
# A => 1
# Aacute => 6
# Abalon => 2
# Abstract => 4
#
# It is possible to store the entries as follows.
#
# A => 1
# |
# +-acute => 6
# |
# +-b
# |
# +-alon => 2
# |
# +-stract => 4
#
# We see that each node in the trie has:
#
# - one or more `letters'
# - an optional value
# - zero or more child nodes
#
# The first step is to call
#
# root = StringNode( "", 0 )
#     for word in map.keys():
# root.add( word, map[word] )
#
# which creates a large trie where each node has only one child.
#
# Executing
#
# root = root.optimize()
#
# optimizes the trie by merging the letters of successive nodes whenever
# possible.
#
# Each node of the trie is stored as follows.
#
# - First the node's letter, according to the following scheme. We
# use the fact that in the AGL no name contains character codes > 127.
#
# name bitsize description
# ----------------------------------------------------------------
# notlast 1 Set to 1 if this is not the last letter
# in the word.
# ascii 7 The letter's ASCII value.
#
# - The letter is followed by a children count and the value of the
# current key (if any). Again we can do some optimization because all
# AGL entries are from the BMP; this means that 16 bits are sufficient
# to store their Unicode values. Additionally, no node has more than
# 127 children.
#
# name bitsize description
# -----------------------------------------
# hasvalue 1 Set to 1 if a 16-bit Unicode value follows.
# num_children 7 Number of children. Can be 0 only if
# `hasvalue' is set to 1.
# value 16 Optional Unicode value.
#
# - A node is finished by a list of 16bit absolute offsets to the
# children, which must be sorted in increasing order of their first
# letter.
#
# For simplicity, all 16bit quantities are stored in big-endian order.
#
# The root node has first letter = 0, and no value.
#
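#
# The helper below is an illustrative sketch only and is not used by this
# script; it hand-packs a single trie node exactly as described above, so the
# byte layout can be inspected for, say, pack_node_sketch( "A", 1, [] ).
#
def pack_node_sketch( letters, value, child_offsets ):
    import struct

    data = ""

    # letter bytes, with the `notlast' bit set on all but the last letter
    if len( letters ) == 0:
        data += struct.pack( "B", 0 )          # the root node stores a single 0 letter
    else:
        for n in range( len( letters ) ):
            val = ord( letters[n] )
            if n < len( letters ) - 1:
                val += 128
            data += struct.pack( "B", val )

    # children count, `hasvalue' bit, and the optional 16-bit value (big-endian)
    count = len( child_offsets )
    if value != 0:
        data += struct.pack( "!BH", count + 128, value )
    else:
        data += struct.pack( "B", count )

    # absolute 16-bit offsets of the children, big-endian
    for offset in child_offsets:
        data += struct.pack( "!H", offset )

    return data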
class StringNode:
def __init__( self, letter, value ):
self.letter = letter
self.value = value
self.children = {}
def __cmp__( self, other ):
return ord( self.letter[0] ) - ord( other.letter[0] )
def add( self, word, value ):
if len( word ) == 0:
self.value = value
return
letter = word[0]
word = word[1:]
if self.children.has_key( letter ):
child = self.children[letter]
else:
child = StringNode( letter, 0 )
self.children[letter] = child
child.add( word, value )
def optimize( self ):
# optimize all children first
children = self.children.values()
self.children = {}
for child in children:
self.children[child.letter[0]] = child.optimize()
# don't optimize if there's a value,
# if we don't have any child or if we
# have more than one child
if ( self.value != 0 ) or ( not children ) or len( children ) > 1:
return self
child = children[0]
self.letter += child.letter
self.value = child.value
self.children = child.children
return self
def dump_debug( self, write, margin ):
# this is used during debugging
line = margin + "+-"
if len( self.letter ) == 0:
line += "<NOLETTER>"
else:
line += self.letter
if self.value:
line += " => " + repr( self.value )
write( line + "\n" )
if self.children:
margin += "| "
for child in self.children.values():
child.dump_debug( write, margin )
def locate( self, index ):
self.index = index
if len( self.letter ) > 0:
index += len( self.letter ) + 1
else:
index += 2
if self.value != 0:
index += 2
children = self.children.values()
children.sort()
index += 2 * len( children )
for child in children:
index = child.locate( index )
return index
def store( self, storage ):
# write the letters
l = len( self.letter )
if l == 0:
storage += struct.pack( "B", 0 )
else:
for n in range( l ):
val = ord( self.letter[n] )
if n < l - 1:
val += 128
storage += struct.pack( "B", val )
# write the count
children = self.children.values()
children.sort()
count = len( children )
if self.value != 0:
storage += struct.pack( "!BH", count + 128, self.value )
else:
storage += struct.pack( "B", count )
for child in children:
storage += struct.pack( "!H", child.index )
for child in children:
storage = child.store( storage )
return storage
def adobe_glyph_values():
"""return the list of glyph names and their unicode values"""
lines = string.split( adobe_glyph_list, '\n' )
glyphs = []
values = []
for line in lines:
if line:
fields = string.split( line, ';' )
# print fields[1] + ' - ' + fields[0]
subfields = string.split( fields[1], ' ' )
if len( subfields ) == 1:
glyphs.append( fields[0] )
values.append( fields[1] )
return glyphs, values
def filter_glyph_names( alist, filter ):
"""filter `alist' by taking _out_ all glyph names that are in `filter'"""
count = 0
extras = []
for name in alist:
try:
filtered_index = filter.index( name )
except:
extras.append( name )
return extras
def dump_encoding( file, encoding_name, encoding_list ):
"""dump a given encoding"""
write = file.write
write( " /* the following are indices into the SID name table */\n" )
write( " static const unsigned short " + encoding_name +
"[" + repr( len( encoding_list ) ) + "] =\n" )
write( " {\n" )
line = " "
comma = ""
col = 0
for value in encoding_list:
line += comma
line += "%3d" % value
comma = ","
col += 1
if col == 16:
col = 0
comma = ",\n "
write( line + "\n };\n\n\n" )
def dump_array( the_array, write, array_name ):
"""dumps a given encoding"""
write( " static const unsigned char " + array_name +
"[" + repr( len( the_array ) ) + "L] =\n" )
write( " {\n" )
line = ""
comma = " "
col = 0
for value in the_array:
line += comma
line += "%3d" % ord( value )
comma = ","
col += 1
if col == 16:
col = 0
comma = ",\n "
if len( line ) > 1024:
write( line )
line = ""
write( line + "\n };\n\n\n" )
def main():
"""main program body"""
if len( sys.argv ) != 2:
print __doc__ % sys.argv[0]
sys.exit( 1 )
file = open( sys.argv[1], "w" )
write = file.write
count_sid = len( sid_standard_names )
# `mac_extras' contains the list of glyph names in the Macintosh standard
# encoding which are not in the SID Standard Names.
#
mac_extras = filter_glyph_names( mac_standard_names, sid_standard_names )
# `base_list' contains the names of our final glyph names table.
# It consists of the `mac_extras' glyph names, followed by the SID
# standard names.
#
mac_extras_count = len( mac_extras )
base_list = mac_extras + sid_standard_names
write( "/***************************************************************************/\n" )
write( "/* */\n" )
write( "/* %-71s*/\n" % os.path.basename( sys.argv[1] ) )
write( "/* */\n" )
write( "/* PostScript glyph names. */\n" )
write( "/* */\n" )
write( "/* Copyright 2005, 2008, 2011 by */\n" )
write( "/* David Turner, Robert Wilhelm, and Werner Lemberg. */\n" )
write( "/* */\n" )
write( "/* This file is part of the FreeType project, and may only be used, */\n" )
write( "/* modified, and distributed under the terms of the FreeType project */\n" )
write( "/* license, LICENSE.TXT. By continuing to use, modify, or distribute */\n" )
write( "/* this file you indicate that you have read the license and */\n" )
write( "/* understand and accept it fully. */\n" )
write( "/* */\n" )
write( "/***************************************************************************/\n" )
write( "\n" )
write( "\n" )
write( " /* This file has been generated automatically -- do not edit! */\n" )
write( "\n" )
write( "\n" )
# dump final glyph list (mac extras + sid standard names)
#
st = StringTable( base_list, "ft_standard_glyph_names" )
st.dump( file )
st.dump_sublist( file, "ft_mac_names",
"FT_NUM_MAC_NAMES", mac_standard_names )
st.dump_sublist( file, "ft_sid_names",
"FT_NUM_SID_NAMES", sid_standard_names )
dump_encoding( file, "t1_standard_encoding", t1_standard_encoding )
dump_encoding( file, "t1_expert_encoding", t1_expert_encoding )
# dump the AGL in its compressed form
#
agl_glyphs, agl_values = adobe_glyph_values()
dict = StringNode( "", 0 )
for g in range( len( agl_glyphs ) ):
dict.add( agl_glyphs[g], eval( "0x" + agl_values[g] ) )
dict = dict.optimize()
dict_len = dict.locate( 0 )
dict_array = dict.store( "" )
write( """\
/*
* This table is a compressed version of the Adobe Glyph List (AGL),
* optimized for efficient searching. It has been generated by the
* `glnames.py' python script located in the `src/tools' directory.
*
* The lookup function to get the Unicode value for a given string
* is defined below the table.
*/
#ifdef FT_CONFIG_OPTION_ADOBE_GLYPH_LIST
""" )
dump_array( dict_array, write, "ft_adobe_glyph_list" )
# write the lookup routine now
#
write( """\
/*
* This function searches the compressed table efficiently.
*/
static unsigned long
ft_get_adobe_glyph_index( const char* name,
const char* limit )
{
int c = 0;
int count, min, max;
const unsigned char* p = ft_adobe_glyph_list;
if ( name == 0 || name >= limit )
goto NotFound;
c = *name++;
count = p[1];
p += 2;
min = 0;
max = count;
while ( min < max )
{
int mid = ( min + max ) >> 1;
const unsigned char* q = p + mid * 2;
int c2;
q = ft_adobe_glyph_list + ( ( (int)q[0] << 8 ) | q[1] );
c2 = q[0] & 127;
if ( c2 == c )
{
p = q;
goto Found;
}
if ( c2 < c )
min = mid + 1;
else
max = mid;
}
goto NotFound;
Found:
for (;;)
{
/* assert (*p & 127) == c */
if ( name >= limit )
{
if ( (p[0] & 128) == 0 &&
(p[1] & 128) != 0 )
return (unsigned long)( ( (int)p[2] << 8 ) | p[3] );
goto NotFound;
}
c = *name++;
if ( p[0] & 128 )
{
p++;
if ( c != (p[0] & 127) )
goto NotFound;
continue;
}
p++;
count = p[0] & 127;
if ( p[0] & 128 )
p += 2;
p++;
for ( ; count > 0; count--, p += 2 )
{
int offset = ( (int)p[0] << 8 ) | p[1];
const unsigned char* q = ft_adobe_glyph_list + offset;
if ( c == ( q[0] & 127 ) )
{
p = q;
goto NextIter;
}
}
goto NotFound;
NextIter:
;
}
NotFound:
return 0;
}
#endif /* FT_CONFIG_OPTION_ADOBE_GLYPH_LIST */
""" )
if 0: # generate unit test, or don't
#
# now write the unit test to check that everything works OK
#
write( "#ifdef TEST\n\n" )
write( "static const char* const the_names[] = {\n" )
for name in agl_glyphs:
write( ' "' + name + '",\n' )
write( " 0\n};\n" )
write( "static const unsigned long the_values[] = {\n" )
for val in agl_values:
write( ' 0x' + val + ',\n' )
write( " 0\n};\n" )
write( """
#include <stdlib.h>
#include <stdio.h>
int
main( void )
{
int result = 0;
const char* const* names = the_names;
const unsigned long* values = the_values;
for ( ; *names; names++, values++ )
{
const char* name = *names;
unsigned long reference = *values;
unsigned long value;
value = ft_get_adobe_glyph_index( name, name + strlen( name ) );
if ( value != reference )
{
result = 1;
fprintf( stderr, "name '%s' => %04x instead of %04x\\n",
name, value, reference );
}
}
return result;
}
""" )
write( "#endif /* TEST */\n" )
write("\n/* END */\n")
# Now run the main routine
#
main()
# END
| gpl-2.0 |
jeremyblalock/translucent | translucent.py | 1 | 1658 | #!/usr/bin/python
import sys, re
class Color(object):
def __init__(self, inp, alpha=1):
self.alpha = alpha
if type(inp) is str:
if re.match(r'^#[0-9a-fA-F]{3}$', inp):
inp = '#' + inp[1] * 2 + inp[2] * 2 + inp[3] * 2
if re.match(r'^#[0-9a-fA-F]{6}$', inp):
self._data = (int(inp[1:3], 16),
int(inp[3:5], 16),
int(inp[5:7], 16))
if re.match(r'^\d{1,3}\s*,\s*\d{1,3}\s*,\s*\d{1,3}\s*$', inp):
self._data = tuple([int(a) for a in inp.split(',')])
else:
self._data = inp
def translucent(self, alpha):
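        # Find the color that, when composited at the given alpha over a plain
        # white (255, 255, 255) background, reproduces this color.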
bg = 255
newcolor = [unmix(x, bg, alpha) for x in self._data]
return self.__class__(newcolor)
def hex(self):
cleaned_data = [int(round(x)) for x in self._data]
for i in range(len(cleaned_data)):
if cleaned_data[i] > 255:
cleaned_data[i] = 255
elif cleaned_data[i] < 0:
cleaned_data[i] = 0
output = '#' + ''.join([('0' + '{0:X}'.format(x))[-2:] for x in cleaned_data])
return output
def __repr__(self):
return 'Color(' + self.hex() + ')'
#return 'Color' + str(self._data)
def unmix(composite, bg, alpha):
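    # Invert simple alpha compositing over an opaque background:
    #   composite = alpha * top + (1 - alpha) * bg
    #   =>  top = (composite - (1 - alpha) * bg) / alpha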
top = (composite - (1 - alpha) * bg) / alpha
return top
if __name__ == '__main__':
color = Color(sys.argv[1])
alpha = float(sys.argv[2])
if alpha > 1 or alpha < 0:
raise Exception('Alpha must be between 0 & 1.')
print 'COLOR:', color
print 'ALPHA:', alpha
print 'TRANSLUCENT:', color.translucent(alpha)
| mit |
petrk94/Bytebot | plugins/weather.py | 1 | 1832 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
from time import time
from plugins.plugin import Plugin
from bytebot_config import BYTEBOT_PLUGIN_CONFIG
import requests
class weather(Plugin):
"""
    The weather plugin reads the current temperature for the city of Erfurt.
    To use it you need to register for an API key at openweathermap:
    http://openweathermap.org/api
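    A minimal sketch of the BYTEBOT_PLUGIN_CONFIG['weather'] entry this plugin
    expects (the keys are taken from the code below; the example values are
    illustrative assumptions, not part of this file):
        BYTEBOT_PLUGIN_CONFIG['weather'] = {
            'url': 'http://api.openweathermap.org/data/2.5/weather?q=',
            'location': 'Erfurt',
            'api_key': '<your openweathermap API key>',
        }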
"""
def __init__(self):
pass
def registerCommand(self, irc):
irc.registerCommand('!weather', 'weather in Erfurt')
def onPrivmsg(self, irc, msg, channel, user):
if msg.find('!weather') == -1:
return
self.irc = irc
self.channel = channel
try:
last_weather = irc.last_weather
except Exception:
last_weather = 0
if last_weather < (time() - 5):
config = BYTEBOT_PLUGIN_CONFIG['weather']
if msg.find(' ') == -1:
location = config['location']
else:
location = "".join(msg.split(' ')[1:])
url = config['url'] + location + '&appid=%s' % config['api_key']
r = requests.get(url)
if r.status_code != 200:
irc.msg(channel, 'Error while retrieving weather data')
return
try:
j = r.json()
temp = j["main"]["temp"]
location = "%s,%s" % (j["name"].encode('utf-8'),
j["sys"]["country"].encode('utf-8'))
except KeyError:
irc.msg(channel, "Error while retrieving weather data")
return
irc.msg(channel, "We have now %2.2f °C in %s" % (temp, location))
irc.last_weather = time()
else:
irc.msg(channel, "Don't overdo it ;)")
| mit |
goliate/sarakha63-persomov | couchpotato/core/media/movie/providers/trailer/youtube_dl/extractor/freespeech.py | 165 | 1226 | from __future__ import unicode_literals
import re
import json
from .common import InfoExtractor
class FreespeechIE(InfoExtractor):
IE_NAME = 'freespeech.org'
_VALID_URL = r'https://www\.freespeech\.org/video/(?P<title>.+)'
_TEST = {
'add_ie': ['Youtube'],
'url': 'https://www.freespeech.org/video/obama-romney-campaign-colorado-ahead-debate-0',
'info_dict': {
'id': 'poKsVCZ64uU',
'ext': 'mp4',
'title': 'Obama, Romney Campaign in Colorado Ahead of Debate',
'description': 'Obama, Romney Campaign in Colorado Ahead of Debate',
'uploader': 'freespeechtv',
'uploader_id': 'freespeechtv',
'upload_date': '20121002',
},
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
title = mobj.group('title')
webpage = self._download_webpage(url, title)
info_json = self._search_regex(r'jQuery.extend\(Drupal.settings, ({.*?})\);', webpage, 'info')
info = json.loads(info_json)
return {
'_type': 'url',
'url': info['jw_player']['basic_video_node_player']['file'],
'ie_key': 'Youtube',
}
| gpl-3.0 |
infirit/blueman | blueman/main/Tray.py | 2 | 2373 | from importlib import import_module
import logging
import os
import sys
from blueman.main.DBusProxies import AppletService
from gi.repository import Gio, GLib
class BluemanTray(Gio.Application):
def __init__(self) -> None:
super().__init__(application_id="org.blueman.Tray", flags=Gio.ApplicationFlags.FLAGS_NONE)
self._active = False
def do_activate(self) -> None:
if self._active:
logging.info("Already running, restarting instance")
os.execv(sys.argv[0], sys.argv)
Gio.bus_watch_name(Gio.BusType.SESSION, 'org.blueman.Applet', Gio.BusNameWatcherFlags.NONE,
self._on_name_appeared, self._on_name_vanished)
self.hold()
def _on_name_appeared(self, _connection: Gio.DBusConnection, name: str, _owner: str) -> None:
logging.debug("Applet started on name %s, showing indicator" % name)
applet = AppletService()
indicator_name = applet.GetStatusIconImplementation()
logging.info(f'Using indicator "{indicator_name}"')
indicator_class = getattr(import_module('blueman.main.indicators.' + indicator_name), indicator_name)
self.indicator = indicator_class(applet.GetIconName(), self._activate_menu_item, self._activate_status_icon)
applet.connect('g-signal', self.on_signal)
self.indicator.set_text(applet.GetText())
self.indicator.set_visibility(applet.GetVisibility())
self.indicator.set_menu(applet.GetMenu())
self._active = True
def _on_name_vanished(self, _connection: Gio.DBusConnection, _name: str) -> None:
logging.debug("Applet shutdown or not available at startup")
self.quit()
def _activate_menu_item(self, *indexes: int) -> None:
AppletService().ActivateMenuItem('(ai)', indexes)
def _activate_status_icon(self) -> None:
AppletService().Activate()
def on_signal(self, _applet: AppletService, _sender_name: str, signal_name: str, args: GLib.Variant) -> None:
if signal_name == 'IconNameChanged':
self.indicator.set_icon(*args)
elif signal_name == 'TextChanged':
self.indicator.set_text(*args)
elif signal_name == 'VisibilityChanged':
self.indicator.set_visibility(*args)
elif signal_name == 'MenuChanged':
self.indicator.set_menu(*args)
| gpl-3.0 |
rsj217/tongdao | app/libs/router.py | 1 | 1736 | #!/usr/bin/env python
# -*- coding:utf-8 -*-
# # The route helpers were originally written by
# # Jeremy Kelley (http://github.com/nod).
import tornado.web
class Route(object):
"""
    decorates RequestHandlers and builds up a list of routable handlers
Tech Notes (or 'What the *@# is really happening here?')
--------------------------------------------------------
    Every time @route('...') is called, we instantiate a new route object which
saves off the passed in URI. Then, since it's a decorator, the function is
passed to the route.__call__ method as an argument. We save a reference to
that handler with our uri in our class level routes list then return that
class to be instantiated as normal.
Later, we can call the classmethod route.get_routes to return that list of
tuples which can be handed directly to the tornado.web.Application
instantiation.
Example
-------
@route('/some/path')
class SomeRequestHandler(RequestHandler):
pass
@route('/some/path', name='other')
class SomeOtherRequestHandler(RequestHandler):
pass
my_routes = route.get_routes()
"""
routes = []
def __init__(self, uri, name=None):
self._uri = uri
self.name = name
def __call__(self, _handler):
"""gets called when we class decorate"""
name = self.name and self.name or _handler.__name__
self.routes.append(tornado.web.url(self._uri, _handler, name=name))
return _handler
@classmethod
def get_routes(cls):
return cls.routes
def route_redirect(from_, to, name=None):
Route.routes.append(tornado.web.url(from_, tornado.web.RedirectHandler, dict(url=to), name=name))
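# Usage sketch (paths and names below are illustrative, not from this module):
#
#   route_redirect('/old/path', '/some/path', name='old_path')
#   application = tornado.web.Application(Route.get_routes())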
| mit |
moondrop-entertainment/django-nonrel-drawp | django/conf/locale/sr_Latn/formats.py | 655 | 1980 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j. F Y.'
TIME_FORMAT = 'H:i'
DATETIME_FORMAT = 'j. F Y. H:i'
YEAR_MONTH_FORMAT = 'F Y.'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'j.m.Y.'
SHORT_DATETIME_FORMAT = 'j.m.Y. H:i'
FIRST_DAY_OF_WEEK = 1
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = (
'%d.%m.%Y.', '%d.%m.%y.', # '25.10.2006.', '25.10.06.'
'%d. %m. %Y.', '%d. %m. %y.', # '25. 10. 2006.', '25. 10. 06.'
'%Y-%m-%d', # '2006-10-25'
# '%d. %b %y.', '%d. %B %y.', # '25. Oct 06.', '25. October 06.'
# '%d. %b \'%y.', '%d. %B \'%y.', # '25. Oct '06.', '25. October '06.'
# '%d. %b %Y.', '%d. %B %Y.', # '25. Oct 2006.', '25. October 2006.'
)
TIME_INPUT_FORMATS = (
'%H:%M:%S', # '14:30:59'
'%H:%M', # '14:30'
)
DATETIME_INPUT_FORMATS = (
'%d.%m.%Y. %H:%M:%S', # '25.10.2006. 14:30:59'
'%d.%m.%Y. %H:%M', # '25.10.2006. 14:30'
'%d.%m.%Y.', # '25.10.2006.'
'%d.%m.%y. %H:%M:%S', # '25.10.06. 14:30:59'
'%d.%m.%y. %H:%M', # '25.10.06. 14:30'
'%d.%m.%y.', # '25.10.06.'
'%d. %m. %Y. %H:%M:%S', # '25. 10. 2006. 14:30:59'
'%d. %m. %Y. %H:%M', # '25. 10. 2006. 14:30'
'%d. %m. %Y.', # '25. 10. 2006.'
'%d. %m. %y. %H:%M:%S', # '25. 10. 06. 14:30:59'
'%d. %m. %y. %H:%M', # '25. 10. 06. 14:30'
'%d. %m. %y.', # '25. 10. 06.'
'%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
'%Y-%m-%d %H:%M', # '2006-10-25 14:30'
'%Y-%m-%d', # '2006-10-25'
)
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
| bsd-3-clause |
mvesper/invenio-demosite | invenio_demosite/testsuite/regression/test_bibindex.py | 7 | 67493 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio Demosite.
# Copyright (C) 2006, 2007, 2008, 2010, 2011, 2013 CERN.
#
# Invenio Demosite is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio Demosite is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""BibIndex Regression Test Suite."""
__revision__ = "$Id$"
from invenio.testsuite import InvenioTestCase
import os
from datetime import timedelta
from intbitset import intbitset
from invenio.testsuite import make_test_suite, run_test_suite, nottest
from invenio.base.globals import cfg
from invenio.base.wrappers import lazy_import
from invenio_demosite.testsuite.regression.test_bibupload import wipe_out_record_from_all_tables
WordTable = lazy_import('invenio.legacy.bibindex.engine:WordTable')
get_word_tables = lazy_import('invenio.legacy.bibindex.engine:get_word_tables')
find_affected_records_for_index = lazy_import('invenio.legacy.bibindex.engine:find_affected_records_for_index')
get_recIDs_by_date_authority = lazy_import('invenio.legacy.bibindex.engine:get_recIDs_by_date_authority')
get_recIDs_by_date_bibliographic = lazy_import('invenio.legacy.bibindex.engine:get_recIDs_by_date_bibliographic')
create_range_list = lazy_import('invenio.legacy.bibindex.engine:create_range_list')
beautify_range_list = lazy_import('invenio.legacy.bibindex.engine:beautify_range_list')
get_last_updated_all_indexes = lazy_import('invenio.legacy.bibindex.engine:get_last_updated_all_indexes')
get_index_id_from_index_name = lazy_import('invenio.legacy.bibindex.engine_utils:get_index_id_from_index_name')
get_index_tags = lazy_import('invenio.legacy.bibindex.engine_utils:get_index_tags')
get_tag_indexes = lazy_import('invenio.legacy.bibindex.engine_utils:get_tag_indexes')
get_all_indexes = lazy_import('invenio.legacy.bibindex.engine_utils:get_all_indexes')
CFG_BIBINDEX_INDEX_TABLE_TYPE = lazy_import('invenio.legacy.bibindex.engine_config:CFG_BIBINDEX_INDEX_TABLE_TYPE')
task_low_level_submission = lazy_import('invenio.legacy.bibsched.bibtask:task_low_level_submission')
run_sql = lazy_import('invenio.legacy.dbquery:run_sql')
deserialize_via_marshal = lazy_import('invenio.legacy.dbquery:deserialize_via_marshal')
get_record = lazy_import('invenio.legacy.search_engine:get_record')
get_fieldvalues = lazy_import('invenio.legacy.bibrecord:get_fieldvalues')
get_index_strings_by_control_no = lazy_import('invenio.legacy.bibauthority.engine:get_index_strings_by_control_no')
get_control_nos_from_recID = lazy_import('invenio.legacy.bibauthority.engine:get_control_nos_from_recID')
run_sql_drop_silently = lazy_import('invenio.legacy.bibindex.engine_utils:run_sql_drop_silently')
bibupload = lazy_import('invenio.legacy.bibupload.engine:bibupload')
xml_marc_to_records = lazy_import('invenio.legacy.bibupload.engine:xml_marc_to_records')
record_get_field_value = lazy_import('invenio.legacy.bibrecord:record_get_field_value')
get_max_recid = lazy_import('invenio.legacy.bibsort.engine:get_max_recid')
def reindex_for_type_with_bibsched(index_name, force_all=False, *other_options):
"""Runs bibindex for the specified index and returns the task_id.
@param index_name: name of the index to reindex
    @param force_all: if True, the function reindexes all records,
                      not just the affected ones
"""
program = os.path.join(cfg['CFG_BINDIR'], 'bibindex')
args = ['bibindex', 'bibindex_regression_tests', '-w', index_name, '-u', 'admin']
args.extend(other_options)
if force_all:
args.append("--force")
task_id = task_low_level_submission(*args)
COMMAND = "%s %s > /dev/null 2> /dev/null" % (program, str(task_id))
os.system(COMMAND)
return task_id
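# Usage sketch (the index name is illustrative): force a full reindex of the
# 'title' index through bibsched and wait for the spawned task to finish:
#
#   task_id = reindex_for_type_with_bibsched('title', force_all=True)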
def prepare_for_index_update(index_id, parameters={}):
""" Prepares SQL query for an update of an index in the idxINDEX table.
Takes into account remove_stopwords, remove_html_markup, remove_latex_markup,
tokenizer and last_updated as parameters to change.
remove_html_markup and remove_latex_markup accepts these values:
'' to leave it unchanged
'Yes' to change it to 'Yes'
'No' to change it to 'No'.
For remove_stopwords instead of 'Yes' one must give the name of the file (for example: 'stopwords.kb')
from CFG_ETCDIR/bibrank/ directory pointing at stopwords knowledge base.
For tokenizer please specify the name of the tokenizer.
For last_updated provide a date in format: '2013-01-31 00:00:00'
@param index_id: id of the index to change
@param parameters: dict with names of parameters and their new values
"""
if len(parameters) == 0:
return ''
parameter_set = False
query_update = "UPDATE idxINDEX SET "
for key in parameters:
if parameters[key]:
query_update += parameter_set and ", " or ""
query_update += "%s='%s'" % (key, parameters[key])
parameter_set = True
query_update += " WHERE id=%s" % index_id
return query_update
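# Usage sketch, mirroring the test classes below (values are illustrative):
# build and run an UPDATE that switches stopword removal on for 'title':
#
#   query = prepare_for_index_update(get_index_id_from_index_name('title'),
#                                    parameters={'remove_stopwords': 'stopwords.kb'})
#   if query:
#       run_sql(query)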
@nottest
def reindex_word_tables_into_testtables(index_name, recids = None, prefix = 'test', parameters={}, turn_off_virtual_indexes=True):
    """Function for setting up a test environment. Reindexes an index with a given name to a
new temporary table with a given prefix. During the reindexing it changes some parameters
of chosen index. It's useful for conducting tests concerning the reindexing process.
Reindexes only idxWORDxxx tables.
@param index_name: name of the index we want to reindex
    @param recids: None means reindexing all records; pass record ids to update only part of them
    @param prefix: prefix for the new tables; if it's set to False the function will reindex into the original table
@param parameters: dict with parameters and their new values; for more specific
description take a look at 'prepare_for_index_update' function.
@param turn_off_virtual_indexes: if True only specific index will be reindexed
without connected virtual indexes
"""
index_id = get_index_id_from_index_name(index_name)
query_update = prepare_for_index_update(index_id, parameters)
last_updated = run_sql("""SELECT last_updated FROM idxINDEX WHERE id=%s""" % index_id)[0][0]
test_tablename = "%s_idxWORD%02d" % (prefix, index_id)
query_drop_forward_index_table = """DROP TABLE IF EXISTS %sF""" % test_tablename
query_drop_reversed_index_table = """DROP TABLE IF EXISTS %sR""" % test_tablename
query_create_forward_index_table = """CREATE TABLE %sF (
id mediumint(9) unsigned NOT NULL auto_increment,
term varchar(50) default NULL,
hitlist longblob,
PRIMARY KEY (id),
UNIQUE KEY term (term)
) ENGINE=MyISAM""" % test_tablename
query_create_reversed_index_table = """CREATE TABLE %sR (
id_bibrec mediumint(9) unsigned NOT NULL,
termlist longblob,
type enum('CURRENT','FUTURE','TEMPORARY') NOT NULL default 'CURRENT',
PRIMARY KEY (id_bibrec,type)
) ENGINE=MyISAM""" % test_tablename
run_sql_drop_silently(query_drop_forward_index_table)
run_sql_drop_silently(query_drop_reversed_index_table)
run_sql(query_create_forward_index_table)
run_sql(query_create_reversed_index_table)
if query_update:
run_sql(query_update)
pattern = 'idxWORD'
if prefix:
pattern = '%s_idxWORD' % prefix
wordTable = WordTable(index_name=index_name,
index_id=index_id,
fields_to_index=get_index_tags(index_name),
table_name_pattern= pattern + '%02dF',
wordtable_type = CFG_BIBINDEX_INDEX_TABLE_TYPE["Words"],
tag_to_tokenizer_map={'8564_u': "BibIndexEmptyTokenizer"},
wash_index_terms=50)
if turn_off_virtual_indexes:
wordTable.turn_off_virtual_indexes()
if recids:
wordTable.add_recIDs(recids, 10000)
else:
recIDs_for_index = find_affected_records_for_index([index_name],
[[1, get_max_recid()]],
True)
bib_recIDs = get_recIDs_by_date_bibliographic([], index_name)
auth_recIDs = get_recIDs_by_date_authority([], index_name)
final_recIDs = bib_recIDs | auth_recIDs
final_recIDs = set(final_recIDs) & set(recIDs_for_index[index_name])
final_recIDs = beautify_range_list(create_range_list(list(final_recIDs)))
wordTable.add_recIDs(final_recIDs, 10000)
return last_updated
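# Usage sketch, mirroring the setUp methods below: reindex 'title' into the
# test_idxWORD tables with stopword removal enabled, keeping the previous
# last_updated value so it can be restored afterwards:
#
#   last_updated = reindex_word_tables_into_testtables(
#       'title', parameters={'remove_stopwords': 'stopwords.kb',
#                            'last_updated': '0000-00-00 00:00:00'})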
@nottest
def remove_reindexed_word_testtables(index_name, prefix = 'test'):
"""
Removes prefix_idxWORDxxx tables created during tests.
@param index_name: name of the index
@param prefix: prefix for the tables
"""
index_id = get_index_id_from_index_name(index_name)
test_tablename = "%s_idxWORD%02d" % (prefix, index_id)
query_drop_forward_index_table = """DROP TABLE IF EXISTS %sF""" % test_tablename
query_drop_reversed_index_table = """DROP TABLE IF EXISTS %sR""" % test_tablename
run_sql(query_drop_forward_index_table)
run_sql(query_drop_reversed_index_table)
class BibIndexRemoveStopwordsTest(InvenioTestCase):
"""Tests remove_stopwords parameter of an index. Changes it in the database
    and reindexes from scratch into a new table to see the difference brought
    about by the change. Uses the 'title' index.
"""
test_counter = 0
reindexed = False
@classmethod
def setUp(self):
"""reindexation to new table"""
if not self.reindexed:
self.last_updated = reindex_word_tables_into_testtables(
'title',
parameters = {'remove_stopwords':'stopwords.kb',
'last_updated':'0000-00-00 00:00:00'})
self.reindexed = True
@classmethod
def tearDown(self):
"""cleaning up"""
self.test_counter += 1
if self.test_counter == 4:
remove_reindexed_word_testtables('title')
reverse_changes = prepare_for_index_update(
get_index_id_from_index_name('title'),
parameters = {'remove_stopwords':'No',
'last_updated':self.last_updated})
run_sql(reverse_changes)
    def test_check_occurrences_of_stopwords_in_testable_word_of(self):
        """Tests that the stopword 'of' is not present in the new reindexed table"""
query = "SELECT hitlist FROM test_idxWORD08F WHERE term='of'"
res = run_sql(query)
self.assertEqual(0, len(res))
    def test_check_occurrences_of_stopwords_in_testable_word_everything(self):
        """Tests that the term 'everything' is not present in the new reindexed table"""
query = "SELECT hitlist FROM test_idxWORD08F WHERE term='everything'"
res = run_sql(query)
self.assertEqual(0, len(res))
def test_compare_non_stopwords_occurrences_in_original_and_test_tables_word_theory(self):
"""Checks if stopwords removing has no influence on indexation of word 'theory' """
word = "theori" #theori not theory, because of default stemming for title index
query = "SELECT hitlist FROM test_idxWORD08F WHERE term='%s'" % word
iset_removed = "iset_removed"
iset_original = "iset_original"
res = run_sql(query)
if res:
iset_removed = intbitset(res[0][0])
query = "SELECT hitlist FROM idxWORD08F WHERE term='%s'" % word
res = run_sql(query)
if res:
iset_original = intbitset(res[0][0])
self.assertEqual(len(iset_removed), len(iset_original))
def test_compare_non_stopwords_occurrences_in_original_and_test_tables_word_on(self):
"""Checks if stopwords removing has no influence on indexation of word 'o(n)' """
word = "o(n)"
query = "SELECT hitlist FROM test_idxWORD08F WHERE term='%s'" % word
iset_removed = "iset_removed"
iset_original = "iset_original"
res = run_sql(query)
if res:
iset_removed = intbitset(res[0][0])
query = "SELECT hitlist FROM idxWORD08F WHERE term='%s'" % word
res = run_sql(query)
if res:
iset_original = intbitset(res[0][0])
self.assertEqual(len(iset_removed), len(iset_original))
class BibIndexRemoveLatexTest(InvenioTestCase):
"""Tests remove_latex_markup parameter of an index. Changes it in the database
    and reindexes from scratch into a new table to see the difference brought
    about by the change. Uses the 'abstract' index.
"""
test_counter = 0
reindexed = False
@classmethod
def setUp(self):
"""reindexation to new table"""
if not self.reindexed:
self.last_updated = reindex_word_tables_into_testtables(
'abstract',
parameters = {'remove_latex_markup':'Yes',
'last_updated':'0000-00-00 00:00:00'})
self.reindexed = True
@classmethod
def tearDown(self):
"""cleaning up"""
self.test_counter += 1
if self.test_counter == 4:
remove_reindexed_word_testtables('abstract')
reverse_changes = prepare_for_index_update(
get_index_id_from_index_name('abstract'),
parameters = {'remove_latex_markup':'No',
'last_updated':self.last_updated})
run_sql(reverse_changes)
    def test_check_occurrences_after_latex_removal_word_u1(self):
        """Tests how many times the expression 'u(1)' occurs"""
word = "u(1)"
query = "SELECT hitlist FROM test_idxWORD%02dF WHERE term='%s'" % (get_index_id_from_index_name('abstract'), word)
res = run_sql(query)
iset = "iset_change"
if res:
iset = intbitset(res[0][0])
self.assertEqual(3, len(iset))
    def test_check_exact_occurrences_after_latex_removal_word_theta(self):
        """Tests where the expression 'theta' occurs"""
word = "theta"
query = "SELECT hitlist FROM test_idxWORD%02dF WHERE term='%s'" % (get_index_id_from_index_name('abstract'), word)
res = run_sql(query)
ilist = []
if res:
iset = intbitset(res[0][0])
ilist = iset.tolist()
self.assertEqual([12], ilist)
def test_compare_occurrences_after_and_before_latex_removal_math_expression(self):
"""Checks if latex removal has no influence on indexation of expression 's(u(n_1)*u(n_2))' """
word = 's(u(n_1)*u(n_2))'
query = "SELECT hitlist FROM test_idxWORD%02dF WHERE term='%s'" % (get_index_id_from_index_name('abstract'), word)
res = run_sql(query)
ilist_test = []
if res:
iset = intbitset(res[0][0])
ilist_test = iset.tolist()
word = 's(u(n_1)*u(n_2))'
query = "SELECT hitlist FROM idxWORD%02dF WHERE term='%s'" % (get_index_id_from_index_name('abstract'), word)
res = run_sql(query)
ilist = ["default_not_equal"]
if res:
iset = intbitset(res[0][0])
ilist = iset.tolist()
self.assertEqual(ilist, ilist_test)
def test_check_occurrences_latex_expression_with_u1(self):
"""Tests influence of latex removal on record 80"""
word = '%over u(1)%'
query = "SELECT hitlist FROM test_idxWORD%02dF WHERE term LIKE '%s'" % (get_index_id_from_index_name('abstract'), word)
res = run_sql(query)
ilist = []
if res:
iset = intbitset(res[0][0])
ilist = iset.tolist()
self.assertEqual([80], ilist)
class BibIndexRemoveHtmlTest(InvenioTestCase):
"""Tests remove_html_markup parameter of an index. Changes it in the database
    and reindexes from scratch into a new table to see the difference brought
    about by the change. Uses the 'abstract' index.
"""
test_counter = 0
reindexed = False
@classmethod
def setUp(self):
"""reindexation to new table"""
if not self.reindexed:
self.last_updated = reindex_word_tables_into_testtables(
'abstract',
parameters = {'remove_html_markup':'Yes',
'last_updated':'0000-00-00 00:00:00'})
self.reindexed = True
@classmethod
def tearDown(self):
"""cleaning up"""
self.test_counter += 1
if self.test_counter == 2:
remove_reindexed_word_testtables('abstract')
reverse_changes = prepare_for_index_update(
get_index_id_from_index_name('abstract'),
parameters = {'remove_html_markup':'No',
'last_updated':self.last_updated})
run_sql(reverse_changes)
def test_check_occurrences_after_html_removal_tag_p(self):
"""Tests if expression 'water-hog</p>' is not indexed after html markup removal"""
word = 'water-hog</p>'
query = "SELECT hitlist FROM test_idxWORD%02dF WHERE term='%s'" % (get_index_id_from_index_name('abstract'), word)
res = run_sql(query)
ilist = []
if res:
iset = intbitset(res[0][0])
ilist = iset.tolist()
self.assertEqual(0, len(ilist))
def test_check_occurrences_after_and_before_html_removal_word_style(self):
"""Tests html markup removal influence on expression 'style="width' """
word = 'style="width'
query = "SELECT hitlist FROM test_idxWORD%02dF WHERE term='%s'" % (get_index_id_from_index_name('abstract'), word)
res = run_sql(query)
ilist_test = []
if res:
iset = intbitset(res[0][0])
ilist_test = iset.tolist()
query = "SELECT hitlist FROM idxWORD%02dF WHERE term='%s'" % (get_index_id_from_index_name('abstract'), word)
res = run_sql(query)
ilist = []
if res:
iset = intbitset(res[0][0])
ilist = iset.tolist()
self.assertNotEqual(ilist, ilist_test)
class BibIndexYearIndexTest(InvenioTestCase):
"""
    Checks year index. Tests are different from those inside the WebSearch module because
they only test content and reindexation and not the search itself.
"""
test_counter = 0
reindexed = False
@classmethod
def setUp(self):
"""reindexation to new table"""
if not self.reindexed:
self.last_updated = reindex_word_tables_into_testtables(
'year',
parameters = {'last_updated':'0000-00-00 00:00:00'})
self.reindexed = True
@classmethod
def tearDown(self):
"""cleaning up"""
self.test_counter += 1
if self.test_counter == 3:
remove_reindexed_word_testtables('year')
reverse_changes = prepare_for_index_update(
get_index_id_from_index_name('year'),
parameters = {'last_updated':self.last_updated})
run_sql(reverse_changes)
def test_occurrences_in_year_index_1973(self):
"""checks content of year index for year 1973"""
word = '1973'
query = "SELECT hitlist FROM test_idxWORD%02dF WHERE term='%s'" % (get_index_id_from_index_name('year'), word)
res = run_sql(query)
ilist = []
if res:
iset = intbitset(res[0][0])
ilist = iset.tolist()
self.assertEqual([34], ilist)
def test_occurrences_in_year_index_2001(self):
"""checks content of year index for year 2001"""
word = '2001'
query = "SELECT hitlist FROM test_idxWORD%02dF WHERE term='%s'" % (get_index_id_from_index_name('year'), word)
res = run_sql(query)
ilist = []
if res:
iset = intbitset(res[0][0])
ilist = iset.tolist()
self.assertEqual([2, 11, 12, 15], ilist)
def test_comparison_for_number_of_items(self):
"""checks the reindexation of year index"""
query_test = "SELECT count(*) FROM test_idxWORD%02dF" % get_index_id_from_index_name('year')
query_orig = "SELECT count(*) FROM idxWORD%02dF" % get_index_id_from_index_name('year')
num_orig = 0
num_test = 1
res = run_sql(query_test)
if res:
num_test = res[0][0]
res = run_sql(query_orig)
if res:
num_orig = res[0][0]
self.assertEqual(num_orig, num_test)
class BibIndexAuthorCountIndexTest(InvenioTestCase):
"""
    Checks author count index. Tests are different from those inside the WebSearch module because
they only test content and reindexation and not the search itself.
"""
test_counter = 0
reindexed = False
@classmethod
def setUp(self):
"""reindexation to new table"""
if not self.reindexed:
self.last_updated = reindex_word_tables_into_testtables(
'authorcount',
parameters = {'last_updated':'0000-00-00 00:00:00'})
self.reindexed = True
@classmethod
def tearDown(self):
"""cleaning up"""
self.test_counter += 1
if self.test_counter == 2:
remove_reindexed_word_testtables('authorcount')
reverse_changes = prepare_for_index_update(
get_index_id_from_index_name('authorcount'),
parameters = {'last_updated':self.last_updated})
run_sql(reverse_changes)
def test_occurrences_in_authorcount_index(self):
"""checks content of authorcount index for papers with 4 authors"""
word = '4'
query = "SELECT hitlist FROM test_idxWORD%02dF WHERE term='%s'" % (get_index_id_from_index_name('authorcount'), word)
res = run_sql(query)
ilist = []
if res:
iset = intbitset(res[0][0])
ilist = iset.tolist()
self.assertEqual([51, 54, 59, 66, 92, 96], ilist)
def test_comparison_for_number_of_items(self):
"""checks the reindexation of authorcount index"""
query_test = "SELECT count(*) FROM test_idxWORD%02dF" % get_index_id_from_index_name('authorcount')
query_orig = "SELECT count(*) FROM idxWORD%02dF" % get_index_id_from_index_name('authorcount')
num_orig = 0
num_test = 1
res = run_sql(query_test)
if res:
num_test = res[0][0]
res = run_sql(query_orig)
if res:
num_orig = res[0][0]
self.assertEqual(num_orig, num_test)
class BibIndexItemCountIndexTest(InvenioTestCase):
"""
Checks item count index. Checks a number of copies of books for records
as well as occurrences of particular number of copies in test data.
"""
def setUp(self):
index_name = 'itemcount'
index_id = get_index_id_from_index_name(index_name)
index_tags = get_word_tables([index_name])[0][2]
query = """SELECT termlist FROM idxWORD%02dR
WHERE id_bibrec=21""" % index_id
count = int(deserialize_via_marshal(run_sql(query)[0][0])[0])
# crcITEM table wasn't updated before initial indexing
# we need to update ItemCount index if count == 0
if count == 0:
wt = WordTable(index_name=index_name,
index_id=index_id,
fields_to_index=index_tags,
table_name_pattern='idxWORD%02dF',
wordtable_type = CFG_BIBINDEX_INDEX_TABLE_TYPE["Words"],
tag_to_tokenizer_map={})
wt.add_recIDs([[1, 100]], 10000)
def test_occurrences_in_itemcount_index_two_copies(self):
"""checks content of itemcount index for records with two copies of a book"""
word = '2'
query = "SELECT hitlist FROM idxWORD%02dF WHERE term='%s'" % (get_index_id_from_index_name('itemcount'), word)
res = run_sql(query)
ilist = []
if res:
iset = intbitset(res[0][0])
ilist = iset.tolist()
self.assertEqual([31, 34], ilist)
def test_records_for_number_of_copies_record1(self):
"""checks content of itemcount index for record: 1"""
query = "SELECT termlist FROM idxWORD%02dR WHERE id_bibrec=1" \
% get_index_id_from_index_name('itemcount')
res = run_sql(query)
self.assertEqual(deserialize_via_marshal(res[0][0]),['0'])
def test_records_for_number_of_copies_record30(self):
"""checks content of itemcount index for record: 30"""
query = "SELECT termlist FROM idxWORD%02dR WHERE id_bibrec=30" \
% get_index_id_from_index_name('itemcount')
res = run_sql(query)
self.assertEqual(deserialize_via_marshal(res[0][0]),['1'])
def test_records_for_number_of_copies_record32(self):
"""checks content of itemcount index for record: 32"""
query = "SELECT termlist FROM idxWORD%02dR WHERE id_bibrec=32" \
% get_index_id_from_index_name('itemcount')
res = run_sql(query)
self.assertEqual(deserialize_via_marshal(res[0][0]),['3'])
class BibIndexFiletypeIndexTest(InvenioTestCase):
"""
    Checks filetype index. Tests are different from those inside the WebSearch module because
they only test content and indexation and not the search itself.
"""
    def test_occurances_of_tif_filetype(self):
        """tests which records have a file with the 'tif' extension"""
query = "SELECT hitlist FROM idxWORD%02dF where term='tif'" \
% get_index_id_from_index_name('filetype')
res = run_sql(query)
value = []
if res:
iset = intbitset(res[0][0])
value = iset.tolist()
self.assertEqual(sorted(value), [66, 71])
def test_filetypes_of_records(self):
"""tests files extensions of record 1 and 77"""
query1 = "SELECT termlist FROM idxWORD%02dR WHERE id_bibrec=1" \
% get_index_id_from_index_name('filetype')
query2 = "SELECT termlist FROM idxWORD%02dR WHERE id_bibrec=77" \
% get_index_id_from_index_name('filetype')
res1 = run_sql(query1)
res2 = run_sql(query2)
set1 = deserialize_via_marshal(res1[0][0])
set2 = deserialize_via_marshal(res2[0][0])
self.assertEqual(set1, ['gif', 'jpg'])
self.assertEqual(set2, ['pdf', 'ps.gz'])
class BibIndexJournalIndexTest(InvenioTestCase):
"""
    Checks journal index. Tests are different from those inside the WebSearch module because
they only test content and reindexation and not the search itself.
"""
test_counter = 0
reindexed = False
@classmethod
def setUp(self):
"""reindexation to new table"""
if not self.reindexed:
self.last_updated = reindex_word_tables_into_testtables(
'journal',
parameters = {'last_updated':'0000-00-00 00:00:00'})
self.reindexed = True
@classmethod
def tearDown(self):
"""cleaning up"""
self.test_counter += 1
if self.test_counter == 2:
remove_reindexed_word_testtables('journal')
reverse_changes = prepare_for_index_update(
get_index_id_from_index_name('journal'),
parameters = {'last_updated':self.last_updated})
run_sql(reverse_changes)
def test_occurrences_in_journal_index(self):
"""checks content of journal index for phrase: 'prog. theor. phys.' """
word = 'prog. theor. phys.'
query = "SELECT hitlist FROM test_idxWORD%02dF WHERE term='%s'" % (get_index_id_from_index_name('journal'), word)
res = run_sql(query)
ilist = []
if res:
iset = intbitset(res[0][0])
ilist = iset.tolist()
self.assertEqual([86], ilist)
def test_comparison_for_number_of_items(self):
"""checks the reindexation of journal index"""
query_test = "SELECT count(*) FROM test_idxWORD%02dF" % get_index_id_from_index_name('journal')
query_orig = "SELECT count(*) FROM idxWORD%02dF" % get_index_id_from_index_name('journal')
num_orig = 0
num_test = 1
res = run_sql(query_test)
if res:
num_test = res[0][0]
res = run_sql(query_orig)
if res:
num_orig = res[0][0]
self.assertEqual(num_orig, num_test)
class BibIndexCJKTokenizerTitleIndexTest(InvenioTestCase):
"""
Checks CJK tokenization on title index.
"""
test_counter = 0
reindexed = False
@classmethod
def setUp(self):
"""reindexation to new table"""
if not self.reindexed:
self.last_updated = reindex_word_tables_into_testtables(
'title',
parameters = {'tokenizer':'BibIndexCJKTokenizer',
'last_updated':'0000-00-00 00:00:00'})
self.reindexed = True
@classmethod
def tearDown(self):
"""cleaning up"""
self.test_counter += 1
if self.test_counter == 2:
remove_reindexed_word_testtables('title')
reverse_changes = prepare_for_index_update(
get_index_id_from_index_name('title'),
parameters = {'tokenizer':'BibIndexDefaultTokenizer',
'last_updated':self.last_updated})
run_sql(reverse_changes)
def test_splliting_and_indexing_CJK_characters_forward_table(self):
"""CJK Tokenizer - searching for a CJK term in title index, forward table"""
query = "SELECT * from test_idxWORD%02dF where term='\xe6\x95\xac'" % get_index_id_from_index_name('title')
res = run_sql(query)
iset = []
if res:
iset = intbitset(res[0][2])
iset = iset.tolist()
self.assertEqual(iset, [104])
def test_splliting_and_indexing_CJK_characters_reversed_table(self):
"""CJK Tokenizer - comparing terms for record with chinese poetry in title index, reverse table"""
query = "SELECT * from test_idxWORD%02dR where id_bibrec='104'" % get_index_id_from_index_name('title')
res = run_sql(query)
iset = []
if res:
iset = deserialize_via_marshal(res[0][1])
self.assertEqual(iset, ['\xe6\x95\xac', '\xe7\x8d\xa8', '\xe4\xba\xad', '\xe5\x9d\x90'])
class BibIndexAuthorityRecordTest(InvenioTestCase):
"""Test if BibIndex correctly knows when to update the index for a
bibliographic record if it is dependent upon an authority record changed
within the given date range"""
def test_authority_record_recently_updated(self):
"""bibindex - reindexing after recently changed authority record"""
from invenio.legacy.bibindex.engine_config import (
CFG_BIBINDEX_ADDING_RECORDS_STARTED_STR,
CFG_BIBINDEX_UPDATE_MESSAGE)
authRecID = 118
bibRecID = 9
index_name = 'author'
table = "idxWORD%02dF" % get_index_id_from_index_name(index_name)
reindex_for_type_with_bibsched(index_name)
run_sql("UPDATE bibrec SET modification_date = now() WHERE id = %s", (authRecID,))
# run bibindex again
task_id = reindex_for_type_with_bibsched(index_name, force_all=True)
filename = os.path.join(cfg['CFG_LOGDIR'], 'bibsched_task_' + str(task_id) + '.log')
_file = open(filename)
text = _file.read() # small file
_file.close()
self.assertTrue(text.find(CFG_BIBINDEX_UPDATE_MESSAGE) >= 0)
self.assertTrue(text.find(CFG_BIBINDEX_ADDING_RECORDS_STARTED_STR % (table, 1, get_max_recid())) >= 0)
def test_authority_record_enriched_index(self):
"""bibindex - test whether reverse index for bibliographic record
contains words from referenced authority records"""
bibRecID = 9
authority_string = 'jonathan'
index_name = 'author'
table = "idxWORD%02dR" % get_index_id_from_index_name(index_name)
reindex_for_type_with_bibsched(index_name, force_all=True)
self.assertTrue(
authority_string in deserialize_via_marshal(
run_sql("SELECT termlist FROM %s WHERE id_bibrec = %s" % (table, bibRecID))[0][0]
)
)
def test_indexing_of_deleted_authority_record(self):
"""bibindex - no info for indexing from deleted authority record"""
recID = 119 # deleted record
control_nos = get_control_nos_from_recID(recID)
info = get_index_strings_by_control_no(control_nos[0])
self.assertEqual([], info)
def test_authority_record_get_values_by_bibrecID_from_tag(self):
"""bibindex - find authors in authority records for given bibrecID"""
tags = ['100__a']
bibRecID = 9
values = []
for tag in tags:
authority_tag = tag[0:3] + "__0"
control_nos = get_fieldvalues(bibRecID, authority_tag)
for control_no in control_nos:
new_strings = get_index_strings_by_control_no(control_no)
values.extend(new_strings)
self.assertTrue('Ellis, Jonathan Richard' in values)
def insert_record_one_and_second_revision():
"""Inserts test record no. 1 and a second revision for that record"""
rev1 = """<record>
<controlfield tag="001">123456789</controlfield>
<controlfield tag="005">20110101000000.0</controlfield>
<datafield tag ="100" ind1=" " ind2=" ">
<subfield code="a">Close, John</subfield>
<subfield code="u">DESY</subfield>
</datafield>
<datafield tag="245" ind1=" " ind2=" ">
<subfield code="a">Particles world</subfield>
</datafield>
</record>"""
rev1_final = rev1.replace('<controlfield tag="001">123456789</controlfield>','')
rev1_final = rev1_final.replace('<controlfield tag="005">20110101000000.0</controlfield>','')
rev2 = rev1.replace('<subfield code="a">Close, John</subfield>', '<subfield code="a">Dawkins, Richard</subfield>')
rev2 = rev2.replace('Particles world', 'Particles universe')
rec1 = xml_marc_to_records(rev1_final)
res = bibupload(rec1[0], opt_mode='insert')
_id = res[1]
rec = get_record(_id)
_rev = record_get_field_value(rec, '005', '', '')
#need to index for the first time
indexes = get_all_indexes(virtual=False)
wtabs = get_word_tables(indexes)
for index_id, index_name, index_tags in wtabs:
wordTable = WordTable(index_name=index_name,
index_id=index_id,
fields_to_index=index_tags,
table_name_pattern='idxWORD%02dF',
wordtable_type = CFG_BIBINDEX_INDEX_TABLE_TYPE["Words"],
tag_to_tokenizer_map={'8564_u': "BibIndexEmptyTokenizer"},
wash_index_terms=50)
wordTable.add_recIDs([[_id, _id]], 10000)
#upload the second revision, but don't index
rev2_final = rev2.replace('123456789', str(_id))
rev2_final = rev2_final.replace('20110101000000.0', _rev)
rec2 = xml_marc_to_records(rev2_final)
res = bibupload(rec2[0], opt_mode='correct')
return _id
def insert_record_two_and_second_revision():
"""Inserts test record no. 2 and a revision for that record"""
rev1 = """<record>
<controlfield tag="001">123456789</controlfield>
<controlfield tag="005">20110101000000.0</controlfield>
<datafield tag ="100" ind1=" " ind2=" ">
<subfield code="a">Locke, John</subfield>
<subfield code="u">UNITRA</subfield>
</datafield>
<datafield tag="245" ind1=" " ind2=" ">
<subfield code="a">Collision course</subfield>
</datafield>
</record>"""
rev1_final = rev1.replace('<controlfield tag="001">123456789</controlfield>','')
rev1_final = rev1_final.replace('<controlfield tag="005">20110101000000.0</controlfield>','')
rev2 = rev1.replace('Collision course', 'Course of collision')
rec1 = xml_marc_to_records(rev1_final)
res = bibupload(rec1[0], opt_mode='insert')
id_bibrec = res[1]
rec = get_record(id_bibrec)
_rev = record_get_field_value(rec, '005', '', '')
#need to index for the first time
indexes = get_all_indexes(virtual=False)
wtabs = get_word_tables(indexes)
for index_id, index_name, index_tags in wtabs:
wordTable = WordTable(index_name=index_name,
index_id=index_id,
fields_to_index=index_tags,
table_name_pattern='idxWORD%02dF',
wordtable_type = CFG_BIBINDEX_INDEX_TABLE_TYPE["Words"],
tag_to_tokenizer_map={'8564_u': "BibIndexEmptyTokenizer"},
wash_index_terms=50)
wordTable.add_recIDs([[id_bibrec, id_bibrec]], 10000)
#upload the second revision, but don't index
rev2_final = rev2.replace('123456789', str(id_bibrec))
rev2_final = rev2_final.replace('20110101000000.0', _rev)
rec2 = xml_marc_to_records(rev2_final)
res = bibupload(rec2[0], opt_mode='correct')
return id_bibrec
def create_index_tables(index_id):
query_create = """CREATE TABLE IF NOT EXISTS idxWORD%02dF (
id mediumint(9) unsigned NOT NULL auto_increment,
term varchar(50) default NULL,
hitlist longblob,
PRIMARY KEY (id),
UNIQUE KEY term (term)
) ENGINE=MyISAM"""
query_create_r = """CREATE TABLE IF NOT EXISTS idxWORD%02dR (
id_bibrec mediumint(9) unsigned NOT NULL,
termlist longblob,
type enum('CURRENT','FUTURE','TEMPORARY') NOT NULL default 'CURRENT',
PRIMARY KEY (id_bibrec,type)
) ENGINE=MyISAM"""
run_sql(query_create % index_id)
run_sql(query_create_r % index_id)
def drop_index_tables(index_id):
query_drop = """DROP TABLE IF EXISTS idxWORD%02d%s"""
run_sql(query_drop % (index_id, "F"))
run_sql(query_drop % (index_id, "R"))
def create_virtual_index(index_id, dependent_indexes):
"""creates new virtual index and binds it to specific dependent indexes"""
query = """INSERT INTO idxINDEX (id, name, tokenizer) VALUES (%s, 'testindex', 'BibIndexDefaultTokenizer')"""
run_sql(query % index_id)
query = """INSERT INTO idxINDEX_idxINDEX VALUES (%s, %s)"""
for index in dependent_indexes:
run_sql(query % (index_id, get_index_id_from_index_name(index)))
create_index_tables(index_id)
def remove_virtual_index(index_id):
"""removes tables and other traces after virtual index"""
drop_index_tables(index_id)
query = """DELETE FROM idxINDEX WHERE id=%s""" % index_id
run_sql(query)
query = """DELETE FROM idxINDEX_idxINDEX WHERE id_virtual=%s"""
run_sql(query % index_id)
class BibIndexFindingAffectedIndexes(InvenioTestCase):
"""
Checks if function 'find_affected_records_for_index'
works correctly.
"""
counter = 0
indexes = ['global', 'fulltext', 'caption', 'journal', 'miscellaneous', 'reportnumber', 'year']
@classmethod
def setUp(self):
if self.counter == 0:
self.last_updated = dict(get_last_updated_all_indexes())
res = run_sql("SELECT job_date FROM hstRECORD WHERE id_bibrec=10 AND affected_fields<>''")
self.hst_date = res[0][0]
date_to_set = self.hst_date - timedelta(seconds=1)
for index in self.indexes:
run_sql("""UPDATE idxINDEX SET last_updated=%s
WHERE name=%s""", (str(date_to_set), index))
@classmethod
def tearDown(self):
self.counter += 1
if self.counter >= 8:
for index in self.indexes:
run_sql("""UPDATE idxINDEX SET last_updated=%s
WHERE name=%s""", (self.last_updated[index], index))
def test_find_proper_indexes(self):
"""bibindex - checks if affected indexes are found correctly"""
records_for_indexes = find_affected_records_for_index(get_all_indexes(virtual=False),
[[1,20]])
self.assertEqual(sorted(['miscellaneous', 'fulltext', 'caption', 'journal', 'reportnumber', 'year']),
sorted(records_for_indexes.keys()))
def test_find_proper_recrods_for_miscellaneous_index(self):
"""bibindex - checks if affected recids are found correctly for miscellaneous index"""
records_for_indexes = find_affected_records_for_index(get_all_indexes(virtual=False),
[[1,20]])
self.assertEqual(records_for_indexes['miscellaneous'],
[1, 2, 3, 4, 5, 6, 7, 10, 12, 14, 17, 18])
def test_find_proper_records_for_year_index(self):
"""bibindex - checks if affected recids are found correctly for year index"""
records_for_indexes = find_affected_records_for_index(get_all_indexes(virtual=False),
[[1,20]])
self.assertEqual(records_for_indexes['year'],
[1, 2, 3, 4, 5, 6, 7, 10, 12, 14, 17, 18])
def test_find_proper_records_for_caption_index(self):
"""bibindex - checks if affected recids are found correctly for caption index"""
records_for_indexes = find_affected_records_for_index(get_all_indexes(virtual=False),
[[1,100]])
self.assertEqual(records_for_indexes['caption'],
[1, 2, 3, 4, 5, 6, 7, 10, 12, 55, 98])
def test_find_proper_records_for_journal_index(self):
"""bibindex - checks if affected recids are found correctly for journal index"""
records_for_indexes = find_affected_records_for_index(get_all_indexes(virtual=False),
[[1,100]])
self.assertEqual(records_for_indexes['journal'],
[6, 7, 10, 17, 18])
def test_find_proper_records_specified_only_year(self):
"""bibindex - checks if affected recids are found correctly for year index if we specify only year index as input"""
records_for_indexes = find_affected_records_for_index(["year"], [[1, 100]])
self.assertEqual(records_for_indexes["year"],
[1, 2, 3, 4, 5, 6, 7, 10, 12, 14, 17, 18, 55])
def test_find_proper_records_force_all(self):
"""bibindex - checks if all recids will be assigned to all specified indexes"""
records_for_indexes = find_affected_records_for_index(["year", "title"], [[10, 15]], True)
self.assertEqual(records_for_indexes["year"], records_for_indexes["title"])
self.assertEqual(records_for_indexes["year"], [10, 11, 12, 13, 14, 15])
def test_find_proper_records_nothing_for_title_index(self):
"""bibindex - checks if nothing was found for title index in range of records: 1 - 20"""
records_for_indexes = find_affected_records_for_index(["title"], [[1, 20]])
self.assertRaises(KeyError, lambda :records_for_indexes["title"])
class BibIndexIndexingAffectedIndexes(InvenioTestCase):
started = False
records = []
counter = 0
@classmethod
def setUp(self):
self.counter += 1
if not self.started:
self.records.append(insert_record_one_and_second_revision())
self.records.append(insert_record_two_and_second_revision())
records_for_indexes = find_affected_records_for_index(get_all_indexes(virtual=False),
[self.records])
wtabs = get_word_tables(records_for_indexes.keys())
for index_id, index_name, index_tags in wtabs:
wordTable = WordTable(index_name=index_name,
index_id=index_id,
fields_to_index=index_tags,
table_name_pattern='idxWORD%02dF',
wordtable_type = CFG_BIBINDEX_INDEX_TABLE_TYPE["Words"],
tag_to_tokenizer_map={'8564_u': "BibIndexEmptyTokenizer"},
wash_index_terms=50)
wordTable.add_recIDs([self.records], 10000)
self.started = True
@classmethod
def tearDown(self):
if self.counter == 3:
for rec in self.records:
wipe_out_record_from_all_tables(rec)
indexes = get_all_indexes(virtual=False)
wtabs = get_word_tables(indexes)
for index_id, index_name, index_tags in wtabs:
wordTable = WordTable(index_name=index_name,
index_id=index_id,
fields_to_index=index_tags,
table_name_pattern='idxWORD%02dF',
wordtable_type = CFG_BIBINDEX_INDEX_TABLE_TYPE["Words"],
tag_to_tokenizer_map={'8564_u': "BibIndexEmptyTokenizer"},
wash_index_terms=50)
wordTable.del_recIDs([self.records])
    def test_proper_content_in_title_index(self):
        """bibindex - checks reindexation of title index for test records"""
index_id = get_index_id_from_index_name('title')
query = """SELECT termlist FROM idxWORD%02dR WHERE id_bibrec IN (""" % (index_id,)
query = query + ", ".join(map(str, self.records)) + ")"
resp = run_sql(query)
affiliation_rec1 = deserialize_via_marshal(resp[0][0])
affiliation_rec2 = deserialize_via_marshal(resp[1][0])
self.assertEqual(['univers', 'particl'], affiliation_rec1)
self.assertEqual(['of', 'cours', 'collis'], affiliation_rec2)
    def test_proper_content_in_author_index(self):
        """bibindex - checks reindexation of author index for test records"""
index_id = get_index_id_from_index_name('author')
query = """SELECT termlist FROM idxWORD%02dR WHERE id_bibrec IN (""" % (index_id,)
query = query + ", ".join(map(str, self.records)) + ")"
resp = run_sql(query)
author_rec1 = deserialize_via_marshal(resp[0][0])
author_rec2 = deserialize_via_marshal(resp[1][0])
self.assertEqual(['dawkins', 'richard', ], author_rec1)
self.assertEqual(['john', 'locke'], author_rec2)
    def test_proper_content_in_global_index(self):
        """bibindex - checks reindexation of global index for test records"""
index_id = get_index_id_from_index_name('global')
query = """SELECT termlist FROM idxWORD%02dR WHERE id_bibrec IN (""" % (index_id,)
query = query + ", ".join(map(str, self.records)) + ")"
resp = run_sql(query)
global_rec1 = deserialize_via_marshal(resp[0][0])
global_rec2 = deserialize_via_marshal(resp[1][0])
self.assertEqual(True, 'dawkin' in global_rec1)
self.assertEqual(False, 'close' in global_rec1)
self.assertEqual(True, 'univers' in global_rec1)
self.assertEqual(True, 'john' in global_rec2)
self.assertEqual(False, 'john' in global_rec1)
class BibIndexFindingIndexesForTags(InvenioTestCase):
""" Tests function 'get_tag_indexes' """
def test_fulltext_tag_virtual_indexes_on(self):
"""bibindex - checks if 'get_tag_indexes' for tag 8564_u will find only 'fulltext' index"""
self.assertEqual(('fulltext',), zip(*get_tag_indexes('8564_u'))[1])
def test_title_tag_virtual_indexes_on(self):
"""bibindex - checks if 'get_tag_indexes' for tag 245__% will find also 'global' index"""
self.assertEqual(('title', 'exacttitle', 'global'), zip(*get_tag_indexes('245__%'))[1])
def test_title_tag_virtual_indexes_off(self):
"""bibindex - checks if 'get_tag_indexes' for tag 245__% wont find 'global' index (with virtual=False)"""
self.assertEqual(('title', 'exacttitle'), zip(*get_tag_indexes('245__%', virtual=False))[1])
def test_author_tag_virtual_indexes_on(self):
"""bibindex - checks 'get_tag_indexes' for tag '100'"""
self.assertEqual(('author', 'affiliation', 'exactauthor', 'firstauthor',
'exactfirstauthor', 'authorcount', 'authorityauthor',
'miscellaneous', 'global'),
zip(*get_tag_indexes('100'))[1])
def test_author_exact_tag_virtual_indexes_off(self):
"""bibindex - checks 'get_tag_indexes' for tag '100__a'"""
self.assertEqual(('author', 'exactauthor', 'firstauthor',
'exactfirstauthor', 'authorcount',
'authorityauthor', 'miscellaneous'),
zip(*get_tag_indexes('100__a', virtual=False))[1])
def test_wide_tag_virtual_indexes_off(self):
"""bibindex - checks 'get_tag_indexes' for tag like '86%'"""
self.assertEqual(('miscellaneous',), zip(*get_tag_indexes('86%', virtual=False))[1])
def test_909_tags_in_misc_index(self):
"""bibindex - checks connection between misc index and tags: 909C1%, 909C4%"""
self.assertEqual(('miscellaneous',), zip(*get_tag_indexes('909C1%', virtual=False))[1])
self.assertEqual('miscellaneous' in zip(*get_tag_indexes('909C4%', virtual=False))[1], False)
def test_year_tag_virtual_indexes_on(self):
"""bibindex - checks 'get_tag_indexes' for tag 909C0y"""
self.assertEqual(('year', 'global'), zip(*get_tag_indexes('909C0y'))[1])
def test_wide_tag_authority_index_virtual_indexes_off(self):
"""bibindex - checks 'get_tag_indexes' for tag like '15%'"""
self.assertEqual(('authoritysubject', 'miscellaneous'), zip(*get_tag_indexes('15%',virtual=False))[1])
class BibIndexFindingTagsForIndexes(InvenioTestCase):
""" Tests function 'get_index_tags' """
def test_tags_for_author_index(self):
"""bibindex - checks if 'get_index_tags' find proper tags for 'author' index """
self.assertEqual(get_index_tags('author'), ['100__a', '700__a'])
def test_tags_for_global_index_virtual_indexes_off(self):
"""bibindex - checks if 'get_index_tags' find proper tags for 'global' index """
self.assertEqual(get_index_tags('global', virtual=False),[])
def test_tags_for_global_index_virtual_indexes_on(self):
"""bibindex - checks if 'get_index_tags' find proper tags for 'global' index """
tags = get_index_tags('global')
self.assertEqual('86%' in tags, True)
self.assertEqual('100__a' in tags, True)
self.assertEqual('245__%' in tags, True)
class BibIndexGlobalIndexContentTest(InvenioTestCase):
""" Tests if virtual global index is correctly indexed"""
def is_part_of(self, container, content):
"""checks if content is a part of container"""
ctr = set(container)
cont = set(content)
return cont.issubset(ctr)
def test_title_index_compatibility_reversed_table(self):
"""bibindex - checks if the same words are in title and global index, reversed table"""
global_id = get_index_id_from_index_name('global')
title_id = get_index_id_from_index_name('title')
for rec in range(1, 4):
query = """SELECT termlist FROM idxWORD%02dR WHERE id_bibrec=%s""" % (title_id, rec)
res = run_sql(query)
termlist_title = deserialize_via_marshal(res[0][0])
query = """SELECT termlist FROM idxWORD%02dR WHERE id_bibrec=%s""" % (global_id, rec)
glob = run_sql(query)
termlist_global = deserialize_via_marshal(glob[0][0])
self.assertEqual(self.is_part_of(termlist_global, termlist_title), True)
def test_abstract_index_compatibility_reversed_table(self):
"""bibindex - checks if the same words are in abstract and global index, reversed table"""
global_id = get_index_id_from_index_name('global')
abstract_id = get_index_id_from_index_name('abstract')
for rec in range(6, 9):
query = """SELECT termlist FROM idxWORD%02dR WHERE id_bibrec=%s""" % (abstract_id, rec)
res = run_sql(query)
termlist_abstract = deserialize_via_marshal(res[0][0])
query = """SELECT termlist FROM idxWORD%02dR WHERE id_bibrec=%s""" % (global_id, rec)
glob = run_sql(query)
termlist_global = deserialize_via_marshal(glob[0][0])
self.assertEqual(self.is_part_of(termlist_global, termlist_abstract), True)
def test_misc_index_compatibility_reversed_table(self):
"""bibindex - checks if the same words are in misc and global index, reversed table"""
global_id = get_index_id_from_index_name('global')
misc_id = get_index_id_from_index_name('miscellaneous')
for rec in range(10, 14):
query = """SELECT termlist FROM idxWORD%02dR WHERE id_bibrec=%s""" % (misc_id, rec)
res = run_sql(query)
termlist_misc = deserialize_via_marshal(res[0][0])
query = """SELECT termlist FROM idxWORD%02dR WHERE id_bibrec=%s""" % (global_id, rec)
glob = run_sql(query)
termlist_global = deserialize_via_marshal(glob[0][0])
self.assertEqual(self.is_part_of(termlist_global, termlist_misc), True)
def test_journal_index_compatibility_forward_table(self):
"""bibindex - checks if the same words are in journal and global index, forward table"""
global_id = get_index_id_from_index_name('global')
journal_id = get_index_id_from_index_name('journal')
query = """SELECT term FROM idxWORD%02dF""" % journal_id
res = zip(*run_sql(query))[0]
query = """SELECT term FROM idxWORD%02dF""" % global_id
glob = zip(*run_sql(query))[0]
self.assertEqual(self.is_part_of(glob, res), True)
def test_keyword_index_compatibility_forward_table(self):
"""bibindex - checks if the same pairs are in keyword and global index, forward table"""
global_id = get_index_id_from_index_name('global')
keyword_id = get_index_id_from_index_name('keyword')
query = """SELECT term FROM idxPAIR%02dF""" % keyword_id
res = zip(*run_sql(query))[0]
query = """SELECT term FROM idxPAIR%02dF""" % global_id
glob = zip(*run_sql(query))[0]
self.assertEqual(self.is_part_of(glob, res), True)
def test_affiliation_index_compatibility_forward_table(self):
"""bibindex - checks if the same phrases are in affiliation and global index, forward table"""
global_id = get_index_id_from_index_name('global')
affiliation_id = get_index_id_from_index_name('affiliation')
query = """SELECT term FROM idxPHRASE%02dF""" % affiliation_id
res = zip(*run_sql(query))[0]
query = """SELECT term FROM idxPHRASE%02dF""" % global_id
glob = zip(*run_sql(query))[0]
self.assertEqual(self.is_part_of(glob, res), True)
class BibIndexVirtualIndexAlsoChangesTest(InvenioTestCase):
""" Tests if virtual index changes after changes in dependent index"""
counter = 0
indexes = ["title"]
_id = 39
@classmethod
def prepare_virtual_index(self):
"""creates new virtual index and binds it to specific normal index"""
create_virtual_index(self._id, self.indexes)
wtabs = get_word_tables(self.indexes)
for index_id, index_name, index_tags in wtabs:
wordTable = WordTable(index_name=index_name,
index_id=index_id,
fields_to_index=index_tags,
table_name_pattern='idxWORD%02dF',
wordtable_type=CFG_BIBINDEX_INDEX_TABLE_TYPE["Words"],
tag_to_tokenizer_map={'8564_u': "BibIndexEmptyTokenizer"},
wash_index_terms=50)
wordTable.add_recIDs([[1, 10]], 1000)
@classmethod
def reindex_virtual_index(self, special_tokenizer=False):
"""reindexes virtual and dependent indexes with different tokenizer"""
def tokenize_for_words(phrase):
return phrase.split(" ")
wtabs = get_word_tables(self.indexes)
for index_id, index_name, index_tags in wtabs:
wordTable = WordTable(index_name=index_name,
index_id=index_id,
fields_to_index=index_tags,
table_name_pattern='idxWORD%02dF',
wordtable_type=CFG_BIBINDEX_INDEX_TABLE_TYPE["Words"],
tag_to_tokenizer_map={'8564_u': "BibIndexEmptyTokenizer"},
wash_index_terms=50)
if special_tokenizer == True:
wordTable.default_tokenizer_function = tokenize_for_words
wordTable.add_recIDs([[1, 10]], 1000)
@classmethod
def setUp(self):
self.counter += 1
if self.counter == 1:
self.prepare_virtual_index()
elif self.counter == 2:
self.reindex_virtual_index(special_tokenizer=True)
@classmethod
def tearDown(self):
if self.counter == 3:
self.reindex_virtual_index()
elif self.counter == 4:
remove_virtual_index(self._id)
def test_virtual_index_1_has_10_records(self):
"""bibindex - checks if virtual index was filled with only ten records from title index"""
query = "SELECT count(*) FROM idxWORD%02dR" % self._id
self.assertEqual(10, run_sql(query)[0][0])
def test_virtual_index_2_correct_content_record_1(self):
"""bibindex - after reindexing with different tokenizer virtual index also changes - record 1"""
query = "SELECT termlist FROM idxWORD%02dR WHERE id_bibrec=%s" % (self._id, 1)
self.assertEqual('Higgs' in deserialize_via_marshal(run_sql(query)[0][0]), True)
def test_virtual_index_3_correct_content_record_3(self):
"""bibindex - after reindexing with different tokenizer virtual index also changes - record 3"""
query = "SELECT termlist FROM idxWORD%02dR WHERE id_bibrec=%s" % (self._id, 3)
self.assertEqual(['Conference', 'Biology', 'Molecular', 'European'],
deserialize_via_marshal(run_sql(query)[0][0]))
def test_virtual_index_4_cleaned_up(self):
"""bibindex - after reindexing with normal title tokenizer everything is back to normal"""
        # This is the version of the test for installations with the PyStemmer package;
        # without that package the word 'biology' is stemmed differently.
query = "SELECT termlist FROM idxWORD%02dR WHERE id_bibrec=%s" % (self._id, 3)
self.assertEqual(['biolog', 'molecular', 'confer', 'european'],
deserialize_via_marshal(run_sql(query)[0][0]))
class BibIndexVirtualIndexRemovalTest(InvenioTestCase):
counter = 0
indexes = ["authorcount", "journal", "year"]
_id = 40
@classmethod
def setUp(self):
self.counter += 1
if self.counter == 1:
create_virtual_index(self._id, self.indexes)
wtabs = get_word_tables(self.indexes)
for index_id, index_name, index_tags in wtabs:
wordTable = WordTable(index_name=index_name,
index_id=index_id,
fields_to_index=index_tags,
table_name_pattern='idxWORD%02dF',
wordtable_type=CFG_BIBINDEX_INDEX_TABLE_TYPE["Words"],
tag_to_tokenizer_map={'8564_u': "BibIndexFulltextTokenizer"},
wash_index_terms=50)
wordTable.add_recIDs([[1, 113]], 1000)
#removal part
w = WordTable("testindex", self._id, [], "idxWORD%02dF", CFG_BIBINDEX_INDEX_TABLE_TYPE["Words"], {}, 50)
w.remove_dependent_index(int(get_index_id_from_index_name("authorcount")))
@classmethod
def tearDown(self):
if self.counter == 9:
remove_virtual_index(self._id)
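    # Note: setUp builds the fixture (and removes the authorcount dependent index) only
    # on its first run (counter == 1), and tearDown drops the virtual index only after
    # the ninth and last test (counter == 9). The year-removal tests below mutate the
    # index further, so the test methods are order-dependent.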
def test_authorcount_removal_number_of_items(self):
"""bibindex - checks virtual index after authorcount index removal - number of items"""
query = """SELECT count(*) FROM idxWORD%02dF"""
res = run_sql(query % self._id)
self.assertEqual(157, res[0][0])
def test_authorcount_removal_common_terms_intact(self):
"""bibindex - checks virtual index after authorcount index removal - common terms"""
query = """SELECT term FROM idxWORD%02dF WHERE term IN ('10', '2', '4', '7')"""
res = run_sql(query % self._id)
self.assertEqual(4, len(res))
def test_authorcount_removal_no_315_term(self):
"""bibindex - checks virtual index after authorcount index removal - no '315' term in virtual index"""
query = """SELECT term FROM idxWORD%02dF WHERE term='315'"""
res = run_sql(query % self._id)
self.assertEqual(0, len(res))
def test_authorcount_removal_term_10_hitlist(self):
"""bibindex - checks virtual index after authorcount index removal - hitlist for '10' term"""
query = """SELECT hitlist FROM idxWORD%02dF WHERE term='10'"""
res = run_sql(query % self._id)
self.assertEqual([80, 92], intbitset(res[0][0]).tolist())
def test_authorcount_removal_term_1985_hitlist(self):
"""bibindex - checks virtual index after authorcount index removal - hitlist for '1985' term"""
query = """SELECT hitlist FROM idxWORD%02dF WHERE term='1985'"""
res = run_sql(query % self._id)
self.assertEqual([16, 18], intbitset(res[0][0]).tolist())
def test_authorcount_removal_record_16_hitlist(self):
"""bibindex - checks virtual index after authorcount index removal - termlist for record 16"""
query = """SELECT termlist FROM idxWORD%02dR WHERE id_bibrec=16"""
res = run_sql(query % self._id)
self.assertEqual(['1985'], deserialize_via_marshal(res[0][0]))
def test_authorcount_removal_record_10_hitlist(self):
"""bibindex - checks virtual index after authorcount index removal - termlist for record 10"""
query = """SELECT termlist FROM idxWORD%02dR WHERE id_bibrec=10"""
res = run_sql(query % self._id)
self.assertEqual(['2002', 'Eur. Phys. J., C'], deserialize_via_marshal(res[0][0]))
def test_year_removal_number_of_items(self):
"""bibindex - checks virtual index after year removal - number of items"""
#must be run after: tearDown
w = WordTable("testindex", self._id, [], "idxWORD%02dF", CFG_BIBINDEX_INDEX_TABLE_TYPE["Words"], {}, 50)
w.remove_dependent_index(int(get_index_id_from_index_name("year")))
query = """SELECT count(*) FROM idxWORD%02dF"""
res = run_sql(query % self._id)
self.assertEqual(134, res[0][0])
def test_year_removal_record_18_hitlist(self):
"""bibindex - checks virtual index after year removal - termlist for record 18"""
#must be run after: tearDown, test_year_removal_number_of_items
query = """SELECT termlist FROM idxWORD%02dR WHERE id_bibrec=18"""
res = run_sql(query % self._id)
self.assertEqual(['151', '357','1985', 'Phys. Lett., B 151 (1985) 357', 'Phys. Lett., B'],
deserialize_via_marshal(res[0][0]))
class BibIndexCLICallTest(InvenioTestCase):
"""Tests if calls to bibindex from CLI (bibsched deamon) are run correctly"""
def test_correct_message_for_wrong_index_names(self):
"""bibindex - checks if correct message for wrong index appears"""
index_name = "titlexrg"
task_id = reindex_for_type_with_bibsched(index_name, force_all=True)
filename = os.path.join(cfg['CFG_LOGDIR'], 'bibsched_task_' + str(task_id) + '.log')
fl = open(filename)
text = fl.read() # small file
fl.close()
self.assertTrue(text.find("Specified indexes can't be found.") >= 0)
def test_correct_message_for_up_to_date_indexes(self):
"""bibindex - checks if correct message for index up to date appears"""
index_name = "abstract"
task_id = reindex_for_type_with_bibsched(index_name)
filename = os.path.join(cfg['CFG_LOGDIR'], 'bibsched_task_' + str(task_id) + '.log')
fl = open(filename)
text = fl.read() # small file
fl.close()
self.assertTrue(text.find("Selected indexes/recIDs are up to date.") >= 0)
TEST_SUITE = make_test_suite(BibIndexRemoveStopwordsTest,
BibIndexRemoveLatexTest,
BibIndexRemoveHtmlTest,
BibIndexYearIndexTest,
BibIndexAuthorCountIndexTest,
BibIndexItemCountIndexTest,
BibIndexFiletypeIndexTest,
BibIndexJournalIndexTest,
BibIndexCJKTokenizerTitleIndexTest,
BibIndexAuthorityRecordTest,
BibIndexFindingAffectedIndexes,
BibIndexIndexingAffectedIndexes,
BibIndexFindingIndexesForTags,
BibIndexFindingTagsForIndexes,
BibIndexGlobalIndexContentTest,
BibIndexVirtualIndexAlsoChangesTest,
BibIndexVirtualIndexRemovalTest,
BibIndexCLICallTest)
if __name__ == "__main__":
run_test_suite(TEST_SUITE, warn_user=True)
| gpl-2.0 |
xujb/odoo | addons/mrp/company.py | 381 | 1383 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv,fields
class company(osv.osv):
_inherit = 'res.company'
_columns = {
'manufacturing_lead': fields.float('Manufacturing Lead Time', required=True,
help="Security days for each manufacturing operation."),
}
_defaults = {
'manufacturing_lead': lambda *a: 1.0,
}
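    # Illustrative only (hypothetical variables): MRP scheduling code reads this value
    # as ``company.manufacturing_lead`` (a float number of days) and adds it, together
    # with any product-specific delays, when computing planned manufacturing dates.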
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
x303597316/hue | apps/useradmin/src/useradmin/views.py | 13 | 36325 | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pwd
import grp
import logging
import threading
import subprocess
import json
import ldap
import ldap_access
from django.contrib.auth.models import User, Group
from desktop.lib.django_util import JsonResponse, render
from desktop.lib.exceptions_renderable import PopupException
from django.core.urlresolvers import reverse
from django.utils.encoding import smart_str
from django.utils.translation import ugettext as _
from django.forms.util import ErrorList
from django.shortcuts import redirect
from django.http import HttpResponse
import desktop.conf
from desktop.conf import LDAP
from hadoop.fs.exceptions import WebHdfsException
from useradmin.models import HuePermission, UserProfile, LdapGroup
from useradmin.models import get_profile, get_default_user_group
from useradmin.forms import SyncLdapUsersGroupsForm, AddLdapGroupsForm, AddLdapUsersForm,\
PermissionsEditForm, GroupEditForm, SuperUserChangeForm, UserChangeForm, validate_username,\
PasswordChangeForm
LOG = logging.getLogger(__name__)
__users_lock = threading.Lock()
__groups_lock = threading.Lock()
def list_users(request):
is_ldap_setup = bool(LDAP.LDAP_SERVERS.get()) or LDAP.LDAP_URL.get() is not None
return render("list_users.mako", request, {
'users': User.objects.all(),
'users_json': json.dumps(list(User.objects.values_list('id', flat=True))),
'request': request,
'is_ldap_setup': is_ldap_setup
})
def list_groups(request):
is_ldap_setup = bool(LDAP.LDAP_SERVERS.get()) or LDAP.LDAP_URL.get() is not None
return render("list_groups.mako", request, {
'groups': Group.objects.all(),
'groups_json': json.dumps(list(Group.objects.values_list('name', flat=True))),
'is_ldap_setup': is_ldap_setup
})
def list_permissions(request):
return render("list_permissions.mako", request, dict(permissions=HuePermission.objects.all()))
def list_for_autocomplete(request):
if request.ajax:
extended_user_object = request.GET.get('extend_user') == 'true'
users = User.objects.all().order_by('username')
if not request.GET.get('include_myself'):
users = users.exclude(pk=request.user.pk)
groups = Group.objects.all().order_by('name')
if request.GET.get('only_mygroups'):
groups = request.user.groups.all()
users = users[:2000]
groups = groups[:2000]
response = {
'users': massage_users_for_json(users, extended_user_object),
'groups': massage_groups_for_json(groups)
}
return JsonResponse(response)
return HttpResponse("")
def massage_users_for_json(users, extended=False):
simple_users = []
for user in users:
appendable = {
'id': user.id,
'username': user.username,
'first_name': user.first_name,
'last_name': user.last_name,
'email': user.email
}
if extended:
appendable['groups'] = massage_groups_for_json(user.groups.all())
simple_users.append(appendable)
return simple_users
def massage_groups_for_json(groups):
simple_groups = []
for group in groups:
simple_groups.append({
'id': group.id,
'name': group.name
})
return simple_groups
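# Illustrative shape of the autocomplete payload assembled above (values are made up):
#   {"users": [{"id": 1, "username": "alice", "first_name": "Alice",
#               "last_name": "Doe", "email": "alice@example.com"}],
#    "groups": [{"id": 3, "name": "default"}]}
# With extend_user_object set, each user entry additionally carries a "groups" list.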
def delete_user(request):
if not request.user.is_superuser:
raise PopupException(_("You must be a superuser to delete users."), error_code=401)
if request.method != 'POST':
raise PopupException(_('A POST request is required.'))
ids = request.POST.getlist('user_ids')
global __users_lock
__users_lock.acquire()
try:
if str(request.user.id) in ids:
raise PopupException(_("You cannot remove yourself."), error_code=401)
UserProfile.objects.filter(user__id__in=ids).delete()
User.objects.filter(id__in=ids).delete()
finally:
__users_lock.release()
request.info(_('The users were deleted.'))
return redirect(reverse(list_users))
def delete_group(request):
if not request.user.is_superuser:
raise PopupException(_("You must be a superuser to delete groups."), error_code=401)
if request.method == 'POST':
try:
group_names = request.POST.getlist('group_names')
# Get the default group before getting the group, because we may be
# trying to delete the default group, and it may not have been created yet.
default_group = get_default_user_group()
if default_group is not None and default_group.name in group_names:
raise PopupException(_("The default user group may not be deleted."), error_code=401)
Group.objects.filter(name__in=group_names).delete()
request.info(_('The groups were deleted.'))
return redirect(reverse(list_groups))
except Group.DoesNotExist:
raise PopupException(_("Group not found."), error_code=404)
else:
return render("delete_group.mako", request, {'path': request.path})
def require_change_password(self):
""" Return true if user has never logged in before. """
if 'desktop.auth.backend.AllowFirstUserDjangoBackend' in desktop.conf.AUTH.BACKEND.get() and \
self.first_login and desktop.conf.AUTH.CHANGE_DEFAULT_PASSWORD.get():
return True
def edit_user(request, username=None):
"""
edit_user(request, username = None) -> reply
@type request: HttpRequest
@param request: The request object
@type username: string
@param username: Default to None, when creating a new user
"""
if request.user.username != username and not request.user.is_superuser:
raise PopupException(_("You must be a superuser to add or edit another user."), error_code=401)
userprofile = get_profile(request.user)
if username is not None:
instance = User.objects.get(username=username)
else:
instance = None
if require_change_password(userprofile):
form_class = PasswordChangeForm
elif request.user.is_superuser:
form_class = SuperUserChangeForm
else:
form_class = UserChangeForm
if request.method == 'POST':
form = form_class(request.POST, instance=instance)
if request.user.is_superuser and request.user.username != username:
form.fields.pop("password_old")
if form.is_valid(): # All validation rules pass
if instance is None:
instance = form.save()
get_profile(instance)
else:
if username != form.instance.username:
raise PopupException(_("You cannot change a username."), error_code=401)
if request.user.username == username and not form.instance.is_active:
raise PopupException(_("You cannot make yourself inactive."), error_code=401)
global __users_lock
__users_lock.acquire()
try:
# form.instance (and instance) now carry the new data
orig = User.objects.get(username=username)
if orig.is_superuser:
if not form.instance.is_superuser or not form.instance.is_active:
_check_remove_last_super(orig)
else:
if form.instance.is_superuser and not request.user.is_superuser:
raise PopupException(_("You cannot make yourself a superuser."), error_code=401)
# All ok
form.save()
request.info(_('User information updated'))
finally:
__users_lock.release()
# Ensure home directory is created, if necessary.
if form.cleaned_data.get('ensure_home_directory'):
try:
ensure_home_directory(request.fs, instance.username)
except (IOError, WebHdfsException), e:
request.error(_('Cannot make home directory for user %s.') % instance.username)
if require_change_password(userprofile):
userprofile.first_login = False
userprofile.save()
if request.user.is_superuser:
return redirect(reverse('about:index'))
else:
return redirect(reverse('desktop.views.home'))
elif request.user.is_superuser:
return redirect(reverse(list_users))
else:
return redirect(reverse(edit_user, kwargs={'username': username}))
else:
default_user_group = get_default_user_group()
initial = {
'ensure_home_directory': instance is None,
'groups': default_user_group and [default_user_group] or []
}
form = form_class(instance=instance, initial=initial)
if request.user.is_superuser and request.user.username != username:
form.fields.pop("password_old")
if require_change_password(userprofile):
return render('change_password.mako', request, dict(form=form, username=username))
else:
return render('edit_user.mako', request, dict(form=form, username=username))
def edit_group(request, name=None):
"""
edit_group(request, name = None) -> reply
@type request: HttpRequest
@param request: The request object
@type name: string
@param name: Default to None, when creating a new group
Only superusers may create a group
"""
if not request.user.is_superuser:
raise PopupException(_("You must be a superuser to add or edit a group."), error_code=401)
if name is not None:
instance = Group.objects.get(name=name)
else:
instance = None
if request.method == 'POST':
form = GroupEditForm(request.POST, instance=instance)
if form.is_valid():
form.save()
request.info(_('Group information updated'))
return list_groups(request)
else:
form = GroupEditForm(instance=instance)
return render('edit_group.mako', request, dict(form=form, action=request.path, name=name))
def edit_permission(request, app=None, priv=None):
"""
edit_permission(request, app = None, priv = None) -> reply
@type request: HttpRequest
@param request: The request object
@type app: string
@param app: Default to None, specifies the app of the privilege
@type priv: string
  @param priv: Default to None, the action of the privilege
Only superusers may modify permissions
"""
if not request.user.is_superuser:
raise PopupException(_("You must be a superuser to change permissions."), error_code=401)
instance = HuePermission.objects.get(app=app, action=priv)
if request.method == 'POST':
form = PermissionsEditForm(request.POST, instance=instance)
if form.is_valid():
form.save()
request.info(_('Permission information updated'))
return render("list_permissions.mako", request, dict(permissions=HuePermission.objects.all()))
else:
form = PermissionsEditForm(instance=instance)
return render('edit_permissions.mako', request, dict(form=form, action=request.path, app=app, priv=priv))
def add_ldap_users(request):
"""
add_ldap_users(request) -> reply
Handler for importing LDAP users into the Hue database.
If a user has been previously imported, this will sync their user information.
If the LDAP request failed, the error message is generic right now.
"""
if not request.user.is_superuser:
raise PopupException(_("You must be a superuser to add another user."), error_code=401)
if request.method == 'POST':
form = AddLdapUsersForm(request.POST)
if form.is_valid():
username_pattern = form.cleaned_data['username_pattern']
import_by_dn = form.cleaned_data['dn']
server = form.cleaned_data.get('server')
try:
connection = ldap_access.get_connection_from_server(server)
users = import_ldap_users(connection, username_pattern, False, import_by_dn)
except ldap.LDAPError, e:
LOG.error("LDAP Exception: %s" % e)
raise PopupException(_('There was an error when communicating with LDAP'), detail=str(e))
except AssertionError, e:
raise PopupException(_('There was a problem with some of the LDAP information'), detail=str(e))
if users and form.cleaned_data['ensure_home_directory']:
for user in users:
try:
ensure_home_directory(request.fs, user.username)
except (IOError, WebHdfsException), e:
request.error(_("Cannot make home directory for user %s.") % user.username)
if not users:
errors = form._errors.setdefault('username_pattern', ErrorList())
errors.append(_('Could not get LDAP details for users in pattern %s.') % username_pattern)
else:
return redirect(reverse(list_users))
else:
form = AddLdapUsersForm()
return render('add_ldap_users.mako', request, dict(form=form))
def add_ldap_groups(request):
"""
add_ldap_groups(request) -> reply
Handler for importing LDAP groups into the Hue database.
If a group has been previously imported, this will sync membership within the
group with the LDAP server. If --import-members is specified, it will import
all unimported users.
"""
if not request.user.is_superuser:
raise PopupException(_("You must be a superuser to add another group."), error_code=401)
if request.method == 'POST':
form = AddLdapGroupsForm(request.POST)
if form.is_valid():
groupname_pattern = form.cleaned_data['groupname_pattern']
import_by_dn = form.cleaned_data['dn']
import_members = form.cleaned_data['import_members']
import_members_recursive = form.cleaned_data['import_members_recursive']
is_ensuring_home_directories = form.cleaned_data['ensure_home_directories']
server = form.cleaned_data.get('server')
try:
connection = ldap_access.get_connection_from_server(server)
groups = import_ldap_groups(connection, groupname_pattern, import_members=import_members,
import_members_recursive=import_members_recursive, sync_users=True,
import_by_dn=import_by_dn)
except ldap.LDAPError, e:
LOG.error(_("LDAP Exception: %s") % e)
raise PopupException(_('There was an error when communicating with LDAP'), detail=str(e))
except AssertionError, e:
raise PopupException(_('There was a problem with some of the LDAP information'), detail=str(e))
unique_users = set()
if is_ensuring_home_directories and groups:
for group in groups:
for user in group.user_set.all():
unique_users.add(user)
for user in unique_users:
try:
ensure_home_directory(request.fs, user.username)
except (IOError, WebHdfsException), e:
raise PopupException(_("Exception creating home directory for LDAP user %s in group %s.") % (user, group), detail=e)
if groups:
return redirect(reverse(list_groups))
else:
errors = form._errors.setdefault('groupname_pattern', ErrorList())
errors.append(_('Could not get LDAP details for groups in pattern %s') % groupname_pattern)
else:
form = AddLdapGroupsForm()
return render('edit_group.mako', request, dict(form=form, action=request.path, ldap=True))
def sync_ldap_users_groups(request):
"""
Handler for syncing the Hue database with LDAP users and groups.
This will not import any users or groups that don't already exist in Hue. All
user information and group memberships will be updated based on the LDAP
server's current state.
"""
if not request.user.is_superuser:
raise PopupException(_("You must be a superuser to sync the LDAP users/groups."), error_code=401)
if request.method == 'POST':
form = SyncLdapUsersGroupsForm(request.POST)
if form.is_valid():
is_ensuring_home_directory = form.cleaned_data['ensure_home_directory']
server = form.cleaned_data.get('server')
connection = ldap_access.get_connection_from_server(server)
sync_ldap_users_and_groups(connection, is_ensuring_home_directory, request.fs)
return redirect(reverse(list_users))
else:
form = SyncLdapUsersGroupsForm()
return render("sync_ldap_users_groups.mako", request, dict(path=request.path, form=form))
def sync_ldap_users_and_groups(connection, is_ensuring_home_directory=False, fs=None):
try:
users = sync_ldap_users(connection)
groups = sync_ldap_groups(connection)
except ldap.LDAPError, e:
LOG.error("LDAP Exception: %s" % e)
raise PopupException(_('There was an error when communicating with LDAP'), detail=str(e))
# Create home dirs for every user sync'd
if is_ensuring_home_directory:
for user in users:
try:
ensure_home_directory(fs, user.username)
except (IOError, WebHdfsException), e:
raise PopupException(_("The import may not be complete, sync again."), detail=e)
def import_ldap_users(connection, user_pattern, sync_groups, import_by_dn, server=None):
return _import_ldap_users(connection, user_pattern, sync_groups=sync_groups, import_by_dn=import_by_dn, server=server)
def import_ldap_groups(connection, group_pattern, import_members, import_members_recursive, sync_users, import_by_dn):
return _import_ldap_groups(connection, group_pattern, import_members, import_members_recursive, sync_users, import_by_dn)
def sync_ldap_users(connection):
"""
  Syncs LDAP user information for every user that was previously imported from
  LDAP (creation_method EXTERNAL). This will not import new users from LDAP;
  each existing user is simply re-imported to refresh its details.
"""
users = User.objects.filter(userprofile__creation_method=str(UserProfile.CreationMethod.EXTERNAL)).all()
for user in users:
_import_ldap_users(connection, user.username)
return users
def sync_ldap_groups(connection):
"""
  Syncs LDAP group memberships for every group that was previously imported from
  LDAP. This will not import new groups from LDAP; each existing LDAP group is
  re-imported to refresh its membership.
"""
groups = Group.objects.filter(group__in=LdapGroup.objects.all())
for group in groups:
_import_ldap_groups(connection, group.name)
return groups
def ensure_home_directory(fs, username):
"""
Adds a users home directory if it doesn't already exist.
Throws IOError, WebHdfsException.
"""
home_dir = '/user/%s' % username
fs.do_as_user(username, fs.create_home_dir, home_dir)
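# Illustrative usage (hypothetical username): ensure_home_directory(request.fs, 'jdoe')
# runs fs.create_home_dir('/user/jdoe') as 'jdoe' itself, so the new HDFS home
# directory ends up owned by that user.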
def sync_unix_users_and_groups(min_uid, max_uid, min_gid, max_gid, check_shell):
"""
Syncs the Hue database with the underlying Unix system, by importing users and
groups from 'getent passwd' and 'getent groups'. This should also pull in
users who are accessible via NSS.
"""
global __users_lock, __groups_lock
hadoop_groups = dict((group.gr_name, group) for group in grp.getgrall() \
if (group.gr_gid >= min_gid and group.gr_gid < max_gid) or group.gr_name == 'hadoop')
user_groups = dict()
__users_lock.acquire()
__groups_lock.acquire()
# Import groups
for name, group in hadoop_groups.iteritems():
try:
if len(group.gr_mem) != 0:
hue_group = Group.objects.get(name=name)
except Group.DoesNotExist:
hue_group = Group(name=name)
hue_group.save()
LOG.info("Created group %s" % (hue_group.name,))
# Build a map of user to groups that the user is a member of
members = group.gr_mem
for member in members:
if member not in user_groups:
user_groups[member] = [ hue_group ]
else:
user_groups[member].append(hue_group)
# Now let's import the users
hadoop_users = dict((user.pw_name, user) for user in pwd.getpwall() \
if (user.pw_uid >= min_uid and user.pw_uid < max_uid) or user.pw_name in grp.getgrnam('hadoop').gr_mem)
for username, user in hadoop_users.iteritems():
try:
if check_shell:
pw_shell = user.pw_shell
if subprocess.call([pw_shell, "-c", "echo"], stdout=subprocess.PIPE) != 0:
continue
hue_user = User.objects.get(username=username)
except User.DoesNotExist:
hue_user = User(username=username, password='!', is_active=True, is_superuser=False)
hue_user.set_unusable_password()
# We have to do a save here, because the user needs to exist before we can
# access the associated list of groups
hue_user.save()
if username not in user_groups:
hue_user.groups = []
else:
# Here's where that user to group map we built comes in handy
hue_user.groups = user_groups[username]
hue_user.save()
LOG.info(_("Synced user %s from Unix") % hue_user.username)
__users_lock.release()
__groups_lock.release()
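# Illustrative call (hypothetical bounds, normally supplied by the importing CLI/config):
#   sync_unix_users_and_groups(min_uid=500, max_uid=65334, min_gid=500, max_gid=65334,
#                              check_shell=True)
# Only accounts inside those uid/gid ranges (plus the 'hadoop' group and its members)
# are mirrored into Hue.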
def _check_remove_last_super(user_obj):
"""Raise an error if we're removing the last superuser"""
if not user_obj.is_superuser:
return
# Is there any other active superuser left?
all_active_su = User.objects.filter(is_superuser__exact = True,
is_active__exact = True)
num_active_su = all_active_su.count()
assert num_active_su >= 1, _("No active superuser configured.")
if num_active_su == 1:
raise PopupException(_("You cannot remove the last active superuser from the configuration."), error_code=401)
def _import_ldap_users(connection, username_pattern, sync_groups=False, import_by_dn=False, server=None):
"""
Import a user from LDAP. If import_by_dn is true, this will import the user by
the distinguished name, rather than the configured username attribute.
"""
user_info = connection.find_users(username_pattern, find_by_dn=import_by_dn)
if not user_info:
LOG.warn("Could not get LDAP details for users with pattern %s" % username_pattern)
return None
return _import_ldap_users_info(connection, user_info, sync_groups, import_by_dn, server)
def _import_ldap_users_info(connection, user_info, sync_groups=False, import_by_dn=False, server=None):
"""
Import user_info found through ldap_access.find_users.
"""
imported_users = []
for ldap_info in user_info:
    # Extra validation in case of import by DN, where the username may contain spaces or colons
try:
validate_username(ldap_info['username'])
user, created = ldap_access.get_or_create_ldap_user(username=ldap_info['username'])
profile = get_profile(user)
if not created and profile.creation_method == str(UserProfile.CreationMethod.HUE):
# This is a Hue user, and shouldn't be overwritten
LOG.warn(_('There was a naming conflict while importing user %(username)s') % {
'username': ldap_info['username']
})
return None
default_group = get_default_user_group()
if created and default_group is not None:
user.groups.add(default_group)
if 'first' in ldap_info:
user.first_name = ldap_info['first']
if 'last' in ldap_info:
user.last_name = ldap_info['last']
if 'email' in ldap_info:
user.email = ldap_info['email']
profile.creation_method = UserProfile.CreationMethod.EXTERNAL
profile.save()
user.save()
imported_users.append(user)
# sync groups
if sync_groups:
old_groups = set(user.groups.all())
new_groups = set()
current_ldap_groups = set()
ldap_config = desktop.conf.LDAP.LDAP_SERVERS.get()[server] if server else desktop.conf.LDAP
group_member_attr = ldap_config.GROUPS.GROUP_MEMBER_ATTR.get()
group_filter = ldap_config.GROUPS.GROUP_FILTER.get()
        # Search for groups based on group_member_attr=username and group_member_attr=dn;
        # covers AD, standard LDAP and posixAccount/posixGroup
if not group_filter.startswith('('):
group_filter = '(' + group_filter + ')'
# Sanitizing the DN before using in a Search filter
sanitized_dn = ldap.filter.escape_filter_chars(ldap_info['dn']).replace(r'\2a', r'*')
sanitized_dn = sanitized_dn.replace(r'\5c,', r'\5c\2c')
find_groups_filter = "(&" + group_filter + "(|(" + group_member_attr + "=" + ldap_info['username'] + ")(" + \
group_member_attr + "=" + sanitized_dn + ")))"
group_ldap_info = connection.find_groups("*", group_filter=find_groups_filter)
for group_info in group_ldap_info:
if Group.objects.filter(name=group_info['name']).exists():
# Add only if user isn't part of group.
current_ldap_groups.add(Group.objects.get(name=group_info['name']))
if not user.groups.filter(name=group_info['name']).exists():
groups = import_ldap_groups(connection, group_info['dn'], import_members=False, import_members_recursive=False, sync_users=True, import_by_dn=True)
if groups:
new_groups.update(groups)
# Remove out of date groups
remove_groups = old_groups - current_ldap_groups
remove_ldap_groups = LdapGroup.objects.filter(group__in=remove_groups)
remove_groups_filtered = [ldapgroup.group for ldapgroup in remove_ldap_groups]
for group in remove_groups_filtered:
user.groups.remove(group)
user.groups.add(*new_groups)
Group.objects.filter(group__in=remove_groups_filtered).delete()
except (AssertionError, RuntimeError) as e:
LOG.warn('%s: %s' % (ldap_info['username'], e.message))
return imported_users
def _import_ldap_members(connection, group, ldap_info, count=0, max_count=1):
if count >= max_count:
return None
# Find all users and groups of group.
users_info = connection.find_users_of_group(ldap_info['dn'])
groups_info = connection.find_groups_of_group(ldap_info['dn'])
posix_members = ldap_info['posix_members']
for user_info in users_info:
LOG.debug("Importing user %s into group %s" % (smart_str(user_info['dn']), smart_str(group.name)))
users = _import_ldap_users(connection, smart_str(user_info['dn']), import_by_dn=True)
group.user_set.add(*users)
for group_info in groups_info:
LOG.debug("Importing group %s" % smart_str(group_info['dn']))
groups = _import_ldap_groups(connection, smart_str(group_info['dn']), import_by_dn=True)
# Must find all members of subgroups
if len(groups) > 1:
LOG.warn('Found multiple groups for member %s.' % smart_str(group_info['dn']))
else:
for group in groups:
_import_ldap_members(connection, group, group_info, count+1, max_count)
for posix_member in posix_members:
LOG.debug("Importing posix user %s into group %s" % (smart_str(posix_member), smart_str(group.name)))
user_info = connection.find_users(posix_member, search_attr='uid', user_name_attr=desktop.conf.LDAP.USERS.USER_NAME_ATTR.get(), find_by_dn=False)
users = _import_ldap_users_info(connection, user_info)
if users:
LOG.debug("Adding member %s represented as users (should be a single user) %s to group %s" % (str(posix_member), str(users), str(group.name)))
group.user_set.add(*users)
def _sync_ldap_members(connection, group, ldap_info, count=0, max_count=1):
if count >= max_count:
return None
# Find all users and groups of group.
users_info = connection.find_users_of_group(ldap_info['dn'])
groups_info = connection.find_groups_of_group(ldap_info['dn'])
posix_members = ldap_info['posix_members']
for user_info in users_info:
LOG.debug("Synchronizing user %s with group %s" % (smart_str(user_info['dn']), smart_str(group.name)))
try:
user = ldap_access.get_ldap_user(username=user_info['username'])
group.user_set.add(user)
except User.DoesNotExist:
LOG.warn("Synchronizing user %s with group %s failed. User does not exist." % (smart_str(user_info['dn']), smart_str(group.name)))
for group_info in groups_info:
LOG.debug("Synchronizing group %s" % smart_str(group_info['dn']))
try:
group = Group.objects.get(name=group_info['name'])
_sync_ldap_members(connection, group, group_info, count+1, max_count)
except Group.DoesNotExist:
LOG.warn("Synchronizing group %s failed. Group does not exist." % smart_str(group.name))
for posix_member in posix_members:
LOG.debug("Synchronizing posix user %s with group %s" % (smart_str(posix_member), smart_str(group.name)))
users_info = connection.find_users(posix_member, search_attr='uid', user_name_attr=desktop.conf.LDAP.USERS.USER_NAME_ATTR.get(), find_by_dn=False)
for user_info in users_info:
try:
user = ldap_access.get_ldap_user(username=user_info['username'])
group.user_set.add(user)
except User.DoesNotExist:
LOG.warn("Synchronizing posix user %s with group %s failed. User does not exist." % (smart_str(posix_member), smart_str(group.name)))
def _import_ldap_nested_groups(connection, groupname_pattern, import_members=False, recursive_import_members=False,
sync_users=True, import_by_dn=False):
"""
Import a group from LDAP. If import_members is true, this will also import any
LDAP users that exist within the group. This will use nested groups logic.
A nested group is a group that is a member of another group.
  e.g. CN=subtest,OU=groups,DC=example,DC=COM is a member of CN=test,OU=groups,DC=example,DC=COM
  and they both have the object class "groupOfNames" (or some other object class for groups).
"""
if import_by_dn:
scope = ldap.SCOPE_BASE
else:
scope = ldap.SCOPE_SUBTREE
group_info = connection.find_groups(groupname_pattern, find_by_dn=import_by_dn, scope=scope)
if not group_info:
LOG.warn("Could not get LDAP details for group pattern %s" % groupname_pattern)
return None
groups = []
for ldap_info in group_info:
group, created = Group.objects.get_or_create(name=ldap_info['name'])
if not created and not LdapGroup.objects.filter(group=group).exists():
# This is a Hue group, and shouldn't be overwritten
LOG.warn(_('There was a naming conflict while importing group %(groupname)s in pattern %(groupname_pattern)s') % {
'groupname': ldap_info['name'],
'groupname_pattern': groupname_pattern
})
return None
LdapGroup.objects.get_or_create(group=group)
group.user_set.clear()
    # Find members and posix members for the group and its subgroups
# Import users and groups
max_count = recursive_import_members and desktop.conf.LDAP.NESTED_MEMBERS_SEARCH_DEPTH.get() or 1
if import_members:
_import_ldap_members(connection, group, ldap_info, max_count=max_count)
# Sync users
if sync_users:
_sync_ldap_members(connection, group, ldap_info, max_count=max_count)
group.save()
groups.append(group)
return groups
def _import_ldap_suboordinate_groups(connection, groupname_pattern, import_members=False, recursive_import_members=False,
sync_users=True, import_by_dn=False):
"""
Import a group from LDAP. If import_members is true, this will also import any
LDAP users that exist within the group. This will use suboordinate group logic.
A suboordinate group is a group that is a subentry of another group.
  e.g. CN=subtest,CN=test,OU=groups,DC=example,DC=COM is a suboordinate group of
  CN=test,OU=groups,DC=example,DC=COM
"""
if import_by_dn:
scope = ldap.SCOPE_BASE
else:
scope = ldap.SCOPE_SUBTREE
group_info = connection.find_groups(groupname_pattern, find_by_dn=import_by_dn, scope=scope)
if not group_info:
LOG.warn("Could not get LDAP details for group pattern %s" % groupname_pattern)
return None
groups = []
for ldap_info in group_info:
group, created = Group.objects.get_or_create(name=ldap_info['name'])
if not created and not LdapGroup.objects.filter(group=group).exists():
# This is a Hue group, and shouldn't be overwritten
LOG.warn(_('There was a naming conflict while importing group %(groupname)s in pattern %(groupname_pattern)s') % {
'groupname': ldap_info['name'],
'groupname_pattern': groupname_pattern
})
return None
LdapGroup.objects.get_or_create(group=group)
group.user_set.clear()
    # Find members and posix members for the group and its subgroups
members = ldap_info['members']
posix_members = ldap_info['posix_members']
# @TODO: Deprecate recursive_import_members as it may not be useful.
if import_members:
if recursive_import_members:
for sub_ldap_info in connection.find_groups(ldap_info['dn'], find_by_dn=True):
members += sub_ldap_info['members']
posix_members += sub_ldap_info['posix_members']
for member in members:
LOG.debug("Importing user %s" % smart_str(member))
group.user_set.add( *( _import_ldap_users(connection, member, import_by_dn=True) or [] ) )
# Sync users
if sync_users:
for member in members:
user_info = connection.find_users(member, find_by_dn=True)
if len(user_info) > 1:
LOG.warn('Found multiple users for member %s.' % member)
else:
for ldap_info in user_info:
try:
user = ldap_access.get_ldap_user(username=ldap_info['username'])
group.user_set.add(user)
except User.DoesNotExist:
pass
# Import/fetch posix users and groups
# Posix members
if posix_members:
if import_members:
for posix_member in posix_members:
LOG.debug("Importing user %s" % str(posix_member))
# posixGroup class defines 'memberUid' to be login names,
# which are defined by 'uid'.
user_info = connection.find_users(posix_member, search_attr='uid', user_name_attr=desktop.conf.LDAP.USERS.USER_NAME_ATTR.get(), find_by_dn=False)
users = _import_ldap_users_info(connection, user_info, import_by_dn=False)
if users:
LOG.debug("Adding member %s represented as users (should be a single user) %s to group %s" % (str(posix_member), str(users), str(group.name)))
group.user_set.add(*users)
if sync_users:
for posix_member in posix_members:
user_info = connection.find_users(posix_member, search_attr='uid', user_name_attr=desktop.conf.LDAP.USERS.USER_NAME_ATTR.get(), find_by_dn=False)
if len(user_info) > 1:
LOG.warn('Found multiple users for member %s.' % posix_member)
else:
for ldap_info in user_info:
try:
user = ldap_access.get_ldap_user(username=ldap_info['username'])
group.user_set.add(user)
except User.DoesNotExist:
pass
group.save()
groups.append(group)
return groups
def _import_ldap_groups(connection, groupname_pattern, import_members=False, recursive_import_members=False,
sync_users=True, import_by_dn=False):
"""
Import a group from LDAP. If import_members is true, this will also import any
LDAP users that exist within the group.
"""
if desktop.conf.LDAP.SUBGROUPS.get().lower() == 'suboordinate':
return _import_ldap_suboordinate_groups(connection=connection,
groupname_pattern=groupname_pattern,
import_members=import_members,
recursive_import_members=recursive_import_members,
sync_users=sync_users,
import_by_dn=import_by_dn)
else:
return _import_ldap_nested_groups(connection=connection,
groupname_pattern=groupname_pattern,
import_members=import_members,
recursive_import_members=recursive_import_members,
sync_users=sync_users,
import_by_dn=import_by_dn)
| apache-2.0 |
pataquets/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/port/driver.py | 113 | 25616 | # Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the Google name nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import base64
import copy
import logging
import re
import shlex
import sys
import time
import os
from webkitpy.common.system import path
from webkitpy.common.system.profiler import ProfilerFactory
_log = logging.getLogger(__name__)
class DriverInput(object):
def __init__(self, test_name, timeout, image_hash, should_run_pixel_test, args=None):
self.test_name = test_name
self.timeout = timeout # in ms
self.image_hash = image_hash
self.should_run_pixel_test = should_run_pixel_test
self.args = args or []
class DriverOutput(object):
"""Groups information about a output from driver for easy passing
and post-processing of data."""
strip_patterns = []
strip_patterns.append((re.compile('at \(-?[0-9]+,-?[0-9]+\) *'), ''))
strip_patterns.append((re.compile('size -?[0-9]+x-?[0-9]+ *'), ''))
strip_patterns.append((re.compile('text run width -?[0-9]+: '), ''))
strip_patterns.append((re.compile('text run width -?[0-9]+ [a-zA-Z ]+: '), ''))
strip_patterns.append((re.compile('RenderButton {BUTTON} .*'), 'RenderButton {BUTTON}'))
strip_patterns.append((re.compile('RenderImage {INPUT} .*'), 'RenderImage {INPUT}'))
strip_patterns.append((re.compile('RenderBlock {INPUT} .*'), 'RenderBlock {INPUT}'))
strip_patterns.append((re.compile('RenderTextControl {INPUT} .*'), 'RenderTextControl {INPUT}'))
strip_patterns.append((re.compile('\([0-9]+px'), 'px'))
strip_patterns.append((re.compile(' *" *\n +" *'), ' '))
strip_patterns.append((re.compile('" +$'), '"'))
strip_patterns.append((re.compile('- '), '-'))
strip_patterns.append((re.compile('\n( *)"\s+'), '\n\g<1>"'))
strip_patterns.append((re.compile('\s+"\n'), '"\n'))
strip_patterns.append((re.compile('scrollWidth [0-9]+'), 'scrollWidth'))
strip_patterns.append((re.compile('scrollHeight [0-9]+'), 'scrollHeight'))
strip_patterns.append((re.compile('scrollX [0-9]+'), 'scrollX'))
strip_patterns.append((re.compile('scrollY [0-9]+'), 'scrollY'))
strip_patterns.append((re.compile('scrolled to [0-9]+,[0-9]+'), 'scrolled'))
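    # Illustrative effect of the patterns above (hypothetical render-tree line):
    #   'RenderBlock {DIV} at (0,0) size 784x20'
    # is reduced to roughly 'RenderBlock {DIV}', so layout metrics that legitimately
    # vary between platforms do not produce spurious text diffs.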
def __init__(self, text, image, image_hash, audio, crash=False,
test_time=0, measurements=None, timeout=False, error='', crashed_process_name='??',
crashed_pid=None, crash_log=None, pid=None):
# FIXME: Args could be renamed to better clarify what they do.
self.text = text
self.image = image # May be empty-string if the test crashes.
self.image_hash = image_hash
self.image_diff = None # image_diff gets filled in after construction.
self.audio = audio # Binary format is port-dependent.
self.crash = crash
self.crashed_process_name = crashed_process_name
self.crashed_pid = crashed_pid
self.crash_log = crash_log
self.test_time = test_time
self.measurements = measurements
self.timeout = timeout
self.error = error # stderr output
self.pid = pid
def has_stderr(self):
return bool(self.error)
def strip_metrics(self):
if not self.text:
return
for pattern in self.strip_patterns:
self.text = re.sub(pattern[0], pattern[1], self.text)
class Driver(object):
"""object for running test(s) using DumpRenderTree/WebKitTestRunner."""
def __init__(self, port, worker_number, pixel_tests, no_timeout=False):
"""Initialize a Driver to subsequently run tests.
Typically this routine will spawn DumpRenderTree in a config
ready for subsequent input.
port - reference back to the port object.
worker_number - identifier for a particular worker/driver instance
"""
self._port = port
self._worker_number = worker_number
self._no_timeout = no_timeout
self._driver_tempdir = None
# WebKitTestRunner can report back subprocess crashes by printing
# "#CRASHED - PROCESSNAME". Since those can happen at any time
# and ServerProcess won't be aware of them (since the actual tool
# didn't crash, just a subprocess) we record the crashed subprocess name here.
self._crashed_process_name = None
self._crashed_pid = None
# WebKitTestRunner can report back subprocesses that became unresponsive
# This could mean they crashed.
self._subprocess_was_unresponsive = False
# stderr reading is scoped on a per-test (not per-block) basis, so we store the accumulated
# stderr output, as well as if we've seen #EOF on this driver instance.
# FIXME: We should probably remove _read_first_block and _read_optional_image_block and
# instead scope these locally in run_test.
self.error_from_test = str()
self.err_seen_eof = False
self._server_process = None
self._measurements = {}
if self._port.get_option("profile"):
profiler_name = self._port.get_option("profiler")
self._profiler = ProfilerFactory.create_profiler(self._port.host,
self._port._path_to_driver(), self._port.results_directory(), profiler_name)
else:
self._profiler = None
def __del__(self):
self.stop()
def run_test(self, driver_input, stop_when_done):
"""Run a single test and return the results.
Note that it is okay if a test times out or crashes and leaves
the driver in an indeterminate state. The upper layers of the program
are responsible for cleaning up and ensuring things are okay.
Returns a DriverOutput object.
"""
start_time = time.time()
self.start(driver_input.should_run_pixel_test, driver_input.args)
test_begin_time = time.time()
self.error_from_test = str()
self.err_seen_eof = False
command = self._command_from_driver_input(driver_input)
deadline = test_begin_time + int(driver_input.timeout) / 1000.0
self._server_process.write(command)
text, audio = self._read_first_block(deadline) # First block is either text or audio
image, actual_image_hash = self._read_optional_image_block(deadline) # The second (optional) block is image data.
crashed = self.has_crashed()
timed_out = self._server_process.timed_out
pid = self._server_process.pid()
if stop_when_done or crashed or timed_out:
# We call stop() even if we crashed or timed out in order to get any remaining stdout/stderr output.
# In the timeout case, we kill the hung process as well.
out, err = self._server_process.stop(self._port.driver_stop_timeout() if stop_when_done else 0.0)
if out:
text += out
if err:
self.error_from_test += err
self._server_process = None
crash_log = None
if crashed:
self.error_from_test, crash_log = self._get_crash_log(text, self.error_from_test, newer_than=start_time)
# If we don't find a crash log use a placeholder error message instead.
if not crash_log:
pid_str = str(self._crashed_pid) if self._crashed_pid else "unknown pid"
crash_log = 'No crash log found for %s:%s.\n' % (self._crashed_process_name, pid_str)
# If we were unresponsive append a message informing there may not have been a crash.
if self._subprocess_was_unresponsive:
crash_log += 'Process failed to become responsive before timing out.\n'
# Print stdout and stderr to the placeholder crash log; we want as much context as possible.
if self.error_from_test:
crash_log += '\nstdout:\n%s\nstderr:\n%s\n' % (text, self.error_from_test)
return DriverOutput(text, image, actual_image_hash, audio,
crash=crashed, test_time=time.time() - test_begin_time, measurements=self._measurements,
timeout=timed_out, error=self.error_from_test,
crashed_process_name=self._crashed_process_name,
crashed_pid=self._crashed_pid, crash_log=crash_log, pid=pid)
def _get_crash_log(self, stdout, stderr, newer_than):
return self._port._get_crash_log(self._crashed_process_name, self._crashed_pid, stdout, stderr, newer_than)
# FIXME: Seems this could just be inlined into callers.
@classmethod
def _command_wrapper(cls, wrapper_option):
# Hook for injecting valgrind or other runtime instrumentation,
# used by e.g. tools/valgrind/valgrind_tests.py.
return shlex.split(wrapper_option) if wrapper_option else []
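    # Illustrative (hypothetical wrapper value): passing --wrapper='valgrind --smc-check=all'
    # makes _command_wrapper() return ['valgrind', '--smc-check=all'], which cmd_line()
    # prepends to the driver binary.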
HTTP_DIR = "http/tests/"
HTTP_LOCAL_DIR = "http/tests/local/"
def is_http_test(self, test_name):
return test_name.startswith(self.HTTP_DIR) and not test_name.startswith(self.HTTP_LOCAL_DIR)
def test_to_uri(self, test_name):
"""Convert a test name to a URI."""
if not self.is_http_test(test_name):
return path.abspath_to_uri(self._port.host.platform, self._port.abspath_for_test(test_name))
relative_path = test_name[len(self.HTTP_DIR):]
# TODO(dpranke): remove the SSL reference?
if relative_path.startswith("ssl/"):
return "https://127.0.0.1:8443/" + relative_path
return "http://127.0.0.1:8000/" + relative_path
def uri_to_test(self, uri):
"""Return the base layout test name for a given URI.
This returns the test name for a given URI, e.g., if you passed in
"file:///src/LayoutTests/fast/html/keygen.html" it would return
"fast/html/keygen.html".
"""
if uri.startswith("file:///"):
prefix = path.abspath_to_uri(self._port.host.platform, self._port.layout_tests_dir())
if not prefix.endswith('/'):
prefix += '/'
return uri[len(prefix):]
if uri.startswith("http://"):
return uri.replace('http://127.0.0.1:8000/', self.HTTP_DIR)
if uri.startswith("https://"):
return uri.replace('https://127.0.0.1:8443/', self.HTTP_DIR)
raise NotImplementedError('unknown url type: %s' % uri)
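    # Illustrative mappings for the two methods above (made-up paths):
    #   test_to_uri('fast/html/keygen.html')    -> 'file:///.../LayoutTests/fast/html/keygen.html'
    #   test_to_uri('http/tests/misc/foo.html') -> 'http://127.0.0.1:8000/misc/foo.html'
    #   uri_to_test('http://127.0.0.1:8000/misc/foo.html') -> 'http/tests/misc/foo.html'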
def has_crashed(self):
if self._server_process is None:
return False
if self._crashed_process_name:
return True
if self._server_process.has_crashed():
self._crashed_process_name = self._server_process.name()
self._crashed_pid = self._server_process.pid()
return True
return False
def start(self, pixel_tests, per_test_args):
# FIXME: Callers shouldn't normally call this, since this routine
# may not be specifying the correct combination of pixel test and
# per_test args.
#
# The only reason we have this routine at all is so the perftestrunner
# can pause before running a test; it might be better to push that
# into run_test() directly.
if not self._server_process:
self._start(pixel_tests, per_test_args)
self._run_post_start_tasks()
def _setup_environ_for_driver(self, environment):
environment['DYLD_LIBRARY_PATH'] = self._port._build_path()
environment['DYLD_FRAMEWORK_PATH'] = self._port._build_path()
# FIXME: We're assuming that WebKitTestRunner checks this DumpRenderTree-named environment variable.
# FIXME: Commented out for now to avoid tests breaking. Re-enable after
# we cut over to NRWT
#environment['DUMPRENDERTREE_TEMP'] = str(self._port._driver_tempdir_for_environment())
environment['DUMPRENDERTREE_TEMP'] = str(self._driver_tempdir)
environment['LOCAL_RESOURCE_ROOT'] = self._port.layout_tests_dir()
if 'WEBKIT_OUTPUTDIR' in os.environ:
environment['WEBKIT_OUTPUTDIR'] = os.environ['WEBKIT_OUTPUTDIR']
if self._profiler:
environment = self._profiler.adjusted_environment(environment)
return environment
def _start(self, pixel_tests, per_test_args):
self.stop()
self._driver_tempdir = self._port._driver_tempdir()
server_name = self._port.driver_name()
environment = self._port.setup_environ_for_server(server_name)
environment = self._setup_environ_for_driver(environment)
self._crashed_process_name = None
self._crashed_pid = None
self._server_process = self._port._server_process_constructor(self._port, server_name, self.cmd_line(pixel_tests, per_test_args), environment)
self._server_process.start()
def _run_post_start_tasks(self):
# Remote drivers may override this to delay post-start tasks until the server has ack'd.
if self._profiler:
self._profiler.attach_to_pid(self._pid_on_target())
def _pid_on_target(self):
# Remote drivers will override this method to return the pid on the device.
return self._server_process.pid()
def stop(self):
if self._server_process:
self._server_process.stop(self._port.driver_stop_timeout())
self._server_process = None
if self._profiler:
self._profiler.profile_after_exit()
if self._driver_tempdir:
self._port._filesystem.rmtree(str(self._driver_tempdir))
self._driver_tempdir = None
def cmd_line(self, pixel_tests, per_test_args):
cmd = self._command_wrapper(self._port.get_option('wrapper'))
cmd.append(self._port._path_to_driver())
if self._port.get_option('gc_between_tests'):
cmd.append('--gc-between-tests')
if self._port.get_option('complex_text'):
cmd.append('--complex-text')
if self._port.get_option('threaded'):
cmd.append('--threaded')
if self._no_timeout:
cmd.append('--no-timeout')
# FIXME: We need to pass --timeout=SECONDS to WebKitTestRunner for WebKit2.
cmd.extend(self._port.get_option('additional_drt_flag', []))
cmd.extend(self._port.additional_drt_flag())
cmd.extend(per_test_args)
cmd.append('-')
return cmd
def _check_for_driver_crash(self, error_line):
if error_line == "#CRASHED\n":
# This is used on Windows to report that the process has crashed
# See http://trac.webkit.org/changeset/65537.
self._crashed_process_name = self._server_process.name()
self._crashed_pid = self._server_process.pid()
elif (error_line.startswith("#CRASHED - ")
or error_line.startswith("#PROCESS UNRESPONSIVE - ")):
# WebKitTestRunner uses this to report that the WebProcess subprocess crashed.
match = re.match('#(?:CRASHED|PROCESS UNRESPONSIVE) - (\S+)', error_line)
self._crashed_process_name = match.group(1) if match else 'WebProcess'
match = re.search('pid (\d+)', error_line)
pid = int(match.group(1)) if match else None
self._crashed_pid = pid
# FIXME: delete this after we're sure this code is working :)
_log.debug('%s crash, pid = %s, error_line = %s' % (self._crashed_process_name, str(pid), error_line))
if error_line.startswith("#PROCESS UNRESPONSIVE - "):
self._subprocess_was_unresponsive = True
self._port.sample_process(self._crashed_process_name, self._crashed_pid)
# We want to show this since it's not a regular crash and probably we don't have a crash log.
self.error_from_test += error_line
return True
return self.has_crashed()
def _command_from_driver_input(self, driver_input):
# FIXME: performance tests pass in full URLs instead of test names.
if driver_input.test_name.startswith('http://') or driver_input.test_name.startswith('https://') or driver_input.test_name == ('about:blank'):
command = driver_input.test_name
elif self.is_http_test(driver_input.test_name):
command = self.test_to_uri(driver_input.test_name)
else:
command = self._port.abspath_for_test(driver_input.test_name)
if sys.platform == 'cygwin':
command = path.cygpath(command)
assert not driver_input.image_hash or driver_input.should_run_pixel_test
# ' is the separator between arguments.
if self._port.supports_per_test_timeout():
command += "'--timeout'%s" % driver_input.timeout
if driver_input.should_run_pixel_test:
command += "'--pixel-test"
if driver_input.image_hash:
command += "'" + driver_input.image_hash
return command + "\n"
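    # Illustrative serialized command (hypothetical path/hash), using the "'" separator
    # described above, for a pixel test on a port with per-test timeouts:
    #   /abs/path/LayoutTests/fast/foo.html'--timeout'35000'--pixel-test'a1b2c3d4\n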
def _read_first_block(self, deadline):
# returns (text_content, audio_content)
block = self._read_block(deadline)
if block.malloc:
self._measurements['Malloc'] = float(block.malloc)
if block.js_heap:
self._measurements['JSHeap'] = float(block.js_heap)
if block.content_type == 'audio/wav':
return (None, block.decoded_content)
return (block.decoded_content, None)
def _read_optional_image_block(self, deadline):
# returns (image, actual_image_hash)
block = self._read_block(deadline, wait_for_stderr_eof=True)
if block.content and block.content_type == 'image/png':
return (block.decoded_content, block.content_hash)
return (None, block.content_hash)
def _read_header(self, block, line, header_text, header_attr, header_filter=None):
if line.startswith(header_text) and getattr(block, header_attr) is None:
value = line.split()[1]
if header_filter:
value = header_filter(value)
setattr(block, header_attr, value)
return True
return False
def _process_stdout_line(self, block, line):
if (self._read_header(block, line, 'Content-Type: ', 'content_type')
or self._read_header(block, line, 'Content-Transfer-Encoding: ', 'encoding')
or self._read_header(block, line, 'Content-Length: ', '_content_length', int)
or self._read_header(block, line, 'ActualHash: ', 'content_hash')
or self._read_header(block, line, 'DumpMalloc: ', 'malloc')
or self._read_header(block, line, 'DumpJSHeap: ', 'js_heap')):
return
# Note, we're not reading ExpectedHash: here, but we could.
# If the line wasn't a header, we just append it to the content.
block.content += line
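    # Illustrative stdout block as parsed above (hypothetical content): a run of header
    # lines drawn from 'Content-Type: ', 'Content-Transfer-Encoding: ', 'Content-Length: ',
    # 'ActualHash: ', 'DumpMalloc: ' and 'DumpJSHeap: ', followed by the raw content and a
    # terminating '#EOF' line, which _strip_eof() removes.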
def _strip_eof(self, line):
if line and line.endswith("#EOF\n"):
return line[:-5], True
return line, False
def _read_block(self, deadline, wait_for_stderr_eof=False):
block = ContentBlock()
out_seen_eof = False
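        # Read interleaved stdout/stderr until stdout has reported "#EOF" (and stderr
        # too, when wait_for_stderr_eof is set), bailing out early on timeout or crash.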
while not self.has_crashed():
if out_seen_eof and (self.err_seen_eof or not wait_for_stderr_eof):
break
if self.err_seen_eof:
out_line = self._server_process.read_stdout_line(deadline)
err_line = None
elif out_seen_eof:
out_line = None
err_line = self._server_process.read_stderr_line(deadline)
else:
out_line, err_line = self._server_process.read_either_stdout_or_stderr_line(deadline)
if self._server_process.timed_out or self.has_crashed():
break
if out_line:
assert not out_seen_eof
out_line, out_seen_eof = self._strip_eof(out_line)
if err_line:
assert not self.err_seen_eof
err_line, self.err_seen_eof = self._strip_eof(err_line)
if out_line:
if out_line[-1] != "\n":
_log.error("Last character read from DRT stdout line was not a newline! This indicates either a NRWT or DRT bug.")
content_length_before_header_check = block._content_length
self._process_stdout_line(block, out_line)
# FIXME: Unlike HTTP, DRT dumps the content right after printing a Content-Length header.
# Don't wait until we're done with headers, just read the binary blob right now.
if content_length_before_header_check != block._content_length:
block.content = self._server_process.read_stdout(deadline, block._content_length)
if err_line:
if self._check_for_driver_crash(err_line):
break
self.error_from_test += err_line
block.decode_content()
return block
class ContentBlock(object):
def __init__(self):
self.content_type = None
self.encoding = None
self.content_hash = None
self._content_length = None
# Content is treated as binary data even though the text output is usually UTF-8.
self.content = str() # FIXME: Should be bytearray() once we require Python 2.6.
self.decoded_content = None
self.malloc = None
self.js_heap = None
def decode_content(self):
if self.encoding == 'base64' and self.content is not None:
self.decoded_content = base64.b64decode(self.content)
else:
self.decoded_content = self.content
class DriverProxy(object):
"""A wrapper for managing two Driver instances, one with pixel tests and
one without. This allows us to handle plain text tests and ref tests with a
single driver."""
def __init__(self, port, worker_number, driver_instance_constructor, pixel_tests, no_timeout):
self._port = port
self._worker_number = worker_number
self._driver_instance_constructor = driver_instance_constructor
self._no_timeout = no_timeout
# FIXME: We shouldn't need to create a driver until we actually run a test.
self._driver = self._make_driver(pixel_tests)
self._driver_cmd_line = None
def _make_driver(self, pixel_tests):
return self._driver_instance_constructor(self._port, self._worker_number, pixel_tests, self._no_timeout)
# FIXME: this should be a @classmethod (or implemented on Port instead).
def is_http_test(self, test_name):
return self._driver.is_http_test(test_name)
# FIXME: this should be a @classmethod (or implemented on Port instead).
def test_to_uri(self, test_name):
return self._driver.test_to_uri(test_name)
# FIXME: this should be a @classmethod (or implemented on Port instead).
def uri_to_test(self, uri):
return self._driver.uri_to_test(uri)
def run_test(self, driver_input, stop_when_done):
base = self._port.lookup_virtual_test_base(driver_input.test_name)
if base:
virtual_driver_input = copy.copy(driver_input)
virtual_driver_input.test_name = base
virtual_driver_input.args = self._port.lookup_virtual_test_args(driver_input.test_name)
return self.run_test(virtual_driver_input, stop_when_done)
pixel_tests_needed = driver_input.should_run_pixel_test
cmd_line_key = self._cmd_line_as_key(pixel_tests_needed, driver_input.args)
if cmd_line_key != self._driver_cmd_line:
self._driver.stop()
self._driver = self._make_driver(pixel_tests_needed)
self._driver_cmd_line = cmd_line_key
return self._driver.run_test(driver_input, stop_when_done)
def has_crashed(self):
return self._driver.has_crashed()
def stop(self):
self._driver.stop()
# FIXME: this should be a @classmethod (or implemented on Port instead).
def cmd_line(self, pixel_tests=None, per_test_args=None):
return self._driver.cmd_line(pixel_tests, per_test_args or [])
def _cmd_line_as_key(self, pixel_tests, per_test_args):
return ' '.join(self.cmd_line(pixel_tests, per_test_args))
| bsd-3-clause |
vivek8943/GeoTweetSearch | api/web/twitter_instance.py | 1 | 17775 | import json
import logging
from threading import RLock
import time
from api.caching.instance_codes import consumeCode, unconsumeCode
from api.caching.instance_lifetime import removeInstance, addInstance, setInstanceTemporalSourceLastTime
from api.caching.temporal_analytics import getTemporalInfluenceCollection, addTemporalEntry
from api.caching.tweet_user import getUserCollection, getTweetCollection
from api.config import Configuration
from api.core.data_structures.timestamp import Timestamped
from api.core.threads import startTwitterThread
from api.core.threads_core import BaseThread
from api.core.utility import criticalSection, getUniqueId, joinStringsToLengthPretty, joinStringsGrammarPretty, joinListOfLists, splitList, getEpochMs
from api.geocode.geocode_shared import GeocodeResultAbstract
from api.twitter.feed import TwitterAuthentication, TwitterSession
from api.twitter.flow.data_core import DataCollection
logger = logging.getLogger(__name__)
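# Numeric codes used in the geographic setup JSON (inferred from how they are used
# below): the first group says what a drawn region is used for, the second how it
# is drawn.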
INFLUENCE_SOURCE = 1
GEOGRAPHICAL_FILTER = 2
INFLUENCE_SOURCE_AND_GEOGRAPHICAL_FILTER = 3
RECTANGLE_TYPE = 1
MARKER_TYPE = 2
def processRegionJsonString(regionJsonString):
regionJson = json.loads(regionJsonString)
results = {}
for item in regionJson:
displayType, entityType, coords, extraData = item
byDisplayType = results.setdefault(displayType,dict())
byEntityType = byDisplayType.setdefault(entityType, list())
byEntityType.append({'coords' : coords, 'extra_data' : extraData})
return results
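# Illustrative sketch of the expected shape (hypothetical values):
#   processRegionJsonString('[[1, 2, [[0.0, 0.0], [1.0, 1.0]], {}]]')
#   -> {1: {2: [{'coords': [[0.0, 0.0], [1.0, 1.0]], 'extra_data': {}}]}}
# i.e. results[displayType][entityType] is a list of coords/extra_data dicts.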
def getInfluenceSourceRectangles(processedRegionJson):
rectangleType = processedRegionJson.get(RECTANGLE_TYPE,None)
if rectangleType is None:
return list()
result = rectangleType.get(INFLUENCE_SOURCE,list())
result += rectangleType.get(INFLUENCE_SOURCE_AND_GEOGRAPHICAL_FILTER,list())
return result
def getInfluenceSourceMarkers(processedRegionJson):
markerType = processedRegionJson.get(MARKER_TYPE,None)
if markerType is None:
return list()
results = markerType.get(INFLUENCE_SOURCE,list())
results += markerType.get(INFLUENCE_SOURCE_AND_GEOGRAPHICAL_FILTER,list())
return results
def getInfluenceSourceIdsFromMarkers(markers):
results = list()
for item in markers:
results.append(item['extra_data']['cacheId'])
return results
def getCoordsFromItems(items):
results = list()
for item in items:
results.append(item['coords'])
return results
def formatCoordsForTwitter(coords):
return splitList(joinListOfLists(coords),2)
def getGeographicalFilterRectangles(processedRegionJson):
rectType = processedRegionJson.get(RECTANGLE_TYPE,None)
if rectType is None:
return list()
return rectType.get(GEOGRAPHICAL_FILTER,list()) + rectType.get(INFLUENCE_SOURCE_AND_GEOGRAPHICAL_FILTER,list())
class TwitterInstance(Timestamped):
def __init__(self,
instanceKey,
parentTwitterInstances,
twitterAuthentication,
geographicSetupString,
keywords,
instanceSetupCode,
startTime = None,
lastTemporalTimeIdBySource = None):
super(TwitterInstance, self).__init__(startTime)
logger.debug('Instance is %dms old' % self.construct_age)
assert isinstance(parentTwitterInstances,TwitterInstances)
assert isinstance(twitterAuthentication,TwitterAuthentication)
if lastTemporalTimeIdBySource is None:
lastTemporalTimeIdBySource = dict()
if instanceSetupCode is None or len(instanceSetupCode) == 0:
self.enable_shutdown_after_no_usage = True
self.instance_setup_code = None
else:
codeValid = consumeCode(instanceSetupCode)
if codeValid:
logger.info('Instance %s has loaded setup code %s' % (instanceKey, instanceSetupCode))
self.instance_setup_code = instanceSetupCode
self.enable_shutdown_after_no_usage = False
else:
logger.warn('Instance %s was provided with an invalid setup code: %s' % (instanceKey, instanceSetupCode))
raise ValueError('Invalid setup code: %s' % unicode(instanceSetupCode))
self.twitter_authentication = twitterAuthentication
self.oauth = TwitterInstance.makeAuthTuple(twitterAuthentication.access_token, twitterAuthentication.access_secret)
self.instance_key = instanceKey
self.geographic_setup_string = geographicSetupString
self.parent_twitter_instances = parentTwitterInstances
self.region_json = processRegionJsonString(self.geographic_setup_string)
self.geographical_filter_rectangles = formatCoordsForTwitter(getCoordsFromItems(getGeographicalFilterRectangles(self.region_json)))
self.influence_source_rectangles = getCoordsFromItems(getInfluenceSourceRectangles(self.region_json))
self.influence_source_cache_ids = getInfluenceSourceIdsFromMarkers(getInfluenceSourceMarkers(self.region_json))
self.keywords = keywords
self.is_shutdown = False
self.last_temporal_time_id_by_source = lastTemporalTimeIdBySource
        # Add to the parent first so that we only try to set this up one at a time;
        # i.e. if an instance takes an hour to start up and several requests come in,
        # we start that instance only once and ignore the subsequent requests.
self.parent_twitter_instances.add(self)
try:
twitterThread = startTwitterThread(self)
self.twitter_thread = twitterThread
self.setup_error = None
except TwitterSession.SessionException as e:
problemStr = 'Failed to establish twitter connection to streaming API with oauth: %s - instance could not be started, reason: %s' % (unicode(self.oauth), e)
logger.error(problemStr)
self.shutdownInstance()
self.setup_error = problemStr
return
addInstance(self.instance_key,
self.twitter_authentication.access_token,
self.twitter_authentication.access_secret,
self.geographic_setup_string,
self.keywords,
self.instance_setup_code,
self.constructed_at)
@staticmethod
def makeAuthTuple(oauthToken, oauthSecret):
return oauthToken, oauthSecret
def getShortDescription(self, capital):
def doCapital():
if capital:
return 'L' + self._short_description
else:
return 'l' + self._short_description
try:
return doCapital()
except AttributeError:
instanceKeywords = self.twitter_thread.twitter_feed.keywords
instanceNumGeographicAreas = len(self.twitter_thread.twitter_feed.locations) / 2 # divide by two because each area has two coordinates.
numInfluenceAreas = len(self.influence_source_rectangles)
numInfluenceLocations = len(self.influence_source_cache_ids)
if instanceKeywords is not None and len(instanceKeywords) > 0:
keywordsString = 'keywords: %s' % joinStringsToLengthPretty(instanceKeywords,Configuration.INSTANCE_SHORT_DESCRIPTION_KEYWORDS_MAX_LENGTH)
else:
keywordsString = None
if instanceNumGeographicAreas > 0:
geographicString = '%d geographic area' % instanceNumGeographicAreas
if instanceNumGeographicAreas > 1:
geographicString += 's'
else:
geographicString = None
if numInfluenceLocations > 0:
influenceLocationsString = '%d influence source location' % numInfluenceLocations
if numInfluenceLocations > 1:
influenceLocationsString += 's'
else:
influenceLocationsString = None
if numInfluenceAreas > 0:
influenceAreasString = '%d influence source area' % numInfluenceAreas
if numInfluenceAreas > 1:
influenceAreasString += 's'
else:
influenceAreasString = None
self._short_description = 'ooking at %s' % joinStringsGrammarPretty([keywordsString, geographicString, influenceLocationsString, influenceAreasString])
return doCapital()
def shutdownInstance(self, removeFromTwitterInstancesParent = True):
if self.is_shutdown:
return
if removeFromTwitterInstancesParent and self.parent_twitter_instances is not None:
# Will call this method with removeFromTwitterInstancesParent set to False.
self.parent_twitter_instances.removeTwitterInstanceByAuth(self.oauth)
else:
self.is_shutdown = True
instanceKey = unicode(self.instance_key)
logger.info('Shutdown instance called on instance %s' % instanceKey)
logger.info('Shutting down twitter thread on instance %s..' % instanceKey)
try:
self.twitter_thread.stop()
# might not have been initialized yet.
except AttributeError:
pass
        # Don't wait on the thread: we cannot find a way to terminate the POST request in
        # the requests API, so we have to wait for the next tweet or keep-alive request
        # from twitter before terminating.
        #self.twitter_thread.join()
        # Wait for the current write to finish, to avoid dropping the collection and then
        # having it recreated when the pending write completes.
time.sleep(1.5)
logger.info('Dropping twitter user data on instance %s..' % instanceKey)
getUserCollection(instanceKey).drop()
logger.info('Dropping twitter tweet data on instance %s..' % instanceKey)
getTweetCollection(instanceKey).drop()
logger.info('Dropping twitter temporal influence data on instance %s..' % instanceKey)
getTemporalInfluenceCollection(instanceKey).drop()
if self.instance_setup_code is not None:
            logger.info('Returning instance setup code %s on instance %s..' % (self.instance_setup_code, instanceKey))
unconsumeCode(self.instance_setup_code)
logger.info('Removing instance from instance %s lifetime collection..' % instanceKey)
removeInstance(instanceKey)
logger.info('Instance %s cleaned up successfully' % instanceKey)
def addTemporalEntry(self, temporalCollection, timeId, userProviderId, userPlaceId, followerProviderId, followerPlaceId, followerPlaceType):
if self.is_shutdown:
return
tupleUserCacheId = GeocodeResultAbstract.buildCacheIdTuple(userProviderId, userPlaceId)
dictUserCacheId = GeocodeResultAbstract.buildCacheId(userProviderId, userPlaceId)
lastTimeId = self.last_temporal_time_id_by_source.get(tupleUserCacheId,None)
destination = '%s_%s' % (followerPlaceType, followerPlaceId)
addTemporalEntry(temporalCollection, lastTimeId, timeId, dictUserCacheId, destination, followerProviderId)
self.last_temporal_time_id_by_source[tupleUserCacheId] = timeId
setInstanceTemporalSourceLastTime(self.instance_key, userProviderId, userPlaceId, timeId)
class TwitterInstances(object):
def __init__(self, dataCollection, tweetProvider):
super(TwitterInstances, self).__init__()
assert isinstance(dataCollection, DataCollection)
self._by_oauth = dict()
self._by_instance_key = dict()
self._lock = RLock()
self.data_collection = dataCollection
self.tweet_provider = tweetProvider
def add(self, twitterInstance):
assert isinstance(twitterInstance, TwitterInstance)
self._lock.acquire()
try:
self._by_instance_key[twitterInstance.instance_key] = twitterInstance
self._by_oauth[twitterInstance.oauth] = twitterInstance
finally:
self._lock.release()
def getUniqueInstanceKey(self):
def func():
instanceKey = unicode(getUniqueId())
while instanceKey in self._by_instance_key:
instanceKey = unicode(getUniqueId())
return instanceKey
return criticalSection(self._lock, func)
def createInstance(self, twitterAuthentication, geographic_setup_string, keywords, instance_setup_code):
def func():
twitterInstance = TwitterInstance(self.getUniqueInstanceKey(),
self,
twitterAuthentication,
geographic_setup_string,
keywords,
instance_setup_code)
return twitterInstance
return criticalSection(self._lock, func)
def getInstanceList(self):
return criticalSection(self._lock, lambda: list(self._by_instance_key.values()))
def isInstanceKeyInUse(self, instanceKey):
return criticalSection(self._lock, lambda: instanceKey in self._by_instance_key)
def isAuthInUse(self, oauth):
return criticalSection(self._lock, lambda: oauth in self._by_oauth)
def getInstanceByInstanceKey(self, instanceKey):
result = criticalSection(self._lock, lambda: self._by_instance_key.get(instanceKey, None))
return result
def getInstanceByAuth(self, oauth):
result = criticalSection(self._lock, lambda: self._by_oauth.get(oauth, None))
return result
def removeTwitterInstanceByInstanceKey(self, instanceKey):
self._lock.acquire()
try:
instance = self._by_instance_key.get(instanceKey)
if instance is None:
return None
assert isinstance(instance, TwitterInstance)
# Remove from dictionaries first so that it is no
# longer accessible from the rest of the application.
del self._by_instance_key[instanceKey]
del self._by_oauth[instance.oauth]
finally:
self._lock.release()
# Cleanup instance.
instance.shutdownInstance(False)
self.data_collection.removeInstanceData(instanceKey)
return instance
def removeTwitterInstanceByAuth(self, oauth):
self._lock.acquire()
try:
instance = self._by_oauth.get(oauth)
if instance is None:
return None
assert isinstance(instance, TwitterInstance)
# Remove from dictionaries first so that it is no
# longer accessible from the rest of the application.
del self._by_oauth[oauth]
del self._by_instance_key[instance.instance_key]
print unicode(self._by_instance_key)
finally:
self._lock.release()
# Cleanup instance.
instance.shutdownInstance(False)
self.data_collection.removeInstanceData(unicode(instance.instance_key))
return instance
def restartTwitterInstanceByAuth(twitterInstances, auth):
twitterInstance = twitterInstances.removeTwitterInstanceByAuth(auth)
if twitterInstance is None:
problemStr = 'Failed to remove instance with auth: %s' % unicode(auth)
logger.error(problemStr)
return None
assert isinstance(twitterInstance,TwitterInstance)
return TwitterInstance(twitterInstance.instance_key,
twitterInstance.parent_twitter_instances,
twitterInstance.twitter_authentication,
twitterInstance.geographic_setup_string,
twitterInstance.keywords,
twitterInstance.instance_setup_code)
def restartTwitterInstance(twitterInstances, twitterInstance):
restartTwitterInstanceByAuth(twitterInstances, twitterInstance.oauth)
class TwitterInstancesPruner(BaseThread):
""" This utility thread loops through all the twitter instances
and checks when they were last used, cleaning up old ones. """
def __init__(self, maxInactive, maxConstructAge, instances):
super(TwitterInstancesPruner, self).__init__('TwitterInstancesPruner',
criticalThread = True)
assert isinstance(instances, TwitterInstances)
self.max_inactive = maxInactive
self.max_construct_age = maxConstructAge
self.instances = instances
def _run(self):
while True:
copy = criticalSection(self.instances._lock, lambda: dict(self.instances._by_oauth))
for oauth, instance in copy.iteritems():
assert isinstance(instance, TwitterInstance)
if instance.enable_shutdown_after_no_usage and instance.age > self.max_inactive:
# I want to see this in the error log.
logger.critical('Cleaning up instance with oauth: %s, it has been inactive for > %dms' % (unicode(oauth), self.max_inactive))
self.instances.removeTwitterInstanceByAuth(oauth)
if instance.construct_age > self.max_construct_age:
logger.critical('Restarting instance with oauth: %s, it has been alive > %dms' % (unicode(oauth), self.max_construct_age))
result = restartTwitterInstanceByAuth(self.instances, oauth)
                    if result is None:
                        logger.error('Failed to restart instance with oauth: %s' % unicode(oauth))
            time.sleep(2)
| apache-2.0 |
psawaya/Mental-Ginger | django/core/servers/basehttp.py | 153 | 25156 | """
BaseHTTPServer that implements the Python WSGI protocol (PEP 333, rev 1.21).
Adapted from wsgiref.simple_server: http://svn.eby-sarna.com/wsgiref/
This is a simple server for use in testing or debugging Django apps. It hasn't
been reviewed for security issues. Don't use it for production use.
"""
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
import os
import re
import socket
import sys
import urllib
import warnings
from django.core.management.color import color_style
from django.utils.http import http_date
from django.utils._os import safe_join
from django.views import static
from django.contrib.staticfiles import handlers
__version__ = "0.1"
__all__ = ['WSGIServer','WSGIRequestHandler']
server_version = "WSGIServer/" + __version__
sys_version = "Python/" + sys.version.split()[0]
software_version = server_version + ' ' + sys_version
class WSGIServerException(Exception):
pass
class FileWrapper(object):
"""Wrapper to convert file-like objects to iterables"""
def __init__(self, filelike, blksize=8192):
self.filelike = filelike
self.blksize = blksize
if hasattr(filelike,'close'):
self.close = filelike.close
def __getitem__(self,key):
data = self.filelike.read(self.blksize)
if data:
return data
raise IndexError
def __iter__(self):
return self
def next(self):
data = self.filelike.read(self.blksize)
if data:
return data
raise StopIteration
# Regular expression that matches `special' characters in parameters, the
# existence of which force quoting of the parameter value.
tspecials = re.compile(r'[ \(\)<>@,;:\\"/\[\]\?=]')
def _formatparam(param, value=None, quote=1):
"""Convenience function to format and return a key=value pair.
This will quote the value if needed or if quote is true.
"""
if value is not None and len(value) > 0:
if quote or tspecials.search(value):
value = value.replace('\\', '\\\\').replace('"', r'\"')
return '%s="%s"' % (param, value)
else:
return '%s=%s' % (param, value)
else:
return param
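# For illustration: _formatparam('filename', 'bud.gif') returns 'filename="bud.gif"',
# while _formatparam('attachment') returns just 'attachment'.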
class Headers(object):
"""Manage a collection of HTTP response headers"""
def __init__(self,headers):
if not isinstance(headers, list):
raise TypeError("Headers must be a list of name/value tuples")
self._headers = headers
def __len__(self):
"""Return the total number of headers, including duplicates."""
return len(self._headers)
def __setitem__(self, name, val):
"""Set the value of a header."""
del self[name]
self._headers.append((name, val))
def __delitem__(self,name):
"""Delete all occurrences of a header, if present.
Does *not* raise an exception if the header is missing.
"""
name = name.lower()
self._headers[:] = [kv for kv in self._headers if kv[0].lower()<>name]
def __getitem__(self,name):
"""Get the first header value for 'name'
Return None if the header is missing instead of raising an exception.
        Note that if the header appeared multiple times, exactly which
        occurrence gets returned is undefined.  Use get_all() to get all
the values matching a header field name.
"""
return self.get(name)
def has_key(self, name):
"""Return true if the message contains the header."""
return self.get(name) is not None
__contains__ = has_key
def get_all(self, name):
"""Return a list of all the values for the named field.
These will be sorted in the order they appeared in the original header
list or were added to this instance, and may contain duplicates. Any
fields deleted and re-inserted are always appended to the header list.
If no fields exist with the given name, returns an empty list.
"""
name = name.lower()
return [kv[1] for kv in self._headers if kv[0].lower()==name]
def get(self,name,default=None):
"""Get the first header value for 'name', or return 'default'"""
name = name.lower()
for k,v in self._headers:
if k.lower()==name:
return v
return default
def keys(self):
"""Return a list of all the header field names.
These will be sorted in the order they appeared in the original header
list, or were added to this instance, and may contain duplicates.
Any fields deleted and re-inserted are always appended to the header
list.
"""
return [k for k, v in self._headers]
def values(self):
"""Return a list of all header values.
These will be sorted in the order they appeared in the original header
list, or were added to this instance, and may contain duplicates.
Any fields deleted and re-inserted are always appended to the header
list.
"""
return [v for k, v in self._headers]
def items(self):
"""Get all the header fields and values.
These will be sorted in the order they were in the original header
list, or were added to this instance, and may contain duplicates.
Any fields deleted and re-inserted are always appended to the header
list.
"""
return self._headers[:]
def __repr__(self):
return "Headers(%s)" % `self._headers`
def __str__(self):
"""str() returns the formatted headers, complete with end line,
suitable for direct HTTP transmission."""
return '\r\n'.join(["%s: %s" % kv for kv in self._headers]+['',''])
def setdefault(self,name,value):
"""Return first matching header value for 'name', or 'value'
If there is no header named 'name', add a new header with name 'name'
and value 'value'."""
result = self.get(name)
if result is None:
self._headers.append((name,value))
return value
else:
return result
def add_header(self, _name, _value, **_params):
"""Extended header setting.
_name is the header field to add. keyword arguments can be used to set
additional parameters for the header field, with underscores converted
to dashes. Normally the parameter will be added as key="value" unless
value is None, in which case only the key will be added.
Example:
h.add_header('content-disposition', 'attachment', filename='bud.gif')
Note that unlike the corresponding 'email.Message' method, this does
*not* handle '(charset, language, value)' tuples: all values must be
strings or None.
"""
parts = []
if _value is not None:
parts.append(_value)
for k, v in _params.items():
if v is None:
parts.append(k.replace('_', '-'))
else:
parts.append(_formatparam(k.replace('_', '-'), v))
self._headers.append((_name, "; ".join(parts)))
def guess_scheme(environ):
"""Return a guess for whether 'wsgi.url_scheme' should be 'http' or 'https'
"""
if environ.get("HTTPS") in ('yes','on','1'):
return 'https'
else:
return 'http'
_hop_headers = {
'connection':1, 'keep-alive':1, 'proxy-authenticate':1,
'proxy-authorization':1, 'te':1, 'trailers':1, 'transfer-encoding':1,
'upgrade':1
}
def is_hop_by_hop(header_name):
"""Return true if 'header_name' is an HTTP/1.1 "Hop-by-Hop" header"""
return header_name.lower() in _hop_headers
class ServerHandler(object):
"""Manage the invocation of a WSGI application"""
# Configuration parameters; can override per-subclass or per-instance
wsgi_version = (1,0)
wsgi_multithread = True
wsgi_multiprocess = True
wsgi_run_once = False
origin_server = True # We are transmitting direct to client
http_version = "1.0" # Version that should be used for response
server_software = software_version
# os_environ is used to supply configuration from the OS environment:
# by default it's a copy of 'os.environ' as of import time, but you can
# override this in e.g. your __init__ method.
os_environ = dict(os.environ.items())
# Collaborator classes
wsgi_file_wrapper = FileWrapper # set to None to disable
headers_class = Headers # must be a Headers-like class
# Error handling (also per-subclass or per-instance)
traceback_limit = None # Print entire traceback to self.get_stderr()
error_status = "500 INTERNAL SERVER ERROR"
error_headers = [('Content-Type','text/plain')]
# State variables (don't mess with these)
status = result = None
headers_sent = False
headers = None
bytes_sent = 0
def __init__(self, stdin, stdout, stderr, environ, multithread=True,
multiprocess=False):
self.stdin = stdin
self.stdout = stdout
self.stderr = stderr
self.base_env = environ
self.wsgi_multithread = multithread
self.wsgi_multiprocess = multiprocess
def run(self, application):
"""Invoke the application"""
# Note to self: don't move the close()! Asynchronous servers shouldn't
# call close() from finish_response(), so if you close() anywhere but
# the double-error branch here, you'll break asynchronous servers by
# prematurely closing. Async servers must return from 'run()' without
# closing if there might still be output to iterate over.
try:
self.setup_environ()
self.result = application(self.environ, self.start_response)
self.finish_response()
except:
try:
self.handle_error()
except:
# If we get an error handling an error, just give up already!
self.close()
raise # ...and let the actual server figure it out.
def setup_environ(self):
"""Set up the environment for one request"""
env = self.environ = self.os_environ.copy()
self.add_cgi_vars()
env['wsgi.input'] = self.get_stdin()
env['wsgi.errors'] = self.get_stderr()
env['wsgi.version'] = self.wsgi_version
env['wsgi.run_once'] = self.wsgi_run_once
env['wsgi.url_scheme'] = self.get_scheme()
env['wsgi.multithread'] = self.wsgi_multithread
env['wsgi.multiprocess'] = self.wsgi_multiprocess
if self.wsgi_file_wrapper is not None:
env['wsgi.file_wrapper'] = self.wsgi_file_wrapper
if self.origin_server and self.server_software:
env.setdefault('SERVER_SOFTWARE',self.server_software)
def finish_response(self):
"""
Send any iterable data, then close self and the iterable
Subclasses intended for use in asynchronous servers will want to
redefine this method, such that it sets up callbacks in the event loop
to iterate over the data, and to call 'self.close()' once the response
is finished.
"""
if not self.result_is_file() or not self.sendfile():
for data in self.result:
self.write(data)
self.finish_content()
self.close()
def get_scheme(self):
"""Return the URL scheme being used"""
return guess_scheme(self.environ)
def set_content_length(self):
"""Compute Content-Length or switch to chunked encoding if possible"""
try:
blocks = len(self.result)
except (TypeError, AttributeError, NotImplementedError):
pass
else:
if blocks==1:
self.headers['Content-Length'] = str(self.bytes_sent)
return
# XXX Try for chunked encoding if origin server and client is 1.1
def cleanup_headers(self):
"""Make any necessary header changes or defaults
Subclasses can extend this to add other defaults.
"""
if 'Content-Length' not in self.headers:
self.set_content_length()
def start_response(self, status, headers,exc_info=None):
"""'start_response()' callable as specified by PEP 333"""
if exc_info:
try:
if self.headers_sent:
# Re-raise original exception if headers sent
raise exc_info[0], exc_info[1], exc_info[2]
finally:
exc_info = None # avoid dangling circular ref
elif self.headers is not None:
raise AssertionError("Headers already set!")
assert isinstance(status, str),"Status must be a string"
assert len(status)>=4,"Status must be at least 4 characters"
assert int(status[:3]),"Status message must begin w/3-digit code"
assert status[3]==" ", "Status message must have a space after code"
if __debug__:
for name,val in headers:
assert isinstance(name, str),"Header names must be strings"
assert isinstance(val, str),"Header values must be strings"
assert not is_hop_by_hop(name),"Hop-by-hop headers not allowed"
self.status = status
self.headers = self.headers_class(headers)
return self.write
def send_preamble(self):
"""Transmit version/status/date/server, via self._write()"""
if self.origin_server:
if self.client_is_modern():
self._write('HTTP/%s %s\r\n' % (self.http_version,self.status))
if 'Date' not in self.headers:
self._write(
'Date: %s\r\n' % http_date()
)
if self.server_software and 'Server' not in self.headers:
self._write('Server: %s\r\n' % self.server_software)
else:
self._write('Status: %s\r\n' % self.status)
def write(self, data):
"""'write()' callable as specified by PEP 333"""
assert isinstance(data, str), "write() argument must be string"
if not self.status:
raise AssertionError("write() before start_response()")
elif not self.headers_sent:
# Before the first output, send the stored headers
self.bytes_sent = len(data) # make sure we know content-length
self.send_headers()
else:
self.bytes_sent += len(data)
# XXX check Content-Length and truncate if too many bytes written?
# If data is too large, socket will choke, so write chunks no larger
# than 32MB at a time.
length = len(data)
if length > 33554432:
offset = 0
while offset < length:
chunk_size = min(33554432, length)
self._write(data[offset:offset+chunk_size])
self._flush()
offset += chunk_size
else:
self._write(data)
self._flush()
def sendfile(self):
"""Platform-specific file transmission
Override this method in subclasses to support platform-specific
file transmission. It is only called if the application's
return iterable ('self.result') is an instance of
'self.wsgi_file_wrapper'.
This method should return a true value if it was able to actually
transmit the wrapped file-like object using a platform-specific
approach. It should return a false value if normal iteration
should be used instead. An exception can be raised to indicate
that transmission was attempted, but failed.
NOTE: this method should call 'self.send_headers()' if
'self.headers_sent' is false and it is going to attempt direct
        transmission of the file.
"""
return False # No platform-specific transmission by default
def finish_content(self):
"""Ensure headers and content have both been sent"""
if not self.headers_sent:
self.headers['Content-Length'] = "0"
self.send_headers()
else:
pass # XXX check if content-length was too short?
def close(self):
try:
self.request_handler.log_request(self.status.split(' ',1)[0], self.bytes_sent)
finally:
try:
if hasattr(self.result,'close'):
self.result.close()
finally:
self.result = self.headers = self.status = self.environ = None
self.bytes_sent = 0; self.headers_sent = False
def send_headers(self):
"""Transmit headers to the client, via self._write()"""
self.cleanup_headers()
self.headers_sent = True
if not self.origin_server or self.client_is_modern():
self.send_preamble()
self._write(str(self.headers))
def result_is_file(self):
"""True if 'self.result' is an instance of 'self.wsgi_file_wrapper'"""
wrapper = self.wsgi_file_wrapper
return wrapper is not None and isinstance(self.result,wrapper)
def client_is_modern(self):
"""True if client can accept status and headers"""
return self.environ['SERVER_PROTOCOL'].upper() != 'HTTP/0.9'
def log_exception(self,exc_info):
"""Log the 'exc_info' tuple in the server log
Subclasses may override to retarget the output or change its format.
"""
try:
from traceback import print_exception
stderr = self.get_stderr()
print_exception(
exc_info[0], exc_info[1], exc_info[2],
self.traceback_limit, stderr
)
stderr.flush()
finally:
exc_info = None
def handle_error(self):
"""Log current error, and send error output to client if possible"""
self.log_exception(sys.exc_info())
if not self.headers_sent:
self.result = self.error_output(self.environ, self.start_response)
self.finish_response()
# XXX else: attempt advanced recovery techniques for HTML or text?
def error_output(self, environ, start_response):
import traceback
start_response(self.error_status, self.error_headers[:], sys.exc_info())
return ['\n'.join(traceback.format_exception(*sys.exc_info()))]
# Pure abstract methods; *must* be overridden in subclasses
def _write(self,data):
self.stdout.write(data)
self._write = self.stdout.write
def _flush(self):
self.stdout.flush()
self._flush = self.stdout.flush
def get_stdin(self):
return self.stdin
def get_stderr(self):
return self.stderr
def add_cgi_vars(self):
self.environ.update(self.base_env)
class WSGIServer(HTTPServer):
"""BaseHTTPServer that implements the Python WSGI protocol"""
application = None
def __init__(self, *args, **kwargs):
if kwargs.pop('ipv6', False):
self.address_family = socket.AF_INET6
HTTPServer.__init__(self, *args, **kwargs)
def server_bind(self):
"""Override server_bind to store the server name."""
try:
HTTPServer.server_bind(self)
except Exception, e:
raise WSGIServerException(e)
self.setup_environ()
def setup_environ(self):
# Set up base environment
env = self.base_environ = {}
env['SERVER_NAME'] = self.server_name
env['GATEWAY_INTERFACE'] = 'CGI/1.1'
env['SERVER_PORT'] = str(self.server_port)
env['REMOTE_HOST']=''
env['CONTENT_LENGTH']=''
env['SCRIPT_NAME'] = ''
def get_app(self):
return self.application
def set_app(self,application):
self.application = application
class WSGIRequestHandler(BaseHTTPRequestHandler):
server_version = "WSGIServer/" + __version__
def __init__(self, *args, **kwargs):
from django.conf import settings
self.admin_media_prefix = settings.ADMIN_MEDIA_PREFIX
# We set self.path to avoid crashes in log_message() on unsupported
# requests (like "OPTIONS").
self.path = ''
self.style = color_style()
BaseHTTPRequestHandler.__init__(self, *args, **kwargs)
def get_environ(self):
env = self.server.base_environ.copy()
env['SERVER_PROTOCOL'] = self.request_version
env['REQUEST_METHOD'] = self.command
if '?' in self.path:
path,query = self.path.split('?',1)
else:
path,query = self.path,''
env['PATH_INFO'] = urllib.unquote(path)
env['QUERY_STRING'] = query
env['REMOTE_ADDR'] = self.client_address[0]
if self.headers.typeheader is None:
env['CONTENT_TYPE'] = self.headers.type
else:
env['CONTENT_TYPE'] = self.headers.typeheader
length = self.headers.getheader('content-length')
if length:
env['CONTENT_LENGTH'] = length
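        # Promote the remaining HTTP headers to HTTP_* CGI variables, comma-joining
        # values when the same header appears more than once.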
for h in self.headers.headers:
k,v = h.split(':',1)
k=k.replace('-','_').upper(); v=v.strip()
if k in env:
continue # skip content length, type,etc.
if 'HTTP_'+k in env:
env['HTTP_'+k] += ','+v # comma-separate multiple headers
else:
env['HTTP_'+k] = v
return env
def get_stderr(self):
return sys.stderr
def handle(self):
"""Handle a single HTTP request"""
self.raw_requestline = self.rfile.readline()
if not self.parse_request(): # An error code has been sent, just exit
return
handler = ServerHandler(self.rfile, self.wfile, self.get_stderr(), self.get_environ())
handler.request_handler = self # backpointer for logging
handler.run(self.server.get_app())
def log_message(self, format, *args):
# Don't bother logging requests for admin images or the favicon.
if self.path.startswith(self.admin_media_prefix) or self.path == '/favicon.ico':
return
msg = "[%s] %s\n" % (self.log_date_time_string(), format % args)
# Utilize terminal colors, if available
if args[1][0] == '2':
# Put 2XX first, since it should be the common case
msg = self.style.HTTP_SUCCESS(msg)
elif args[1][0] == '1':
msg = self.style.HTTP_INFO(msg)
elif args[1] == '304':
msg = self.style.HTTP_NOT_MODIFIED(msg)
elif args[1][0] == '3':
msg = self.style.HTTP_REDIRECT(msg)
elif args[1] == '404':
msg = self.style.HTTP_NOT_FOUND(msg)
elif args[1][0] == '4':
msg = self.style.HTTP_BAD_REQUEST(msg)
else:
# Any 5XX, or any other response
msg = self.style.HTTP_SERVER_ERROR(msg)
sys.stderr.write(msg)
class AdminMediaHandler(handlers.StaticFilesHandler):
"""
WSGI middleware that intercepts calls to the admin media directory, as
defined by the ADMIN_MEDIA_PREFIX setting, and serves those images.
Use this ONLY LOCALLY, for development! This hasn't been tested for
security and is not super efficient.
This is pending for deprecation since 1.3.
"""
def get_base_dir(self):
import django
return os.path.join(django.__path__[0], 'contrib', 'admin', 'media')
def get_base_url(self):
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
if not settings.ADMIN_MEDIA_PREFIX:
raise ImproperlyConfigured(
"The ADMIN_MEDIA_PREFIX setting can't be empty "
"when using the AdminMediaHandler, e.g. with runserver.")
return settings.ADMIN_MEDIA_PREFIX
def file_path(self, url):
"""
Returns the path to the media file on disk for the given URL.
The passed URL is assumed to begin with ``self.base_url``. If the
resulting file path is outside the media directory, then a ValueError
is raised.
"""
relative_url = url[len(self.base_url[2]):]
relative_path = urllib.url2pathname(relative_url)
return safe_join(self.base_dir, relative_path)
def serve(self, request):
document_root, path = os.path.split(self.file_path(request.path))
return static.serve(request, path, document_root=document_root)
def _should_handle(self, path):
"""
Checks if the path should be handled. Ignores the path if:
* the host is provided as part of the base_url
* the request's path isn't under the base path
"""
return path.startswith(self.base_url[2]) and not self.base_url[1]
def run(addr, port, wsgi_handler, ipv6=False):
server_address = (addr, port)
httpd = WSGIServer(server_address, WSGIRequestHandler, ipv6=ipv6)
httpd.set_app(wsgi_handler)
httpd.serve_forever()
| bsd-3-clause |
mollstam/UnrealPy | UnrealPyEmbed/Development/Python/2015.08.07-Python2710-x64-Source-vs2015/Python27/Source/reportlab-3.2.0/tests/test_platypus_toc.py | 14 | 12157 | #Copyright ReportLab Europe Ltd. 2000-2012
#see license.txt for license details
"""Tests for the Platypus TableOfContents class.
Currently there is only one such test. Most such tests, like this
one, will be generating a PDF document that needs to be eye-balled
in order to find out if it is 'correct'.
"""
__version__='''$Id$'''
from reportlab.lib.testutils import setOutDir,makeSuiteForClasses, outputfile, printLocation
setOutDir(__name__)
import sys, os
from os.path import join, basename, splitext
from math import sqrt
import random
import unittest
from reportlab.lib.units import inch, cm
from reportlab.lib.pagesizes import A4
from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle
from reportlab.platypus.xpreformatted import XPreformatted
from reportlab.platypus.frames import Frame
from reportlab.platypus.paragraph import Paragraph
from reportlab.platypus.flowables import Flowable, Spacer
from reportlab.platypus.doctemplate \
import PageTemplate, BaseDocTemplate
from reportlab.platypus import tableofcontents, PageBreak
from reportlab.lib import randomtext
from xml.sax.saxutils import escape as xmlEscape
def myMainPageFrame(canvas, doc):
"The page frame used for all PDF documents."
canvas.saveState()
canvas.rect(2.5*cm, 2.5*cm, 15*cm, 25*cm)
canvas.setFont('Times-Roman', 12)
pageNumber = canvas.getPageNumber()
canvas.drawString(10*cm, cm, str(pageNumber))
canvas.restoreState()
class MyDocTemplate(BaseDocTemplate):
"The document template used for all PDF documents."
_invalidInitArgs = ('pageTemplates',)
def __init__(self, filename, **kw):
frame1 = Frame(2.5*cm, 2.5*cm, 15*cm, 25*cm, id='F1')
self.allowSplitting = 0
BaseDocTemplate.__init__(self, filename, **kw)
template = PageTemplate('normal', [frame1], myMainPageFrame)
self.addPageTemplates(template)
def afterFlowable(self, flowable):
"Registers TOC entries."
if flowable.__class__.__name__ == 'Paragraph':
styleName = flowable.style.name
if styleName[:7] == 'Heading':
key = str(hash(flowable))
self.canv.bookmarkPage(key)
# Register TOC entries.
level = int(styleName[7:])
text = flowable.getPlainText()
pageNum = self.page
# Try calling this with and without a key to test both
# Entries of every second level will have links, others won't
if level % 2 == 1:
self.notify('TOCEntry', (level, text, pageNum, key))
else:
self.notify('TOCEntry', (level, text, pageNum))
def makeHeaderStyle(level, fontName='Times-Roman'):
"Make a header style for different levels."
assert level >= 0, "Level must be >= 0."
PS = ParagraphStyle
size = 24.0 / sqrt(1+level)
style = PS(name = 'Heading' + str(level),
fontName = fontName,
fontSize = size,
leading = size*1.2,
spaceBefore = size/4.0,
spaceAfter = size/8.0)
return style
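# For example, makeHeaderStyle(0) yields a 24 pt 'Heading0' style and
# makeHeaderStyle(3) a 12 pt 'Heading3' style (24 / sqrt(1 + 3)).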
def makeBodyStyle():
"Body text style - the default will do"
return ParagraphStyle('body')
def makeTocHeaderStyle(level, delta, epsilon, fontName='Times-Roman'):
"Make a header style for different levels."
assert level >= 0, "Level must be >= 0."
PS = ParagraphStyle
size = 12
style = PS(name = 'Heading' + str(level),
fontName = fontName,
fontSize = size,
leading = size*1.2,
spaceBefore = size/4.0,
spaceAfter = size/8.0,
firstLineIndent = -epsilon,
leftIndent = level*delta + epsilon)
return style
class TocTestCase(unittest.TestCase):
"Test TableOfContents class (eyeball-test)."
def test0(self):
"""Test story with TOC and a cascaded header hierarchy.
The story should contain exactly one table of contents that is
        immediately followed by a list of cascaded levels of header
lines, each nested one level deeper than the previous one.
Features to be visually confirmed by a human being are:
1. TOC lines are indented in multiples of 1 cm.
2. Wrapped TOC lines continue with additional 0.5 cm indentation.
        3. Only entries of every second level have links
...
"""
maxLevels = 12
# Create styles to be used for document headers
# on differnet levels.
headerLevelStyles = []
for i in range(maxLevels):
headerLevelStyles.append(makeHeaderStyle(i))
# Create styles to be used for TOC entry lines
# for headers on differnet levels.
tocLevelStyles = []
d, e = tableofcontents.delta, tableofcontents.epsilon
for i in range(maxLevels):
tocLevelStyles.append(makeTocHeaderStyle(i, d, e))
# Build story.
story = []
styleSheet = getSampleStyleSheet()
bt = styleSheet['BodyText']
description = '<font color=red>%s</font>' % self.test0.__doc__
story.append(XPreformatted(description, bt))
toc = tableofcontents.TableOfContents()
toc.levelStyles = tocLevelStyles
story.append(toc)
for i in range(maxLevels):
story.append(Paragraph('HEADER, LEVEL %d' % i,
headerLevelStyles[i]))
#now put some body stuff in.
txt = xmlEscape(randomtext.randomText(randomtext.PYTHON, 5))
para = Paragraph(txt, makeBodyStyle())
story.append(para)
path = outputfile('test_platypus_toc.pdf')
doc = MyDocTemplate(path)
doc.multiBuild(story)
def test1(self):
"""This shows a table which would take more than one page,
and need multiple passes to render. But we preload it
with the right headings to make it go faster. We used
a simple 100-chapter document with one level.
"""
chapters = 30 #goes over one page
headerStyle = makeHeaderStyle(0)
d, e = tableofcontents.delta, tableofcontents.epsilon
tocLevelStyle = makeTocHeaderStyle(0, d, e)
# Build most of the story; we'll re-use it to
# make documents with different numbers of passes.
story = []
styleSheet = getSampleStyleSheet()
bt = styleSheet['BodyText']
description = '<font color=red>%s</font>' % self.test1.__doc__
story.append(XPreformatted(description, bt))
for i in range(chapters):
story.append(PageBreak())
story.append(Paragraph('This is chapter %d' % (i+1),
headerStyle))
#now put some lengthy body stuff in.
for paras in range(random.randint(1,3)):
txt = xmlEscape(randomtext.randomText(randomtext.PYTHON, 5))
para = Paragraph(txt, makeBodyStyle())
story.append(para)
#try 1: empty TOC, 3 passes
toc = tableofcontents.TableOfContents()
toc.levelStyles = [tocLevelStyle] #only need one
story1 = [toc] + story
path = outputfile('test_platypus_toc_preload.pdf')
doc = MyDocTemplate(path)
passes = doc.multiBuild(story1)
self.assertEquals(passes, 3)
#try 2: now preload the TOC with the entries
toc = tableofcontents.TableOfContents()
toc.levelStyles = [tocLevelStyle] #only need one
tocEntries = []
for i in range(chapters):
#add tuple of (level, text, pageNum, key)
#with an initial guess of pageNum=0
tocEntries.append((0, 'This is chapter %d' % (i+1), 0, None))
toc.addEntries(tocEntries)
story2 = [toc] + story
path = outputfile('test_platypus_toc_preload.pdf')
doc = MyDocTemplate(path)
passes = doc.multiBuild(story2)
self.assertEquals(passes, 2)
#try 3: preload again but try to be really smart and work out
#in advance what page everything starts on. We cannot
#use a random story for this.
toc3 = tableofcontents.TableOfContents()
toc3.levelStyles = [tocLevelStyle] #only need one
tocEntries = []
for i in range(chapters):
#add tuple of (level, text, pageNum, key)
#with an initial guess of pageNum= 3
tocEntries.append((0, 'This is chapter %d' % i, 2+i, None))
toc3.addEntries(tocEntries)
story3 = [toc3]
for i in range(chapters):
story3.append(PageBreak())
story3.append(Paragraph('This is chapter %d' % (i+1),
headerStyle))
txt = """
The paragraphs in this are not at all random, because
we need to be absolutely, totally certain they will fit
on one page. Each chapter will be one page long.
"""
para = Paragraph(txt, makeBodyStyle())
story3.append(para)
path = outputfile('test_platypus_toc_preload.pdf')
doc = MyDocTemplate(path)
passes = doc.multiBuild(story3)
        # I can't get one pass yet
#self.assertEquals(passes, 1)
def test2(self):
chapters = 20 #so we know we use only one page
from reportlab.lib.colors import pink
#TOC and this HParagraph class just handle the collection
TOC = []
fontSize = 14
leading = fontSize*1.2
descent = 0.2*fontSize
x = 2.5*cm #these come from the frame size
y = (25+2.5)*cm - leading
x1 = (15+2.5)*cm
class HParagraph(Paragraph):
def __init__(self,key,text,*args,**kwds):
self._label = text
self._key = key
Paragraph.__init__(self,text,*args,**kwds)
def draw(self):
Paragraph.draw(self)
TOC.append((self._label,self.canv.getPageNumber(),self._key))
self.canv.bookmarkHorizontal('TOC_%s' % self._key,0,+20)
class UseForm(Flowable):
_ZEROSIZE = 1
def __init__(self,formName):
self._formName = formName
self.width = self.height = 0
def draw(self):
self.canv.doForm(self._formName)
for i in range(chapters):
yb = y - i*leading #baseline
self.canv.linkRect('','TOC_%s' % i,(x,yb-descent,x1,yb+fontSize),thickness=0.5,color=pink,relative=0)
def drawOn(self,canvas,x,y,_sW=0):
Flowable.drawOn(self,canvas,0,0,canvas._pagesize[0])
class MakeForm(UseForm):
def draw(self):
canv = self.canv
canv.saveState()
canv.beginForm(self._formName)
canv.setFont("Helvetica",fontSize)
for i,(text,pageNumber,key) in enumerate(TOC):
yb = y - i*leading #baseline
canv.drawString(x,yb,text)
canv.drawRightString(x1,y-i*leading,str(pageNumber))
canv.endForm()
canv.restoreState()
headerStyle = makeHeaderStyle(0)
S = [Spacer(0,180),UseForm('TOC')]
RT = 'STARTUP COMPUTERS BLAH BUZZWORD STARTREK PRINTING PYTHON CHOMSKY'.split()
for i in range(chapters):
S.append(PageBreak())
S.append(HParagraph(i,'This is chapter %d' % (i+1), headerStyle))
txt = xmlEscape(randomtext.randomText(RT[i*13 % len(RT)], 15))
para = Paragraph(txt, makeBodyStyle())
S.append(para)
S.append(MakeForm('TOC'))
doc = MyDocTemplate(outputfile('test_platypus_toc_simple.pdf'))
doc.build(S)
def makeSuite():
return makeSuiteForClasses(TocTestCase)
#noruntests
if __name__ == "__main__":
unittest.TextTestRunner().run(makeSuite())
printLocation()
| mit |
google/llvm-propeller | llvm/utils/extract_vplan.py | 7 | 1612 | #!/usr/bin/env python
# This script extracts the VPlan digraphs from the vectoriser debug messages
# and saves them in individual dot files (one for each plan). Optionally, and
# providing 'dot' is installed, it can also render the dot into a PNG file.
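# Typical invocation (illustrative only; the exact opt flags depend on your LLVM build):
#   opt -passes=loop-vectorize -debug-only=loop-vectorize -disable-output in.ll 2>&1 \
#     | utils/extract_vplan.py --png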
from __future__ import print_function
import sys
import re
import argparse
import shutil
import subprocess
parser = argparse.ArgumentParser()
parser.add_argument('--png', action='store_true')
args = parser.parse_args()
dot = shutil.which('dot')
if args.png and not dot:
raise RuntimeError("Can't export to PNG without 'dot' in the system")
pattern = re.compile(r"(digraph VPlan {.*?\n})",re.DOTALL)
matches = re.findall(pattern, sys.stdin.read())
for vplan in matches:
m = re.search("graph \[.+(VF=.+,UF.+)", vplan)
if not m:
raise ValueError("Can't get the right VPlan name")
name = re.sub('[^a-zA-Z0-9]', '', m.group(1))
if args.png:
filename = 'VPlan' + name + '.png'
print("Exporting " + name + " to PNG via dot: " + filename)
p = subprocess.Popen([dot, '-Tpng', '-o', filename],
encoding='utf-8',
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = p.communicate(input=vplan)
if err:
raise RuntimeError("Error running dot: " + err)
else:
filename = 'VPlan' + name + '.dot'
print("Exporting " + name + " to DOT: " + filename)
with open(filename, 'w') as out:
out.write(vplan)
| apache-2.0 |
CCI-MOC/nova | plugins/xenserver/xenapi/etc/xapi.d/plugins/pluginlib_nova.py | 70 | 4923 | # Copyright (c) 2010 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# NOTE: XenServer still only supports Python 2.4 in its dom0 userspace
# which means the Nova xenapi plugins must use only Python 2.4 features
#
# Helper functions for the Nova xapi plugins. In time, this will merge
# with the pluginlib.py shipped with xapi, but for now, that file is not
# very stable, so it's easiest just to have a copy of all the functions
# that we need.
#
import gettext
import logging
import logging.handlers
import time
import XenAPI
translations = gettext.translation('nova', fallback=True)
_ = translations.ugettext
# Logging setup
def configure_logging(name):
log = logging.getLogger()
log.setLevel(logging.DEBUG)
sysh = logging.handlers.SysLogHandler('/dev/log')
sysh.setLevel(logging.DEBUG)
formatter = logging.Formatter('%s: %%(levelname)-8s %%(message)s' % name)
sysh.setFormatter(formatter)
log.addHandler(sysh)
# Exceptions
class PluginError(Exception):
"""Base Exception class for all plugin errors."""
def __init__(self, *args):
Exception.__init__(self, *args)
class ArgumentError(PluginError):
"""Raised when required arguments are missing, argument values are invalid,
or incompatible arguments are given.
"""
def __init__(self, *args):
PluginError.__init__(self, *args)
# Argument validation
def exists(args, key):
"""Validates that a freeform string argument to a RPC method call is given.
Returns the string.
"""
if key in args:
return args[key]
else:
raise ArgumentError(_('Argument %s is required.') % key)
def optional(args, key):
"""If the given key is in args, return the corresponding value, otherwise
return None
"""
return key in args and args[key] or None
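# Note: because of the 'and/or' idiom above, a present-but-falsy value
# (e.g. 0 or '') is also reported as None by optional().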
def _get_domain_0(session):
this_host_ref = session.xenapi.session.get_this_host(session.handle)
expr = 'field "is_control_domain" = "true" and field "resident_on" = "%s"'
expr = expr % this_host_ref
return session.xenapi.VM.get_all_records_where(expr).keys()[0]
def with_vdi_in_dom0(session, vdi, read_only, f):
dom0 = _get_domain_0(session)
vbd_rec = {}
vbd_rec['VM'] = dom0
vbd_rec['VDI'] = vdi
vbd_rec['userdevice'] = 'autodetect'
vbd_rec['bootable'] = False
vbd_rec['mode'] = read_only and 'RO' or 'RW'
vbd_rec['type'] = 'disk'
vbd_rec['unpluggable'] = True
vbd_rec['empty'] = False
vbd_rec['other_config'] = {}
vbd_rec['qos_algorithm_type'] = ''
vbd_rec['qos_algorithm_params'] = {}
vbd_rec['qos_supported_algorithms'] = []
logging.debug(_('Creating VBD for VDI %s ... '), vdi)
vbd = session.xenapi.VBD.create(vbd_rec)
logging.debug(_('Creating VBD for VDI %s done.'), vdi)
try:
logging.debug(_('Plugging VBD %s ... '), vbd)
session.xenapi.VBD.plug(vbd)
logging.debug(_('Plugging VBD %s done.'), vbd)
return f(session.xenapi.VBD.get_device(vbd))
finally:
logging.debug(_('Destroying VBD for VDI %s ... '), vdi)
_vbd_unplug_with_retry(session, vbd)
try:
session.xenapi.VBD.destroy(vbd)
except XenAPI.Failure, e: # noqa
logging.error(_('Ignoring XenAPI.Failure %s'), e)
logging.debug(_('Destroying VBD for VDI %s done.'), vdi)
def _vbd_unplug_with_retry(session, vbd):
"""Call VBD.unplug on the given VBD, with a retry if we get
DEVICE_DETACH_REJECTED. For reasons which I don't understand, we're
seeing the device still in use, even when all processes using the device
should be dead.
"""
while True:
try:
session.xenapi.VBD.unplug(vbd)
logging.debug(_('VBD.unplug successful first time.'))
return
except XenAPI.Failure, e: # noqa
if (len(e.details) > 0 and
e.details[0] == 'DEVICE_DETACH_REJECTED'):
logging.debug(_('VBD.unplug rejected: retrying...'))
time.sleep(1)
elif (len(e.details) > 0 and
e.details[0] == 'DEVICE_ALREADY_DETACHED'):
logging.debug(_('VBD.unplug successful eventually.'))
return
else:
logging.error(_('Ignoring XenAPI.Failure in VBD.unplug: %s'),
e)
return
| apache-2.0 |
guildai/guild | examples/iris-svm/plot_iris_exercise.py | 1 | 1702 | """
A tutorial exercise for using different SVM kernels.
Adapted from:
https://scikit-learn.org/stable/auto_examples/exercises/plot_iris_exercise.html
"""
import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from sklearn import datasets, svm
kernel = 'linear' # choice of linear, rbf, poly
test_split = 0.1
random_seed = 0
degree = 3
gamma = 10
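# These module-level assignments are the tunable hyperparameters; when run through
# Guild AI they are presumably exposed as flags that can be overridden per run.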
iris = datasets.load_iris()
X = iris.data
y = iris.target
X = X[y != 0, :2]
y = y[y != 0]
n_sample = len(X)
np.random.seed(random_seed)
order = np.random.permutation(n_sample)
X = X[order]
y = y[order].astype(np.float)
split_pos = int((1 - test_split) * n_sample)
X_train = X[:split_pos]
y_train = y[:split_pos]
X_test = X[split_pos:]
y_test = y[split_pos:]
# fit the model
clf = svm.SVC(kernel=kernel, degree=degree, gamma=gamma)
clf.fit(X_train, y_train)
print("Train accuracy: %s" % clf.score(X_train, y_train))
print("Test accuracy: %f" % clf.score(X_test, y_test))
plt.figure()
plt.clf()
plt.scatter(X[:, 0], X[:, 1], c=y, zorder=10, cmap=plt.cm.Paired,
edgecolor='k', s=20)
# Circle out the test data
plt.scatter(X_test[:, 0], X_test[:, 1], s=80, facecolors='none',
zorder=10, edgecolor='k')
plt.axis('tight')
x_min = X[:, 0].min()
x_max = X[:, 0].max()
y_min = X[:, 1].min()
y_max = X[:, 1].max()
XX, YY = np.mgrid[x_min:x_max:200j, y_min:y_max:200j]
Z = clf.decision_function(np.c_[XX.ravel(), YY.ravel()])
# Put the result into a color plot
Z = Z.reshape(XX.shape)
plt.pcolormesh(XX, YY, Z > 0, cmap=plt.cm.Paired)
plt.contour(XX, YY, Z, colors=['k', 'k', 'k'],
linestyles=['--', '-', '--'], levels=[-.5, 0, .5])
plt.title(kernel)
plt.savefig("plot.png")
| apache-2.0 |
tigerking/pyvision | src/pyvision/types/Perspective.py | 1 | 10434 | # PyVision License
#
# Copyright (c) 2006-2008 David S. Bolme
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither name of copyright holders nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
##
# This class performs a perspective transform on an image. Primarily
# implemented using OpenCV: cvWarpPerspective
##
import unittest
import os.path
import math
import cv
import numpy as np
import numpy.linalg as la
import pyvision as pv
def logPolar(im,center=None,radius=None,M=None,size=(64,128)):
'''
Produce a log polar transform of the image. See OpenCV for details.
The scale space is calculated based on radius or M. If both are given
M takes priority.
'''
#M=1.0
w,h = im.size
if radius == None:
radius = 0.5*min(w,h)
if M == None:
#rho=M*log(sqrt(x2+y2))
#size[0] = M*log(r)
M = size[0]/np.log(radius)
if center == None:
center = pv.Point(0.5*w,0.5*h)
src = im.asOpenCV()
dst = cv.cvCreateImage( cv.cvSize(size[0],size[1]), 8, 3 );
cv.cvLogPolar( src, dst, center.asOpenCV(), M, cv.CV_INTER_LINEAR+cv.CV_WARP_FILL_OUTLIERS )
return pv.Image(dst)
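# A small worked example of the scaling above (numbers are illustrative): with
# the default output size of (64,128) and radius=100, M = size[0]/ln(radius)
# = 64/ln(100) ~= 13.9, so rho = M*ln(r) maps the full radius onto the 64
# columns of the log-polar output.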
##
# Create a transform or homography that optimally maps one set of points to
# the other set of points. Requires at least four points.
#   [ A  B  C ]   [ x1 ]   [ x2 ]
#   [ D  E  F ] * [ y1 ] = [ y2 ]
#   [ G  H  1 ]   [ w1 ]   [ w2 ]
#
def PerspectiveFromPointsOld(source, dest, new_size):
'''
    Python/Scipy implementation which finds a perspective
transform between points.
Most users should use PerspectiveFromPoints instead. This method
may be eliminated in the future.
'''
assert len(source) == len(dest)
src_nrm = pv.AffineNormalizePoints(source)
source = src_nrm.transformPoints(source)
dst_nrm = pv.AffineNormalizePoints(dest)
dest = dst_nrm.transformPoints(dest)
A = []
for i in range(len(source)):
src = source[i]
dst = dest[i]
# See Hartley and Zisserman Ch. 4.1, 4.1.1, 4.4.4
row1 = [0.0,0.0,0.0,-dst.w*src.x,-dst.w*src.y,-dst.w*src.w,dst.y*src.x,dst.y*src.y,dst.y*src.w]
row2 = [dst.w*src.x,dst.w*src.y,dst.w*src.w,0.0,0.0,0.0,-dst.x*src.x,-dst.x*src.y,-dst.x*src.w]
#row3 = [-dst.y*src.x,-dst.y*src.y,-dst.y*src.w,dst.x*src.x,dst.x*src.y,dst.x*src.w,0.0,0.0,0.0]
A.append(row1)
A.append(row2)
#A.append(row3)
A = np.array(A)
U,D,Vt = la.svd(A)
H = Vt[8,:].reshape(3,3)
matrix = np.dot(dst_nrm.inverse,np.dot(H,src_nrm.matrix))
return PerspectiveTransform(matrix,new_size)
def PerspectiveFromPoints(source, dest, new_size, method=0, ransacReprojThreshold=1.5):
'''
Calls the OpenCV function: cvFindHomography. This method has
additional options to use the CV_RANSAC or CV_LMEDS methods to
find a robust homography. Method=0 appears to be similar to
    PerspectiveFromPointsOld.
'''
assert len(source) == len(dest)
n_points = len(source)
s = cv.cvCreateMat(n_points,2,cv.CV_32F)
d = cv.cvCreateMat(n_points,2,cv.CV_32F)
p = cv.cvCreateMat(3,3,cv.CV_32F)
for i in range(n_points):
s[i,0] = source[i].X()
s[i,1] = source[i].Y()
d[i,0] = dest[i].X()
d[i,1] = dest[i].Y()
results = cv.cvFindHomography(s,d,p,method,ransacReprojThreshold)
matrix = pv.OpenCVToNumpy(p)
return PerspectiveTransform(matrix,new_size)
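# Illustrative usage sketch (the corner values mirror the unit test at the
# bottom of this file; any four or more corresponding pv.Point lists work):
#
#     src = [pv.Point(241,136),pv.Point(496,140),pv.Point(512,343),pv.Point(261,395)]
#     dst = [pv.Point(0,0),pv.Point(639,0),pv.Point(639,479),pv.Point(0,479)]
#     persp = PerspectiveFromPoints(src, dst, (640,480))
#     warped_im  = persp.transformImage(im)    # or simply persp(im)
#     warped_pts = persp.transformPoints(src)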
##
# The PerspectiveTransform class is used to transform images and points back
# and forth between different coordinate systems.
#
#
#
# @param matrix a 3-by-3 matrix that defines the transformation.
# @param new_size the size of any new images created by this affine transform.
# @keyparam filter the image filtering function used for interpolating between pixels.
# @return an AffineTransform object
class PerspectiveTransform:
def __init__(self,matrix,new_size,filter=None):
self.matrix = matrix
self.inverse = la.inv(matrix)
self.size = new_size
self.filter = filter
def __call__(self,data):
'''
This is a simple interface to transform images or points. Simply
call the affine transform like a function and it will try to automatically
transform the argument.
@param data: an image, point, or list of points.
'''
if isinstance(data,pv.Image):
return self.transformImage(data)
elif isinstance(data,pv.Point):
return self.transformPoint(data)
else: # assume this is a list of points
return self.transformPoints(data)
##
# Transforms an image into the new coordinate system.
#
# @param im an pv.Image object
def transformImage(self,im):
''' Transform an image. '''
if isinstance(self.matrix,cv.cvmat):
matrix = self.matrix
else:
matrix = pv.NumpyToOpenCV(self.matrix)
src = im.asOpenCV()
dst = cv.CreateImage( (self.size[0],self.size[1]), cv.IPL_DEPTH_8U, src.nChannels );
cv.WarpPerspective( src, dst, matrix)
return pv.Image(dst)
##
# Transforms a link.Point into the new coordinate system.
#
# @param pt a link.Point
def transformPoint(self,pt):
''' Transform a point from the old image to the new image '''
vec = np.dot(self.matrix,pt.asVector2H())
return pv.Point(x=vec[0,0],y=vec[1,0],w=vec[2,0])
##
# Transforms a list of link.Points into the new coordinate system.
#
# @param pts a list of link.Points
def transformPoints(self,pts):
''' Transform a point from the old image to the new image '''
return [ self.transformPoint(pt) for pt in pts ]
##
# Transforms a link.Point from the new coordinate system to
# the old coordinate system.
#
# @param pts a list of link.Points
def invertPoint(self,pt):
''' Transform a point from the old image to the new image '''
vec = np.dot(self.inverse,pt.asVector2H())
return pv.Point(x=vec[0,0],y=vec[1,0],w=vec[2,0])
##
# Transforms a list of link.Points from the new coordinate system to
# the old coordinate system.
#
# @param pts a list of link.Points
def invertPoints(self,pts):
''' Transform a point from the old image to the new image '''
return [ self.invertPoint(pt) for pt in pts ]
##
# @return the transform as a 3 by 3 matrix
def asMatrix(self):
''' Return the transform as a 3 by 3 matrix '''
return self.matrix
##
    # Used to concatenate transforms. For example:
# <pre>
# # This code first scales and then translates
# S = AffineScale(2.0)
# T = AffineTranslate(4,5)
# A = T*S
# new_im = A.transformImage(old_im)
# </pre>
    # @return a single link.AffineTransform which is the same as
# both affine transforms.
def __mul__(self,affine):
return PerspectiveTransform(np.dot(self.matrix,affine.matrix),self.size,self.filter)
# TODO: Add unit tests
class _PerspectiveTest(unittest.TestCase):
def setUp(self):
fname_a = os.path.join(pv.__path__[0],'data','test','perspective1a.jpg')
fname_b = os.path.join(pv.__path__[0],'data','test','perspective1b.jpg')
self.im_a = pv.Image(fname_a)
self.im_b = pv.Image(fname_b)
        #corners clockwise: upper left, upper right, lower right, lower left
self.corners_a = (pv.Point(241,136),pv.Point(496,140),pv.Point(512,343),pv.Point(261,395))
self.corners_b = (pv.Point(237,165),pv.Point(488,177),pv.Point(468,392),pv.Point(222,347))
self.corners_t = (pv.Point(0,0),pv.Point(639,0),pv.Point(639,479),pv.Point(0,479))
for pt in self.corners_a:
self.im_a.annotatePoint(pt)
#self.im_a.show()
#self.im_b.show()
def test_four_points_a(self):
p = PerspectiveFromPoints(self.corners_a,self.corners_t,(640,480))
pts = p.transformPoints(self.corners_a)
#for pt in pts:
# print "Point: %7.2f %7.2f"%(pt.X(), pt.Y())
im = p.transformImage(self.im_a)
#im.show()
def test_four_points_b(self):
p = PerspectiveFromPoints(self.corners_b,self.corners_t,(640,480))
pts = p.transformPoints(self.corners_b)
#for pt in pts:
# print "Point: %7.2f %7.2f"%(pt.X(), pt.Y())
im = p.transformImage(self.im_b)
#im.show()
def test_four_points_ab(self):
p = PerspectiveFromPoints(self.corners_a,self.corners_b,(640,480))
#pts = p.transformPoints(self.corners_b)
#for pt in pts:
# print "Point: %7.2f %7.2f"%(pt.X(), pt.Y())
im = p.transformImage(self.im_a)
#im.show()
#self.im_b.show()
| bsd-3-clause |
mfcabrera/luigi | luigi/interface.py | 4 | 9193 | # -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
This module contains the bindings for command line integration and dynamic loading of tasks.
If you don't want to run luigi from the command line, you may use the methods
defined in this module to programmatically run luigi.
"""
import logging
import logging.config
import os
import sys
import tempfile
import signal
import warnings
from luigi import configuration
from luigi import lock
from luigi import parameter
from luigi import rpc
from luigi import scheduler
from luigi import task
from luigi import worker
from luigi import execution_summary
from luigi.cmdline_parser import CmdlineParser
def setup_interface_logging(conf_file=''):
# use a variable in the function object to determine if it has run before
if getattr(setup_interface_logging, "has_run", False):
return
if conf_file == '':
logger = logging.getLogger('luigi-interface')
logger.setLevel(logging.DEBUG)
stream_handler = logging.StreamHandler()
stream_handler.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(levelname)s: %(message)s')
stream_handler.setFormatter(formatter)
logger.addHandler(stream_handler)
else:
logging.config.fileConfig(conf_file, disable_existing_loggers=False)
setup_interface_logging.has_run = True
class core(task.Config):
''' Keeps track of a bunch of environment params.
Uses the internal luigi parameter mechanism.
The nice thing is that we can instantiate this class
and get an object with all the environment variables set.
This is arguably a bit of a hack.
'''
use_cmdline_section = False
local_scheduler = parameter.BoolParameter(
default=False,
description='Use an in-memory central scheduler. Useful for testing.',
always_in_help=True)
scheduler_host = parameter.Parameter(
default='localhost',
description='Hostname of machine running remote scheduler',
config_path=dict(section='core', name='default-scheduler-host'))
scheduler_port = parameter.IntParameter(
default=8082,
description='Port of remote scheduler api process',
config_path=dict(section='core', name='default-scheduler-port'))
scheduler_url = parameter.Parameter(
default='',
description='Full path to remote scheduler',
config_path=dict(section='core', name='default-scheduler-url'),
)
lock_size = parameter.IntParameter(
default=1,
description="Maximum number of workers running the same command")
no_lock = parameter.BoolParameter(
default=False,
description='Ignore if similar process is already running')
lock_pid_dir = parameter.Parameter(
default=os.path.join(tempfile.gettempdir(), 'luigi'),
description='Directory to store the pid file')
take_lock = parameter.BoolParameter(
default=False,
description='Signal other processes to stop getting work if already running')
workers = parameter.IntParameter(
default=1,
description='Maximum number of parallel tasks to run')
logging_conf_file = parameter.Parameter(
default='',
description='Configuration file for logging')
module = parameter.Parameter(
default='',
description='Used for dynamic loading of modules',
always_in_help=True)
parallel_scheduling = parameter.BoolParameter(
default=False,
description='Use multiprocessing to do scheduling in parallel.')
assistant = parameter.BoolParameter(
default=False,
description='Run any task from the scheduler.')
help = parameter.BoolParameter(
default=False,
description='Show most common flags and all task-specific flags',
always_in_help=True)
help_all = parameter.BoolParameter(
default=False,
description='Show all command line flags',
always_in_help=True)
class _WorkerSchedulerFactory(object):
def create_local_scheduler(self):
return scheduler.Scheduler(prune_on_get_work=True, record_task_history=False)
def create_remote_scheduler(self, url):
return rpc.RemoteScheduler(url)
def create_worker(self, scheduler, worker_processes, assistant=False):
return worker.Worker(
scheduler=scheduler, worker_processes=worker_processes, assistant=assistant)
def _schedule_and_run(tasks, worker_scheduler_factory=None, override_defaults=None):
"""
    :param tasks: a list of task instances to schedule and run.
    :param worker_scheduler_factory: factory used to create the scheduler and worker;
        defaults to an instance of _WorkerSchedulerFactory.
    :param override_defaults: dict of overrides applied to the ``core`` config parameters.
    :return: a dict with keys ``success`` (True if all tasks and their dependencies were
        successfully run or already complete; False if any error occurred) and ``worker``
        (the worker that ran the tasks).
"""
if worker_scheduler_factory is None:
worker_scheduler_factory = _WorkerSchedulerFactory()
if override_defaults is None:
override_defaults = {}
env_params = core(**override_defaults)
# search for logging configuration path first on the command line, then
# in the application config file
logging_conf = env_params.logging_conf_file
if logging_conf != '' and not os.path.exists(logging_conf):
raise Exception(
"Error: Unable to locate specified logging configuration file!"
)
if not configuration.get_config().getboolean(
'core', 'no_configure_logging', False):
setup_interface_logging(logging_conf)
kill_signal = signal.SIGUSR1 if env_params.take_lock else None
if (not env_params.no_lock and
not(lock.acquire_for(env_params.lock_pid_dir, env_params.lock_size, kill_signal))):
raise PidLockAlreadyTakenExit()
if env_params.local_scheduler:
sch = worker_scheduler_factory.create_local_scheduler()
else:
if env_params.scheduler_url != '':
url = env_params.scheduler_url
else:
url = 'http://{host}:{port:d}/'.format(
host=env_params.scheduler_host,
port=env_params.scheduler_port,
)
sch = worker_scheduler_factory.create_remote_scheduler(url=url)
worker = worker_scheduler_factory.create_worker(
scheduler=sch, worker_processes=env_params.workers, assistant=env_params.assistant)
success = True
logger = logging.getLogger('luigi-interface')
with worker:
for t in tasks:
success &= worker.add(t, env_params.parallel_scheduling)
logger.info('Done scheduling tasks')
success &= worker.run()
logger.info(execution_summary.summary(worker))
return dict(success=success, worker=worker)
class PidLockAlreadyTakenExit(SystemExit):
"""
The exception thrown by :py:func:`luigi.run`, when the lock file is inaccessible
"""
pass
def run(*args, **kwargs):
return _run(*args, **kwargs)['success']
def _run(cmdline_args=None, main_task_cls=None,
worker_scheduler_factory=None, use_dynamic_argparse=None, local_scheduler=False):
"""
    Please don't use. Instead use the `luigi` binary.
Run from cmdline using argparse.
:param cmdline_args:
:param main_task_cls:
:param worker_scheduler_factory:
:param use_dynamic_argparse: Deprecated and ignored
:param local_scheduler:
"""
if use_dynamic_argparse is not None:
warnings.warn("use_dynamic_argparse is deprecated, don't set it.",
DeprecationWarning, stacklevel=2)
if cmdline_args is None:
cmdline_args = sys.argv[1:]
if main_task_cls:
cmdline_args.insert(0, main_task_cls.task_family)
if local_scheduler:
cmdline_args.insert(0, '--local-scheduler')
with CmdlineParser.global_instance(cmdline_args) as cp:
return _schedule_and_run([cp.get_task_obj()], worker_scheduler_factory)
def build(tasks, worker_scheduler_factory=None, **env_params):
"""
Run internally, bypassing the cmdline parsing.
Useful if you have some luigi code that you want to run internally.
Example:
.. code-block:: python
luigi.build([MyTask1(), MyTask2()], local_scheduler=True)
One notable difference is that `build` defaults to not using
the identical process lock. Otherwise, `build` would only be
callable once from each process.
:param tasks:
:param worker_scheduler_factory:
:param env_params:
:return: True if there were no scheduling errors, even if tasks may fail.
"""
if "no_lock" not in env_params:
env_params["no_lock"] = True
return _schedule_and_run(tasks, worker_scheduler_factory, override_defaults=env_params)['success']
| apache-2.0 |
ImaginationForPeople/sockjs-tornado | sockjs/tornado/transports/htmlfile.py | 10 | 2418 | # -*- coding: utf-8 -*-
"""
sockjs.tornado.transports.htmlfile
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
HtmlFile transport implementation.
"""
from tornado.web import asynchronous
from sockjs.tornado import proto
from sockjs.tornado.transports import streamingbase
# HTMLFILE template
HTMLFILE_HEAD = r'''
<!doctype html>
<html><head>
<meta http-equiv="X-UA-Compatible" content="IE=edge" />
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
</head><body><h2>Don't panic!</h2>
<script>
document.domain = document.domain;
var c = parent.%s;
c.start();
function p(d) {c.message(d);};
window.onload = function() {c.stop();};
</script>
'''.strip()
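# Pad the prelude to just over 1KB: the htmlfile transport streams into a
# hidden iframe, and the target browsers (notably IE) buffer roughly the first
# kilobyte before they start interpreting the document incrementally.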
HTMLFILE_HEAD += ' ' * (1024 - len(HTMLFILE_HEAD) + 14)
HTMLFILE_HEAD += '\r\n\r\n'
class HtmlFileTransport(streamingbase.StreamingTransportBase):
name = 'htmlfile'
def initialize(self, server):
super(HtmlFileTransport, self).initialize(server)
@asynchronous
def get(self, session_id):
# Start response
self.preflight()
self.handle_session_cookie()
self.disable_cache()
self.set_header('Content-Type', 'text/html; charset=UTF-8')
# Grab callback parameter
callback = self.get_argument('c', None)
if not callback:
self.write('"callback" parameter required')
self.set_status(500)
self.finish()
return
# TODO: Fix me - use parameter
self.write(HTMLFILE_HEAD % callback)
self.flush()
# Now try to attach to session
if not self._attach_session(session_id):
self.finish()
return
# Flush any pending messages
if self.session:
self.session.flush()
def send_pack(self, message, binary=False):
if binary:
raise Exception('binary not supported for HtmlFileTransport')
# TODO: Just do escaping
msg = '<script>\np(%s);\n</script>\r\n' % proto.json_encode(message)
self.active = False
try:
self.notify_sent(len(message))
self.write(msg)
self.flush(callback=self.send_complete)
except IOError:
# If connection dropped, make sure we close offending session instead
# of propagating error all way up.
self.session.delayed_close()
self._detach()
| mit |
jcchin/MagnePlane | src/hyperloop/Python/tube/tube_wall_temp.py | 4 | 21199 | """The `TubeTemp` group represents a cycle of explicit and implicit
calculations to determine the steady state temperature of the
hyperloop tube.
The `TubeWallTemp` calculates Q released/absorbed by hyperloop tube due to:
Internal Convection, Tube Conduction, Ambient Natural Convection,
Solar Flux In, Radiation Out
The `TempBalance` implicit component varies the boundary temp between the
inside and outside of the tube, until Q released matches Q absorbed.
"""
from math import log, pi, sqrt, e
from openmdao.core.group import Group, Component, IndepVarComp
from openmdao.solvers.newton import Newton
from openmdao.units.units import convert_units as cu
from openmdao.solvers.scipy_gmres import ScipyGMRES
from openmdao.api import NLGaussSeidel
from openmdao.solvers.ln_gauss_seidel import LinearGaussSeidel
from openmdao.solvers.ln_direct import DirectSolver
from pycycle import species_data
from pycycle.species_data import janaf
from pycycle.components import FlowStart
from pycycle.constants import AIR_FUEL_MIX, AIR_MIX
from pycycle.flowstation import FlowIn, PassThrough
class TempBalance(Component):
"""
Params
------
tube_area : float
tube inner area (m^2)
tube_thickness : float
tube thickness (m)
length_tube : float
Length of the entire Hyperloop tube (m)
num_pods : float
Number of Pods in the Tube at a given time
temp_boundary : float
Average Temperature of the tube wall (K). This state variable is varied
temp_outside_ambient : float
Average Temperature of the outside air (K)
nozzle_air_W : float
mass flow rate of the air exiting the pod nozzle (kg/s)
nozzle_air_T : float
temp of the air exiting the pod nozzle (K)
solar_insolation : float
solar irradiation at sea level on a clear day
nn_incidence_factor : float
Non-normal incidence factor
surface_reflectance : float
Solar Reflectance Index
emissivity_tube : float
Emmissivity of the Tube
sb_constant : float
Stefan-Boltzmann Constant (W/((m**2)*(K**4)))
Nu_multiplier : float
optional fudge factor on Nusslet number to account for
a small breeze on tube, 1 assumes no breeze
Returns
-------
diameter_outer_tube : float
outer diameter of the tube
nozzle_q : float
heat released from the nozzle (W)
q_per_area_solar : float
Solar Heat Rate Absorbed per Area (W/m**2)
q_total_solar : float
Solar Heat Absorbed by Tube (W)
area_rad : float
Tube Radiating Area (m**2)
GrDelTL3 : float
        see [1]_, Natural Convection Term (1/((ft**3)*F))
Pr : float
Prandtl #
Gr : float
Grashof #
Ra : float
Rayleigh #
Nu : float
Nusselt #
k : float
Thermal conductivity (W/(m*K))
h : float
Heat Rate Radiated to the outside (W/((m**2)*K))
area_convection : float
Convection Area (m**2)
q_per_area_nat_conv : float
Heat Radiated per Area to the outside (W/(m**2))
total_q_nat_conv : float
Total Heat Radiated to the outside via Natural Convection (W)
heat_rate_pod : float
Heating Due to a Single Pods (W)
total_heat_rate_pods : float
Heating Due to All Pods (W)
q_rad_per_area : float
Heat Radiated to the outside per area (W/(m**2))
q_rad_tot : float
Heat Radiated to the outside (W)
viewing_angle : float
Effective Area hit by Sun (m**2)
q_total_out : float
Total Heat Released via Radiation and Natural Convection (W)
q_total_in : float
Total Heat Absorbed/Added via Pods and Solar Absorption (W)
ss_temp_residual : float
Energy balance to be driven to zero (K)
Notes
-----
Some of the original calculations from Jeff Berton, ported and extended by
Jeff Chin. Compatible with OpenMDAO v1.5, python 2 and 3
References
----------
.. [1] https://mdao.grc.nasa.gov/publications/Berton-Thesis.pdf pg51
.. [2] 3rd Ed. of Introduction to Heat Transfer by Incropera and DeWitt,
equations (9.33) and (9.34) on page 465
"""
def __init__(self):
super(TempBalance, self).__init__()
self.add_param('ss_temp_residual', val=0.)
self.add_state('temp_boundary', val=322.0)
def solve_nonlinear(self, params, unknowns, resids):
pass
def apply_nonlinear(self, params, unknowns, resids):
resids['temp_boundary'] = params[
'ss_temp_residual'] #drive ss_temp_residual to 0
def apply_linear(self, params, unknowns, dparams, dunknowns, dresids,
mode):
if mode == "fwd":
if 'ss_temp_residual' in dparams and 'temp_boundary' in dresids:
dresids['temp_boundary'] += dparams['ss_temp_residual']
if mode == "rev":
if 'temp_boundary' in dresids and 'ss_temp_residual' in dparams:
dparams['ss_temp_residual'] += dresids['temp_boundary']
class TubeWallTemp(Component):
""" Calculates Q released/absorbed by the hyperloop tube """
def __init__(self, thermo_data=species_data.janaf, elements=AIR_MIX):
super(TubeWallTemp, self).__init__()
self.deriv_options['type'] = 'fd'
#--Inputs--
#Hyperloop Parameters/Design Variables
self.add_param('tube_area',
3.9057,
units='m**2',
desc='tube inner area')
self.add_param('tube_thickness',
.05,
units='m',
desc='tube thickness') #7.3ft
self.add_param(
'length_tube',
482803.,
units='m',
desc='Length of entire Hyperloop') #300 miles, 1584000ft
self.add_param('num_pods',
34.,
desc='Number of Pods in the Tube at a given time') #
self.add_param('temp_boundary',
322.0,
units='K',
desc='Average Temperature of the tube wall') #
self.add_param('temp_outside_ambient',
305.6,
units='K',
desc='Average Temperature of the outside air') #
#nozzle_air = FlowIn(iotype="in", desc="air exiting the pod nozzle")
#bearing_air = FlowIn(iotype="in", desc="air exiting the air bearings")
self.add_param('nozzle_air_W',
34.,
#units = 'kg/s',
desc='mass flow rate of the air exiting the pod nozzle')
self.add_param('nozzle_air_Cp',
1.009,
units='kJ/kg/K',
desc='specific heat of air exiting the pod nozzle')
self.add_param('nozzle_air_Tt',
34.,
#units = 'K',
desc='temp of the air exiting the pod nozzle')
# self.add_param('bearing_air_W', 34., desc='air exiting the air bearings')
# self.add_param('bearing_air_Cp', 34., desc='air exiting the air bearings')
# self.add_param('bearing_air_Tt', 34., desc='air exiting the air bearings')
#constants
self.add_param('solar_insolation',
1000.,
units='W/m**2',
desc='solar irradiation at sea level on a clear day') #
self.add_param('nn_incidence_factor',
0.7,
desc='Non-normal incidence factor') #
self.add_param('surface_reflectance',
0.5,
desc='Solar Reflectance Index') #
self.add_param('emissivity_tube',
0.5,
units='W',
desc='Emmissivity of the Tube') #
self.add_param('sb_constant',
0.00000005670373,
units='W/((m**2)*(K**4))',
desc='Stefan-Boltzmann Constant') #
self.add_param('Nu_multiplier',
1.,
desc="fudge factor on nusslet number to account for small breeze on tube")
#--Outputs--
self.add_output('diameter_outer_tube', shape=1)
self.add_output('bearing_q', shape=1)
self.add_output('nozzle_q', shape=1)
self.add_output('area_viewing', shape=1)
self.add_output('q_per_area_solar',
350.,
units='W/m**2',
desc='Solar Heat Rate Absorbed per Area') #
self.add_output('q_total_solar',
375989751.,
units='W',
desc='Solar Heat Absorbed by Tube') #
self.add_output('area_rad',
337486.1,
units='m**2',
desc='Tube Radiating Area') #
#Required for Natural Convection Calcs
self.add_output('GrDelTL3',
1946216.7,
units='1/((ft**3)*F)',
desc='Heat Radiated to the outside') #
self.add_output('Pr', 0.707, desc='Prandtl') #
self.add_output('Gr', 12730351223., desc='Grashof #') #
self.add_output('Ra', 8996312085., desc='Rayleigh #') #
self.add_output('Nu', 232.4543713, desc='Nusselt #') #
self.add_output('k',
0.02655,
units='W/(m*K)',
desc='Thermal conductivity') #
self.add_output('h',
0.845464094,
units='W/((m**2)*K)',
desc='Heat Radiated to the outside') #
self.add_output('area_convection',
3374876.115,
units='m**2',
desc='Convection Area') #
#Natural Convection
self.add_output('q_per_area_nat_conv',
7.9,
units='W/(m**2)',
desc='Heat Radiated per Area to the outside') #
self.add_output(
'total_q_nat_conv',
286900419.,
units='W',
desc='Total Heat Radiated to the outside via Natural Convection') #
#Exhausted from Pods
self.add_output('heat_rate_pod',
519763.,
units='W',
desc='Heating Due to a Single Pods') #
self.add_output('total_heat_rate_pods',
17671942.,
units='W',
                        desc='Heating Due to All Pods') #
#Radiated Out
self.add_output('q_rad_per_area',
31.6,
units='W/(m**2)',
desc='Heat Radiated to the outside') #
self.add_output('q_rad_tot',
106761066.5,
units='W',
desc='Heat Radiated to the outside') #
#Radiated In
self.add_output('viewing_angle',
1074256.,
units='m**2',
desc='Effective Area hit by Sun') #
#Total Heating
self.add_output('q_total_out',
286900419.,
units='W',
desc='Total Heat Released via Radiation and Natural Convection') #
self.add_output('q_total_in',
286900419.,
units='W',
desc='Total Heat Absorbed/Added via Pods and Solar Absorption') #
#Residual (for solver)
self.add_output('ss_temp_residual',
shape=1.,
units='K',
desc='Residual of T_released - T_absorbed')
def solve_nonlinear(self, p, u, r):
"""Calculate Various Paramters"""
u['diameter_outer_tube'] = 2*sqrt(p['tube_area']/pi) + p['tube_thickness']
# u['bearing_q'] = cu(p['bearing_air_W'], 'lbm/s', 'kg/s') * cu(
# p['bearing_air_Cp'], 'Btu/(lbm*degR)', 'J/(kg*K)') * (
# cu(p['bearing_air_Tt'], 'degR', 'degK') - p['temp_boundary'])
u['nozzle_q'] = cu(p['nozzle_air_W'], 'lbm/s', 'kg/s') * cu(
p['nozzle_air_Cp'], 'Btu/(lbm*degR)', 'J/(kg*K)') * (
cu(p['nozzle_air_Tt'], 'degR', 'degK') - p['temp_boundary'])
#Q = mdot * cp * deltaT
u['heat_rate_pod'] = u['nozzle_q'] #+ u['bearing_q']
#Total Q = Q * (number of pods)
u['total_heat_rate_pods'] = u['heat_rate_pod'] * p['num_pods']
#Determine thermal resistance of outside via Natural Convection or forced convection
if (p['temp_outside_ambient'] < 400):
u['GrDelTL3'] = 41780000000000000000 * (
(p['temp_outside_ambient'])**(-4.639)
) #SI units (https://mdao.grc.nasa.gov/publications/Berton-Thesis.pdf pg51)
else:
u['GrDelTL3'] = 4985000000000000000 * (
(p['temp_outside_ambient'])**(-4.284)
) #SI units (https://mdao.grc.nasa.gov/publications/Berton-Thesis.pdf pg51)
#Prandtl Number
#Pr = viscous diffusion rate/ thermal diffusion rate = Cp * dyanamic viscosity / thermal conductivity
#Pr << 1 means thermal diffusivity dominates
#Pr >> 1 means momentum diffusivity dominates
if (p['temp_outside_ambient'] < 400):
u['Pr'] = 1.23 * (
p['temp_outside_ambient']**(-0.09685)
) #SI units (https://mdao.grc.nasa.gov/publications/Berton-Thesis.pdf pg51)
else:
u['Pr'] = 0.59 * (p['temp_outside_ambient']**(0.0239))
#Grashof Number
#Relationship between buoyancy and viscosity
#Laminar = Gr < 10^8
#Turbulent = Gr > 10^9
u['Gr'] = u['GrDelTL3'] * abs(p['temp_boundary'] - p[
'temp_outside_ambient']) * (
u['diameter_outer_tube']**3
) #JSG: Added abs incase subtraction goes negative
#Rayleigh Number
#Buoyancy driven flow (natural convection)
u['Ra'] = u['Pr'] * u['Gr']
#Nusselt Number
#Nu = convecive heat transfer / conductive heat transfer
if (u['Ra'] <= 10**12): #valid in specific flow regime
u['Nu'] = p['Nu_multiplier'] * (
(0.6 + 0.387 * u['Ra']**(1. / 6.) /
(1 + (0.559 / u['Pr'])**(9. / 16.))**(8. / 27.))**2
) #3rd Ed. of Introduction to Heat Transfer by Incropera and DeWitt, equations (9.33) and (9.34) on page 465
if (p['temp_outside_ambient'] < 400):
u['k'] = 0.0001423 * (
p['temp_outside_ambient']**(0.9138)
) #SI units (https://mdao.grc.nasa.gov/publications/Berton-Thesis.pdf pg51)
else:
u['k'] = 0.0002494 * (p['temp_outside_ambient']**(0.8152))
#h = k*Nu/Characteristic Length
u['h'] = (u['k'] * u['Nu']) / u['diameter_outer_tube']
#Convection Area = Surface Area
u['area_convection'] = pi * p['length_tube'] * u['diameter_outer_tube']
#Determine heat radiated per square meter (Q)
u['q_per_area_nat_conv'] = u['h'] * (
p['temp_boundary'] - p['temp_outside_ambient'])
#Determine total heat radiated over entire tube (Qtotal)
u['total_q_nat_conv'] = u['q_per_area_nat_conv'] * u['area_convection']
#Determine heat incoming via Sun radiation (Incidence Flux)
#Sun hits an effective rectangular cross section
u['area_viewing'] = p['length_tube'] * u['diameter_outer_tube']
u['q_per_area_solar'] = (
1 - p['surface_reflectance']
) * p['nn_incidence_factor'] * p['solar_insolation']
u['q_total_solar'] = u['q_per_area_solar'] * u['area_viewing']
#Determine heat released via radiation
#Radiative area = surface area
u['area_rad'] = u['area_convection']
#P/A = SB*emmisitivity*(T^4 - To^4)
u['q_rad_per_area'] = p['sb_constant'] * p['emissivity_tube'] * (
(p['temp_boundary']**4) - (p['temp_outside_ambient']**4))
#P = A * (P/A)
u['q_rad_tot'] = u['area_rad'] * u['q_rad_per_area']
#------------
#Sum Up
u['q_total_out'] = u['q_rad_tot'] + u['total_q_nat_conv']
u['q_total_in'] = u['q_total_solar'] + u['total_heat_rate_pods']
u['ss_temp_residual'] = (u['q_total_out'] - u['q_total_in']) / 1e6
# print("u['ss_temp_residual'] ", u['ss_temp_residual'])
# print("temp boundary", p['temp_boundary'])
class TubeTemp(Group):
"""An Assembly that computes Steady State temp"""
def __init__(self):
super(TubeTemp, self).__init__()
self.add('tm', TubeWallTemp(), promotes=[
'length_tube','tube_area','tube_thickness','num_pods',
'nozzle_air_W','nozzle_air_Tt'])
self.add('tmp_balance', TempBalance(), promotes=['temp_boundary'])
#self.add('nozzle_air', FlowStart(thermo_data=janaf, elements=AIR_MIX))
#self.add('bearing_air', FlowStart(thermo_data=janaf, elements=AIR_MIX))
#self.connect("nozzle_air.Fl_O:tot:T", "tm.nozzle_air_Tt")
#self.connect("nozzle_air.Fl_O:tot:Cp", "tm.nozzle_air_Cp")
#self.connect("nozzle_air.Fl_O:stat:W", "tm.nozzle_air_W")
self.connect('tm.ss_temp_residual', 'tmp_balance.ss_temp_residual')
self.connect('temp_boundary', 'tm.temp_boundary')
self.nl_solver = Newton()
self.nl_solver.options['atol'] = 1e-5
self.nl_solver.options['iprint'] = 1
self.nl_solver.options['rtol'] = 1e-5
self.nl_solver.options['maxiter'] = 50
self.ln_solver = ScipyGMRES()
self.ln_solver.options['atol'] = 1e-6
self.ln_solver.options['maxiter'] = 100
self.ln_solver.options['restart'] = 100
#run stand-alone component
if __name__ == "__main__":
from openmdao.api import Problem
prob = Problem()
prob.root = Group()
prob.root.add('tt', TubeTemp())
params = (('P', 0.3, {'units': 'psi'}), ('T', 1500.0, {'units': 'degR'}),
('W', 1.0, {'units': 'lbm/s'}), ('Cp', 0.24, {'units': 'Btu/(lbm*degF)'}))
dvars = (
('tube_area',3.9057), #desc='Tube out diameter' #7.3ft
('tube_thickness',.05),
('length_tube',
482803.), #desc='Length of entire Hyperloop') #300 miles, 1584000ft
('num_pods',
34.), #desc='Number of Pods in the Tube at a given time') #
('temp_boundary', 340.), #desc='Average Temperature of the tube') #
('temp_outside_ambient', 305.6
) #desc='Average Temperature of the outside air
)
#nozzle
prob.root.add('des_vars', IndepVarComp(params))
#bearings
prob.root.add('des_vars2', IndepVarComp(params))
#tube inputs
prob.root.add('vars', IndepVarComp(dvars))
prob.root.connect('des_vars.T', 'tt.nozzle_air_Tt')
prob.root.connect('des_vars.W', 'tt.nozzle_air_W')
prob.root.connect('vars.tube_area', 'tt.tube_area')
prob.root.connect('vars.tube_thickness', 'tt.tube_thickness')
prob.root.connect('vars.length_tube', 'tt.length_tube')
prob.root.connect('vars.num_pods', 'tt.num_pods')
prob.root.connect('vars.temp_outside_ambient','tt.tm.temp_outside_ambient')
prob.setup()
prob.root.list_connections()
prob['des_vars.T'] = 1710.0
prob['des_vars.P'] = 0.304434211
prob['des_vars.W'] = 1.08
prob['des_vars.Cp'] = 0.24
prob.run()
print("temp_boundary: ", prob['tt.tm.temp_boundary'])
print("temp_resid: ", prob['tt.tm.ss_temp_residual'])
print("tt.nozzle_air_Tt", prob['tt.'])
# print "-----Completed Tube Heat Flux Model Calculations---"
# print ""
# print "CompressQ-{} SolarQ-{} RadQ-{} ConvecQ-{}".format(test.tm.total_heat_rate_pods, test.tm.q_total_solar, test.tm.q_rad_tot, test.tm.total_q_nat_conv )
# print "Equilibrium Wall Temperature: {} K or {} F".format(tesparams['temp_boundary'], cu(tesparams['temp_boundary'],'degK','degF'))
# print "Ambient Temperature: {} K or {} F".format(test.tm.temp_outside_ambient, cu(test.tm.temp_outside_ambient,'degK','degF'))
# print "Q Out = {} W ==> Q In = {} W ==> Error: {}%".format(test.tm.q_total_out,test.tm.q_total_in,((test.tm.q_total_out-test.tm.q_total_in)/test.tm.q_total_out)*100)
| apache-2.0 |
jaruba/chromium.src | sync/tools/testserver/chromiumsync.py | 18 | 65492 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""An implementation of the server side of the Chromium sync protocol.
The details of the protocol are described mostly by comments in the protocol
buffer definition at chrome/browser/sync/protocol/sync.proto.
"""
import base64
import cgi
import copy
import google.protobuf.text_format
import hashlib
import operator
import pickle
import random
import string
import sys
import threading
import time
import urlparse
import uuid
import app_list_specifics_pb2
import app_notification_specifics_pb2
import app_setting_specifics_pb2
import app_specifics_pb2
import article_specifics_pb2
import autofill_specifics_pb2
import bookmark_specifics_pb2
import client_commands_pb2
import dictionary_specifics_pb2
import get_updates_caller_info_pb2
import extension_setting_specifics_pb2
import extension_specifics_pb2
import favicon_image_specifics_pb2
import favicon_tracking_specifics_pb2
import history_delete_directive_specifics_pb2
import managed_user_setting_specifics_pb2
import managed_user_specifics_pb2
import managed_user_shared_setting_specifics_pb2
import managed_user_whitelist_specifics_pb2
import nigori_specifics_pb2
import password_specifics_pb2
import preference_specifics_pb2
import priority_preference_specifics_pb2
import search_engine_specifics_pb2
import session_specifics_pb2
import sync_pb2
import sync_enums_pb2
import synced_notification_app_info_specifics_pb2
import synced_notification_specifics_pb2
import theme_specifics_pb2
import typed_url_specifics_pb2
import wifi_credential_specifics_pb2
# An enumeration of the various kinds of data that can be synced.
# Over the wire, this enumeration is not used: a sync object's type is
# inferred by which EntitySpecifics field it has. But in the context
# of a program, it is useful to have an enumeration.
ALL_TYPES = (
TOP_LEVEL, # The type of the 'Google Chrome' folder.
APPS,
APP_LIST,
APP_NOTIFICATION,
APP_SETTINGS,
ARTICLE,
AUTOFILL,
AUTOFILL_PROFILE,
BOOKMARK,
DEVICE_INFO,
DICTIONARY,
EXPERIMENTS,
EXTENSIONS,
HISTORY_DELETE_DIRECTIVE,
MANAGED_USER_SETTING,
MANAGED_USER_SHARED_SETTING,
MANAGED_USER_WHITELIST,
MANAGED_USER,
NIGORI,
PASSWORD,
PREFERENCE,
PRIORITY_PREFERENCE,
SEARCH_ENGINE,
SESSION,
SYNCED_NOTIFICATION,
SYNCED_NOTIFICATION_APP_INFO,
THEME,
TYPED_URL,
EXTENSION_SETTINGS,
FAVICON_IMAGES,
FAVICON_TRACKING,
WIFI_CREDENTIAL) = range(32)
# An enumeration on the frequency at which the server should send errors
# to the client. This would be specified by the url that triggers the error.
# Note: This enum should be kept in the same order as the enum in sync_test.h.
SYNC_ERROR_FREQUENCY = (
ERROR_FREQUENCY_NONE,
ERROR_FREQUENCY_ALWAYS,
ERROR_FREQUENCY_TWO_THIRDS) = range(3)
# Well-known server tag of the top level 'Google Chrome' folder.
TOP_LEVEL_FOLDER_TAG = 'google_chrome'
# Given a sync type from ALL_TYPES, find the FieldDescriptor corresponding
# to that datatype. Note that TOP_LEVEL has no such token.
SYNC_TYPE_FIELDS = sync_pb2.EntitySpecifics.DESCRIPTOR.fields_by_name
SYNC_TYPE_TO_DESCRIPTOR = {
APP_LIST: SYNC_TYPE_FIELDS['app_list'],
APP_NOTIFICATION: SYNC_TYPE_FIELDS['app_notification'],
APP_SETTINGS: SYNC_TYPE_FIELDS['app_setting'],
APPS: SYNC_TYPE_FIELDS['app'],
ARTICLE: SYNC_TYPE_FIELDS['article'],
AUTOFILL: SYNC_TYPE_FIELDS['autofill'],
AUTOFILL_PROFILE: SYNC_TYPE_FIELDS['autofill_profile'],
BOOKMARK: SYNC_TYPE_FIELDS['bookmark'],
DEVICE_INFO: SYNC_TYPE_FIELDS['device_info'],
DICTIONARY: SYNC_TYPE_FIELDS['dictionary'],
EXPERIMENTS: SYNC_TYPE_FIELDS['experiments'],
EXTENSION_SETTINGS: SYNC_TYPE_FIELDS['extension_setting'],
EXTENSIONS: SYNC_TYPE_FIELDS['extension'],
FAVICON_IMAGES: SYNC_TYPE_FIELDS['favicon_image'],
FAVICON_TRACKING: SYNC_TYPE_FIELDS['favicon_tracking'],
HISTORY_DELETE_DIRECTIVE: SYNC_TYPE_FIELDS['history_delete_directive'],
MANAGED_USER_SHARED_SETTING:
SYNC_TYPE_FIELDS['managed_user_shared_setting'],
MANAGED_USER_SETTING: SYNC_TYPE_FIELDS['managed_user_setting'],
MANAGED_USER_WHITELIST: SYNC_TYPE_FIELDS['managed_user_whitelist'],
MANAGED_USER: SYNC_TYPE_FIELDS['managed_user'],
NIGORI: SYNC_TYPE_FIELDS['nigori'],
PASSWORD: SYNC_TYPE_FIELDS['password'],
PREFERENCE: SYNC_TYPE_FIELDS['preference'],
PRIORITY_PREFERENCE: SYNC_TYPE_FIELDS['priority_preference'],
SEARCH_ENGINE: SYNC_TYPE_FIELDS['search_engine'],
SESSION: SYNC_TYPE_FIELDS['session'],
SYNCED_NOTIFICATION: SYNC_TYPE_FIELDS["synced_notification"],
SYNCED_NOTIFICATION_APP_INFO:
SYNC_TYPE_FIELDS["synced_notification_app_info"],
THEME: SYNC_TYPE_FIELDS['theme'],
TYPED_URL: SYNC_TYPE_FIELDS['typed_url'],
WIFI_CREDENTIAL: SYNC_TYPE_FIELDS["wifi_credential"],
}
# The parent ID used to indicate a top-level node.
ROOT_ID = '0'
# Unix time epoch +1 day in struct_time format. The tuple corresponds to
# UTC Thursday Jan 2 1970, 00:00:00, non-dst.
# We have to add one day after start of epoch, since in timezones with positive
# UTC offset time.mktime throws an OverflowError,
# rather then returning negative number.
FIRST_DAY_UNIX_TIME_EPOCH = (1970, 1, 2, 0, 0, 0, 4, 2, 0)
ONE_DAY_SECONDS = 60 * 60 * 24
# The number of characters in the server-generated encryption key.
KEYSTORE_KEY_LENGTH = 16
# The hashed client tags for some experiment nodes.
KEYSTORE_ENCRYPTION_EXPERIMENT_TAG = "pis8ZRzh98/MKLtVEio2mr42LQA="
PRE_COMMIT_GU_AVOIDANCE_EXPERIMENT_TAG = "Z1xgeh3QUBa50vdEPd8C/4c7jfE="
class Error(Exception):
"""Error class for this module."""
class ProtobufDataTypeFieldNotUnique(Error):
"""An entry should not have more than one data type present."""
class DataTypeIdNotRecognized(Error):
"""The requested data type is not recognized."""
class MigrationDoneError(Error):
"""A server-side migration occurred; clients must re-sync some datatypes.
Attributes:
datatypes: a list of the datatypes (python enum) needing migration.
"""
def __init__(self, datatypes):
self.datatypes = datatypes
class StoreBirthdayError(Error):
"""The client sent a birthday that doesn't correspond to this server."""
class TransientError(Error):
"""The client would be sent a transient error."""
class SyncInducedError(Error):
"""The client would be sent an error."""
class InducedErrorFrequencyNotDefined(Error):
"""The error frequency defined is not handled."""
class ClientNotConnectedError(Error):
"""The client is not connected to the server."""
def GetEntryType(entry):
"""Extract the sync type from a SyncEntry.
Args:
entry: A SyncEntity protobuf object whose type to determine.
Returns:
An enum value from ALL_TYPES if the entry's type can be determined, or None
if the type cannot be determined.
Raises:
ProtobufDataTypeFieldNotUnique: More than one type was indicated by
the entry.
"""
if entry.server_defined_unique_tag == TOP_LEVEL_FOLDER_TAG:
return TOP_LEVEL
entry_types = GetEntryTypesFromSpecifics(entry.specifics)
if not entry_types:
return None
# If there is more than one, either there's a bug, or else the caller
# should use GetEntryTypes.
if len(entry_types) > 1:
raise ProtobufDataTypeFieldNotUnique
return entry_types[0]
def GetEntryTypesFromSpecifics(specifics):
"""Determine the sync types indicated by an EntitySpecifics's field(s).
If the specifics have more than one recognized data type field (as commonly
happens with the requested_types field of GetUpdatesMessage), all types
will be returned. Callers must handle the possibility of the returned
value having more than one item.
Args:
specifics: A EntitySpecifics protobuf message whose extensions to
enumerate.
Returns:
A list of the sync types (values from ALL_TYPES) associated with each
recognized extension of the specifics message.
"""
return [data_type for data_type, field_descriptor
in SYNC_TYPE_TO_DESCRIPTOR.iteritems()
if specifics.HasField(field_descriptor.name)]
def SyncTypeToProtocolDataTypeId(data_type):
"""Convert from a sync type (python enum) to the protocol's data type id."""
return SYNC_TYPE_TO_DESCRIPTOR[data_type].number
def ProtocolDataTypeIdToSyncType(protocol_data_type_id):
"""Convert from the protocol's data type id to a sync type (python enum)."""
for data_type, field_descriptor in SYNC_TYPE_TO_DESCRIPTOR.iteritems():
if field_descriptor.number == protocol_data_type_id:
return data_type
raise DataTypeIdNotRecognized
def DataTypeStringToSyncTypeLoose(data_type_string):
"""Converts a human-readable string to a sync type (python enum).
Capitalization and pluralization don't matter; this function is appropriate
for values that might have been typed by a human being; e.g., command-line
flags or query parameters.
"""
if data_type_string.isdigit():
return ProtocolDataTypeIdToSyncType(int(data_type_string))
name = data_type_string.lower().rstrip('s')
for data_type, field_descriptor in SYNC_TYPE_TO_DESCRIPTOR.iteritems():
if field_descriptor.name.lower().rstrip('s') == name:
return data_type
raise DataTypeIdNotRecognized
def MakeNewKeystoreKey():
"""Returns a new random keystore key."""
return ''.join(random.choice(string.ascii_uppercase + string.digits)
for x in xrange(KEYSTORE_KEY_LENGTH))
def SyncTypeToString(data_type):
"""Formats a sync type enum (from ALL_TYPES) to a human-readable string."""
return SYNC_TYPE_TO_DESCRIPTOR[data_type].name
def CallerInfoToString(caller_info_source):
"""Formats a GetUpdatesSource enum value to a readable string."""
return get_updates_caller_info_pb2.GetUpdatesCallerInfo \
.DESCRIPTOR.enum_types_by_name['GetUpdatesSource'] \
.values_by_number[caller_info_source].name
def ShortDatatypeListSummary(data_types):
"""Formats compactly a list of sync types (python enums) for human eyes.
This function is intended for use by logging. If the list of datatypes
contains almost all of the values, the return value will be expressed
in terms of the datatypes that aren't set.
"""
included = set(data_types) - set([TOP_LEVEL])
if not included:
return 'nothing'
excluded = set(ALL_TYPES) - included - set([TOP_LEVEL])
if not excluded:
return 'everything'
simple_text = '+'.join(sorted([SyncTypeToString(x) for x in included]))
all_but_text = 'all except %s' % (
'+'.join(sorted([SyncTypeToString(x) for x in excluded])))
if len(included) < len(excluded) or len(simple_text) <= len(all_but_text):
return simple_text
else:
return all_but_text
def GetDefaultEntitySpecifics(data_type):
"""Get an EntitySpecifics having a sync type's default field value."""
specifics = sync_pb2.EntitySpecifics()
if data_type in SYNC_TYPE_TO_DESCRIPTOR:
descriptor = SYNC_TYPE_TO_DESCRIPTOR[data_type]
getattr(specifics, descriptor.name).SetInParent()
return specifics
class PermanentItem(object):
"""A specification of one server-created permanent item.
Attributes:
tag: A known-to-the-client value that uniquely identifies a server-created
permanent item.
name: The human-readable display name for this item.
parent_tag: The tag of the permanent item's parent. If ROOT_ID, indicates
a top-level item. Otherwise, this must be the tag value of some other
server-created permanent item.
sync_type: A value from ALL_TYPES, giving the datatype of this permanent
item. This controls which types of client GetUpdates requests will
cause the permanent item to be created and returned.
create_by_default: Whether the permanent item is created at startup or not.
This value is set to True in the default case. Non-default permanent items
are those that are created only when a client explicitly tells the server
to do so.
"""
def __init__(self, tag, name, parent_tag, sync_type, create_by_default=True):
self.tag = tag
self.name = name
self.parent_tag = parent_tag
self.sync_type = sync_type
self.create_by_default = create_by_default
class MigrationHistory(object):
"""A record of the migration events associated with an account.
Each migration event invalidates one or more datatypes on all clients
that had synced the datatype before the event. Such clients will continue
to receive MigrationDone errors until they throw away their progress and
re-sync that datatype from the beginning.
"""
def __init__(self):
self._migrations = {}
for datatype in ALL_TYPES:
self._migrations[datatype] = [1]
self._next_migration_version = 2
def GetLatestVersion(self, datatype):
return self._migrations[datatype][-1]
def CheckAllCurrent(self, versions_map):
"""Raises an error if any the provided versions are out of date.
This function intentionally returns migrations in the order that they were
triggered. Doing it this way allows the client to queue up two migrations
in a row, so the second one is received while responding to the first.
Arguments:
      versions_map: a map whose keys are datatypes and whose values are versions.
Raises:
MigrationDoneError: if a mismatch is found.
"""
problems = {}
for datatype, client_migration in versions_map.iteritems():
for server_migration in self._migrations[datatype]:
if client_migration < server_migration:
problems.setdefault(server_migration, []).append(datatype)
if problems:
raise MigrationDoneError(problems[min(problems.keys())])
def Bump(self, datatypes):
"""Add a record of a migration, to cause errors on future requests."""
for idx, datatype in enumerate(datatypes):
self._migrations[datatype].append(self._next_migration_version)
self._next_migration_version += 1
class UpdateSieve(object):
"""A filter to remove items the client has already seen."""
def __init__(self, request, migration_history=None):
self._original_request = request
self._state = {}
self._migration_history = migration_history or MigrationHistory()
self._migration_versions_to_check = {}
if request.from_progress_marker:
for marker in request.from_progress_marker:
data_type = ProtocolDataTypeIdToSyncType(marker.data_type_id)
if marker.HasField('timestamp_token_for_migration'):
timestamp = marker.timestamp_token_for_migration
if timestamp:
self._migration_versions_to_check[data_type] = 1
elif marker.token:
(timestamp, version) = pickle.loads(marker.token)
self._migration_versions_to_check[data_type] = version
elif marker.HasField('token'):
timestamp = 0
else:
raise ValueError('No timestamp information in progress marker.')
data_type = ProtocolDataTypeIdToSyncType(marker.data_type_id)
self._state[data_type] = timestamp
elif request.HasField('from_timestamp'):
for data_type in GetEntryTypesFromSpecifics(request.requested_types):
self._state[data_type] = request.from_timestamp
self._migration_versions_to_check[data_type] = 1
if self._state:
self._state[TOP_LEVEL] = min(self._state.itervalues())
def SummarizeRequest(self):
timestamps = {}
for data_type, timestamp in self._state.iteritems():
if data_type == TOP_LEVEL:
continue
timestamps.setdefault(timestamp, []).append(data_type)
return ', '.join('<%s>@%d' % (ShortDatatypeListSummary(types), stamp)
for stamp, types in sorted(timestamps.iteritems()))
def CheckMigrationState(self):
self._migration_history.CheckAllCurrent(self._migration_versions_to_check)
def ClientWantsItem(self, item):
"""Return true if the client hasn't already seen an item."""
return self._state.get(GetEntryType(item), sys.maxint) < item.version
def HasAnyTimestamp(self):
"""Return true if at least one datatype was requested."""
return bool(self._state)
def GetMinTimestamp(self):
"""Return true the smallest timestamp requested across all datatypes."""
return min(self._state.itervalues())
def GetFirstTimeTypes(self):
"""Return a list of datatypes requesting updates from timestamp zero."""
return [datatype for datatype, timestamp in self._state.iteritems()
if timestamp == 0]
def GetCreateMobileBookmarks(self):
"""Return true if the client has requested to create the 'Mobile Bookmarks'
folder.
"""
return (self._original_request.HasField('create_mobile_bookmarks_folder')
and self._original_request.create_mobile_bookmarks_folder)
def SaveProgress(self, new_timestamp, get_updates_response):
"""Write the new_timestamp or new_progress_marker fields to a response."""
if self._original_request.from_progress_marker:
for data_type, old_timestamp in self._state.iteritems():
if data_type == TOP_LEVEL:
continue
new_marker = sync_pb2.DataTypeProgressMarker()
new_marker.data_type_id = SyncTypeToProtocolDataTypeId(data_type)
final_stamp = max(old_timestamp, new_timestamp)
final_migration = self._migration_history.GetLatestVersion(data_type)
new_marker.token = pickle.dumps((final_stamp, final_migration))
get_updates_response.new_progress_marker.add().MergeFrom(new_marker)
elif self._original_request.HasField('from_timestamp'):
if self._original_request.from_timestamp < new_timestamp:
get_updates_response.new_timestamp = new_timestamp
class SyncDataModel(object):
"""Models the account state of one sync user."""
_BATCH_SIZE = 100
# Specify all the permanent items that a model might need.
_PERMANENT_ITEM_SPECS = [
PermanentItem('google_chrome_apps', name='Apps',
parent_tag=ROOT_ID, sync_type=APPS),
PermanentItem('google_chrome_app_list', name='App List',
parent_tag=ROOT_ID, sync_type=APP_LIST),
PermanentItem('google_chrome_app_notifications', name='App Notifications',
parent_tag=ROOT_ID, sync_type=APP_NOTIFICATION),
PermanentItem('google_chrome_app_settings',
name='App Settings',
parent_tag=ROOT_ID, sync_type=APP_SETTINGS),
PermanentItem('google_chrome_bookmarks', name='Bookmarks',
parent_tag=ROOT_ID, sync_type=BOOKMARK),
PermanentItem('bookmark_bar', name='Bookmark Bar',
parent_tag='google_chrome_bookmarks', sync_type=BOOKMARK),
PermanentItem('other_bookmarks', name='Other Bookmarks',
parent_tag='google_chrome_bookmarks', sync_type=BOOKMARK),
PermanentItem('synced_bookmarks', name='Synced Bookmarks',
parent_tag='google_chrome_bookmarks', sync_type=BOOKMARK,
create_by_default=False),
PermanentItem('google_chrome_autofill', name='Autofill',
parent_tag=ROOT_ID, sync_type=AUTOFILL),
PermanentItem('google_chrome_autofill_profiles', name='Autofill Profiles',
parent_tag=ROOT_ID, sync_type=AUTOFILL_PROFILE),
PermanentItem('google_chrome_device_info', name='Device Info',
parent_tag=ROOT_ID, sync_type=DEVICE_INFO),
PermanentItem('google_chrome_experiments', name='Experiments',
parent_tag=ROOT_ID, sync_type=EXPERIMENTS),
PermanentItem('google_chrome_extension_settings',
name='Extension Settings',
parent_tag=ROOT_ID, sync_type=EXTENSION_SETTINGS),
PermanentItem('google_chrome_extensions', name='Extensions',
parent_tag=ROOT_ID, sync_type=EXTENSIONS),
PermanentItem('google_chrome_history_delete_directives',
name='History Delete Directives',
parent_tag=ROOT_ID,
sync_type=HISTORY_DELETE_DIRECTIVE),
PermanentItem('google_chrome_favicon_images',
name='Favicon Images',
parent_tag=ROOT_ID,
sync_type=FAVICON_IMAGES),
PermanentItem('google_chrome_favicon_tracking',
name='Favicon Tracking',
parent_tag=ROOT_ID,
sync_type=FAVICON_TRACKING),
PermanentItem('google_chrome_managed_user_settings',
name='Managed User Settings',
parent_tag=ROOT_ID, sync_type=MANAGED_USER_SETTING),
PermanentItem('google_chrome_managed_users',
name='Managed Users',
parent_tag=ROOT_ID, sync_type=MANAGED_USER),
PermanentItem('google_chrome_managed_user_shared_settings',
name='Managed User Shared Settings',
parent_tag=ROOT_ID, sync_type=MANAGED_USER_SHARED_SETTING),
PermanentItem('google_chrome_managed_user_whitelists',
name='Managed User Whitelists', parent_tag=ROOT_ID,
sync_type=MANAGED_USER_WHITELIST),
PermanentItem('google_chrome_nigori', name='Nigori',
parent_tag=ROOT_ID, sync_type=NIGORI),
PermanentItem('google_chrome_passwords', name='Passwords',
parent_tag=ROOT_ID, sync_type=PASSWORD),
PermanentItem('google_chrome_preferences', name='Preferences',
parent_tag=ROOT_ID, sync_type=PREFERENCE),
PermanentItem('google_chrome_priority_preferences',
name='Priority Preferences',
parent_tag=ROOT_ID, sync_type=PRIORITY_PREFERENCE),
PermanentItem('google_chrome_synced_notifications',
name='Synced Notifications',
parent_tag=ROOT_ID, sync_type=SYNCED_NOTIFICATION),
PermanentItem('google_chrome_synced_notification_app_info',
name='Synced Notification App Info',
parent_tag=ROOT_ID, sync_type=SYNCED_NOTIFICATION_APP_INFO),
PermanentItem('google_chrome_search_engines', name='Search Engines',
parent_tag=ROOT_ID, sync_type=SEARCH_ENGINE),
PermanentItem('google_chrome_sessions', name='Sessions',
parent_tag=ROOT_ID, sync_type=SESSION),
PermanentItem('google_chrome_themes', name='Themes',
parent_tag=ROOT_ID, sync_type=THEME),
PermanentItem('google_chrome_typed_urls', name='Typed URLs',
parent_tag=ROOT_ID, sync_type=TYPED_URL),
PermanentItem('google_chrome_wifi_credentials', name='WiFi Credentials',
parent_tag=ROOT_ID, sync_type=WIFI_CREDENTIAL),
PermanentItem('google_chrome_dictionary', name='Dictionary',
parent_tag=ROOT_ID, sync_type=DICTIONARY),
PermanentItem('google_chrome_articles', name='Articles',
parent_tag=ROOT_ID, sync_type=ARTICLE),
]
def __init__(self):
# Monotonically increasing version number. The next object change will
# take on this value + 1.
self._version = 0
# The definitive copy of this client's items: a map from ID string to a
# SyncEntity protocol buffer.
self._entries = {}
self.ResetStoreBirthday()
self.migration_history = MigrationHistory()
self.induced_error = sync_pb2.ClientToServerResponse.Error()
self.induced_error_frequency = 0
self.sync_count_before_errors = 0
self.acknowledge_managed_users = False
self._keys = [MakeNewKeystoreKey()]
def _SaveEntry(self, entry):
"""Insert or update an entry in the change log, and give it a new version.
The ID fields of this entry are assumed to be valid server IDs. This
entry will be updated with a new version number and sync_timestamp.
Args:
entry: The entry to be added or updated.
"""
self._version += 1
# Maintain a global (rather than per-item) sequence number and use it
# both as the per-entry version as well as the update-progress timestamp.
# This simulates the behavior of the original server implementation.
entry.version = self._version
entry.sync_timestamp = self._version
# Preserve the originator info, which the client is not required to send
# when updating.
base_entry = self._entries.get(entry.id_string)
if base_entry:
entry.originator_cache_guid = base_entry.originator_cache_guid
entry.originator_client_item_id = base_entry.originator_client_item_id
self._entries[entry.id_string] = copy.deepcopy(entry)
def _ServerTagToId(self, tag):
"""Determine the server ID from a server-unique tag.
The resulting value is guaranteed not to collide with the other ID
generation methods.
Args:
tag: The unique, known-to-the-client tag of a server-generated item.
Returns:
The string value of the computed server ID.
"""
if not tag or tag == ROOT_ID:
return tag
spec = [x for x in self._PERMANENT_ITEM_SPECS if x.tag == tag][0]
return self._MakeCurrentId(spec.sync_type, '<server tag>%s' % tag)
def _TypeToTypeRootId(self, model_type):
"""Returns the server ID for the type root node of the given type."""
tag = [x.tag for x in self._PERMANENT_ITEM_SPECS
if x.sync_type == model_type][0]
return self._ServerTagToId(tag)
def _ClientTagToId(self, datatype, tag):
"""Determine the server ID from a client-unique tag.
The resulting value is guaranteed not to collide with the other ID
generation methods.
Args:
datatype: The sync type (python enum) of the identified object.
tag: The unique, opaque-to-the-server tag of a client-tagged item.
Returns:
The string value of the computed server ID.
"""
return self._MakeCurrentId(datatype, '<client tag>%s' % tag)
def _ClientIdToId(self, datatype, client_guid, client_item_id):
"""Compute a unique server ID from a client-local ID tag.
The resulting value is guaranteed not to collide with the other ID
generation methods.
Args:
datatype: The sync type (python enum) of the identified object.
client_guid: A globally unique ID that identifies the client which
created this item.
client_item_id: An ID that uniquely identifies this item on the client
which created it.
Returns:
The string value of the computed server ID.
"""
# Using the client ID info is not required here (we could instead generate
# a random ID), but it's useful for debugging.
return self._MakeCurrentId(datatype,
'<server ID originally>%s/%s' % (client_guid, client_item_id))
def _MakeCurrentId(self, datatype, inner_id):
return '%d^%d^%s' % (datatype,
self.migration_history.GetLatestVersion(datatype),
inner_id)
def _ExtractIdInfo(self, id_string):
if not id_string or id_string == ROOT_ID:
return None
datatype_string, separator, remainder = id_string.partition('^')
migration_version_string, separator, inner_id = remainder.partition('^')
return (int(datatype_string), int(migration_version_string), inner_id)
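  # Illustrative note (not part of the original file): _MakeCurrentId and
  # _ExtractIdInfo are inverses over the "<datatype>^<migration version>^<inner id>"
  # format.  With a made-up datatype value of 37 and a migration history
  # currently at version 2 for that type:
  #   _MakeCurrentId(37, '<client tag>bookmark-1')  -> '37^2^<client tag>bookmark-1'
  #   _ExtractIdInfo('37^2^<client tag>bookmark-1') -> (37, 2, '<client tag>bookmark-1')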
def _WritePosition(self, entry, parent_id):
"""Ensure the entry has an absolute, numeric position and parent_id.
Historically, clients would specify positions using the predecessor-based
references in the insert_after_item_id field; starting July 2011, this
was changed and Chrome now sends up the absolute position. The server
must store a position_in_parent value and must not maintain
insert_after_item_id.
Starting in Jan 2013, the client will also send up a unique_position field
which should be saved and returned on subsequent GetUpdates.
Args:
      entry: The entry for which to write a position. Its ID fields are
assumed to be server IDs. This entry will have its parent_id_string,
position_in_parent and unique_position fields updated; its
insert_after_item_id field will be cleared.
parent_id: The ID of the entry intended as the new parent.
"""
entry.parent_id_string = parent_id
if not entry.HasField('position_in_parent'):
entry.position_in_parent = 1337 # A debuggable, distinctive default.
entry.ClearField('insert_after_item_id')
def _ItemExists(self, id_string):
"""Determine whether an item exists in the changelog."""
return id_string in self._entries
def _CreatePermanentItem(self, spec):
"""Create one permanent item from its spec, if it doesn't exist.
The resulting item is added to the changelog.
Args:
spec: A PermanentItem object holding the properties of the item to create.
"""
id_string = self._ServerTagToId(spec.tag)
if self._ItemExists(id_string):
return
print 'Creating permanent item: %s' % spec.name
entry = sync_pb2.SyncEntity()
entry.id_string = id_string
entry.non_unique_name = spec.name
entry.name = spec.name
entry.server_defined_unique_tag = spec.tag
entry.folder = True
entry.deleted = False
entry.specifics.CopyFrom(GetDefaultEntitySpecifics(spec.sync_type))
self._WritePosition(entry, self._ServerTagToId(spec.parent_tag))
self._SaveEntry(entry)
def _CreateDefaultPermanentItems(self, requested_types):
"""Ensure creation of all default permanent items for a given set of types.
Args:
requested_types: A list of sync data types from ALL_TYPES.
All default permanent items of only these types will be created.
"""
for spec in self._PERMANENT_ITEM_SPECS:
if spec.sync_type in requested_types and spec.create_by_default:
self._CreatePermanentItem(spec)
def ResetStoreBirthday(self):
"""Resets the store birthday to a random value."""
# TODO(nick): uuid.uuid1() is better, but python 2.5 only.
self.store_birthday = '%0.30f' % random.random()
def StoreBirthday(self):
"""Gets the store birthday."""
return self.store_birthday
def GetChanges(self, sieve):
"""Get entries which have changed, oldest first.
The returned entries are limited to being _BATCH_SIZE many. The entries
are returned in strict version order.
Args:
sieve: An update sieve to use to filter out updates the client
has already seen.
Returns:
A tuple of (version, entries, changes_remaining). Version is a new
timestamp value, which should be used as the starting point for the
next query. Entries is the batch of entries meeting the current
timestamp query. Changes_remaining indicates the number of changes
left on the server after this batch.
"""
if not sieve.HasAnyTimestamp():
return (0, [], 0)
min_timestamp = sieve.GetMinTimestamp()
first_time_types = sieve.GetFirstTimeTypes()
self._CreateDefaultPermanentItems(first_time_types)
# Mobile bookmark folder is not created by default, create it only when
# client requested it.
if (sieve.GetCreateMobileBookmarks() and
first_time_types.count(BOOKMARK) > 0):
self.TriggerCreateSyncedBookmarks()
self.TriggerAcknowledgeManagedUsers()
change_log = sorted(self._entries.values(),
key=operator.attrgetter('version'))
new_changes = [x for x in change_log if x.version > min_timestamp]
# Pick batch_size new changes, and then filter them. This matches
# the RPC behavior of the production sync server.
batch = new_changes[:self._BATCH_SIZE]
if not batch:
# Client is up to date.
return (min_timestamp, [], 0)
# Restrict batch to requested types. Tombstones are untyped
# and will always get included.
filtered = [copy.deepcopy(item) for item in batch
if item.deleted or sieve.ClientWantsItem(item)]
# The new client timestamp is the timestamp of the last item in the
# batch, even if that item was filtered out.
return (batch[-1].version, filtered, len(new_changes) - len(batch))
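  # Illustrative note (not part of the original file): with _BATCH_SIZE == 10
  # and 25 entries newer than the client's timestamp, GetChanges returns the
  # 10 oldest entries, a new timestamp equal to the version of the 10th entry
  # (even if that entry was filtered out), and changes_remaining == 15; the
  # client keeps querying from the returned timestamp until changes_remaining
  # reaches 0.  The numbers here are made up.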
def GetKeystoreKeys(self):
"""Returns the encryption keys for this account."""
print "Returning encryption keys: %s" % self._keys
return self._keys
def _CopyOverImmutableFields(self, entry):
"""Preserve immutable fields by copying pre-commit state.
Args:
entry: A sync entity from the client.
"""
if entry.id_string in self._entries:
if self._entries[entry.id_string].HasField(
'server_defined_unique_tag'):
entry.server_defined_unique_tag = (
self._entries[entry.id_string].server_defined_unique_tag)
def _CheckVersionForCommit(self, entry):
"""Perform an optimistic concurrency check on the version number.
Clients are only allowed to commit if they report having seen the most
recent version of an object.
Args:
entry: A sync entity from the client. It is assumed that ID fields
have been converted to server IDs.
Returns:
A boolean value indicating whether the client's version matches the
newest server version for the given entry.
"""
if entry.id_string in self._entries:
# Allow edits/deletes if the version matches, and any undeletion.
return (self._entries[entry.id_string].version == entry.version or
self._entries[entry.id_string].deleted)
else:
# Allow unknown ID only if the client thinks it's new too.
return entry.version == 0
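  # Illustrative note (not part of the original file): if the server holds an
  # entry at version 6 and a client commits an edit based on version 5, the
  # check above fails and HandleCommit answers that entry with CONFLICT;
  # committing against version 6, or undeleting a tombstone, is accepted.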
def _CheckParentIdForCommit(self, entry):
"""Check that the parent ID referenced in a SyncEntity actually exists.
Args:
entry: A sync entity from the client. It is assumed that ID fields
have been converted to server IDs.
Returns:
A boolean value indicating whether the entity's parent ID is an object
that actually exists (and is not deleted) in the current account state.
"""
if entry.parent_id_string == ROOT_ID:
# This is generally allowed.
return True
if (not entry.HasField('parent_id_string') and
entry.HasField('client_defined_unique_tag')):
return True # Unique client tag items do not need to specify a parent.
if entry.parent_id_string not in self._entries:
print 'Warning: Client sent unknown ID. Should never happen.'
return False
if entry.parent_id_string == entry.id_string:
print 'Warning: Client sent circular reference. Should never happen.'
return False
if self._entries[entry.parent_id_string].deleted:
# This can happen in a race condition between two clients.
return False
if not self._entries[entry.parent_id_string].folder:
print 'Warning: Client sent non-folder parent. Should never happen.'
return False
return True
def _RewriteIdsAsServerIds(self, entry, cache_guid, commit_session):
"""Convert ID fields in a client sync entry to server IDs.
A commit batch sent by a client may contain new items for which the
server has not generated IDs yet. And within a commit batch, later
items are allowed to refer to earlier items. This method will
generate server IDs for new items, as well as rewrite references
to items whose server IDs were generated earlier in the batch.
Args:
entry: The client sync entry to modify.
cache_guid: The globally unique ID of the client that sent this
commit request.
commit_session: A dictionary mapping the original IDs to the new server
IDs, for any items committed earlier in the batch.
"""
if entry.version == 0:
data_type = GetEntryType(entry)
if entry.HasField('client_defined_unique_tag'):
# When present, this should determine the item's ID.
new_id = self._ClientTagToId(data_type, entry.client_defined_unique_tag)
else:
new_id = self._ClientIdToId(data_type, cache_guid, entry.id_string)
entry.originator_cache_guid = cache_guid
entry.originator_client_item_id = entry.id_string
commit_session[entry.id_string] = new_id # Remember the remapping.
entry.id_string = new_id
if entry.parent_id_string in commit_session:
entry.parent_id_string = commit_session[entry.parent_id_string]
if entry.insert_after_item_id in commit_session:
entry.insert_after_item_id = commit_session[entry.insert_after_item_id]
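  # Illustrative note (not part of the original file): a commit batch that
  # first creates folder F under client ID 'c1' and then a bookmark whose
  # parent_id_string is 'c1' works because the first call records
  # commit_session['c1'] = <new server ID>, and the second call rewrites the
  # bookmark's parent_id_string to that server ID before it is stored.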
def ValidateCommitEntries(self, entries):
"""Raise an exception if a commit batch contains any global errors.
Arguments:
entries: an iterable containing commit-form SyncEntity protocol buffers.
Raises:
MigrationDoneError: if any of the entries reference a recently-migrated
datatype.
"""
server_ids_in_commit = set()
local_ids_in_commit = set()
for entry in entries:
if entry.version:
server_ids_in_commit.add(entry.id_string)
else:
local_ids_in_commit.add(entry.id_string)
if entry.HasField('parent_id_string'):
if entry.parent_id_string not in local_ids_in_commit:
server_ids_in_commit.add(entry.parent_id_string)
versions_present = {}
for server_id in server_ids_in_commit:
parsed = self._ExtractIdInfo(server_id)
if parsed:
datatype, version, _ = parsed
versions_present.setdefault(datatype, []).append(version)
self.migration_history.CheckAllCurrent(
dict((k, min(v)) for k, v in versions_present.iteritems()))
def CommitEntry(self, entry, cache_guid, commit_session):
"""Attempt to commit one entry to the user's account.
Args:
entry: A SyncEntity protobuf representing desired object changes.
cache_guid: A string value uniquely identifying the client; this
is used for ID generation and will determine the originator_cache_guid
if the entry is new.
commit_session: A dictionary mapping client IDs to server IDs for any
objects committed earlier this session. If the entry gets a new ID
during commit, the change will be recorded here.
Returns:
A SyncEntity reflecting the post-commit value of the entry, or None
if the entry was not committed due to an error.
"""
entry = copy.deepcopy(entry)
# Generate server IDs for this entry, and write generated server IDs
# from earlier entries into the message's fields, as appropriate. The
# ID generation state is stored in 'commit_session'.
self._RewriteIdsAsServerIds(entry, cache_guid, commit_session)
# Sets the parent ID field for a client-tagged item. The client is allowed
# to not specify parents for these types of items. The server can figure
# out on its own what the parent ID for this entry should be.
self._RewriteParentIdForUniqueClientEntry(entry)
# Perform the optimistic concurrency check on the entry's version number.
# Clients are not allowed to commit unless they indicate that they've seen
# the most recent version of an object.
if not self._CheckVersionForCommit(entry):
return None
# Check the validity of the parent ID; it must exist at this point.
# TODO(nick): Implement cycle detection and resolution.
if not self._CheckParentIdForCommit(entry):
return None
    self._CopyOverImmutableFields(entry)
# At this point, the commit is definitely going to happen.
# Deletion works by storing a limited record for an entry, called a
# tombstone. A sync server must track deleted IDs forever, since it does
# not keep track of client knowledge (there's no deletion ACK event).
if entry.deleted:
def MakeTombstone(id_string, datatype):
"""Make a tombstone entry that will replace the entry being deleted.
Args:
        id_string: Index of the SyncEntity to be deleted.
        datatype: The sync type (python enum) of the entry being deleted.
Returns:
A new SyncEntity reflecting the fact that the entry is deleted.
"""
# Only the ID, version and deletion state are preserved on a tombstone.
tombstone = sync_pb2.SyncEntity()
tombstone.id_string = id_string
tombstone.deleted = True
tombstone.name = ''
tombstone.specifics.CopyFrom(GetDefaultEntitySpecifics(datatype))
return tombstone
def IsChild(child_id):
"""Check if a SyncEntity is a child of entry, or any of its children.
Args:
child_id: Index of the SyncEntity that is a possible child of entry.
Returns:
True if it is a child; false otherwise.
"""
if child_id not in self._entries:
return False
if self._entries[child_id].parent_id_string == entry.id_string:
return True
return IsChild(self._entries[child_id].parent_id_string)
# Identify any children entry might have.
child_ids = [child.id_string for child in self._entries.itervalues()
if IsChild(child.id_string)]
# Mark all children that were identified as deleted.
for child_id in child_ids:
datatype = GetEntryType(self._entries[child_id])
self._SaveEntry(MakeTombstone(child_id, datatype))
# Delete entry itself.
datatype = GetEntryType(self._entries[entry.id_string])
entry = MakeTombstone(entry.id_string, datatype)
else:
# Comments in sync.proto detail how the representation of positional
# ordering works.
#
# We've almost fully deprecated the 'insert_after_item_id' field.
# The 'position_in_parent' field is also deprecated, but as of Jan 2013
# is still in common use. The 'unique_position' field is the latest
# and greatest in positioning technology.
#
# This server supports 'position_in_parent' and 'unique_position'.
self._WritePosition(entry, entry.parent_id_string)
# Preserve the originator info, which the client is not required to send
# when updating.
base_entry = self._entries.get(entry.id_string)
if base_entry and not entry.HasField('originator_cache_guid'):
entry.originator_cache_guid = base_entry.originator_cache_guid
entry.originator_client_item_id = base_entry.originator_client_item_id
# Store the current time since the Unix epoch in milliseconds.
entry.mtime = (int((time.mktime(time.gmtime()) -
(time.mktime(FIRST_DAY_UNIX_TIME_EPOCH) - ONE_DAY_SECONDS))*1000))
# Commit the change. This also updates the version number.
self._SaveEntry(entry)
return entry
def _RewriteVersionInId(self, id_string):
"""Rewrites an ID so that its migration version becomes current."""
parsed_id = self._ExtractIdInfo(id_string)
if not parsed_id:
return id_string
datatype, old_migration_version, inner_id = parsed_id
return self._MakeCurrentId(datatype, inner_id)
def _RewriteParentIdForUniqueClientEntry(self, entry):
"""Sets the entry's parent ID field to the appropriate value.
The client must always set enough of the specifics of the entries it sends
up such that the server can identify its type. (See crbug.com/373859)
The client is under no obligation to set the parent ID field. The server
can always infer what the appropriate parent for this model type should be.
Having the client not send the parent ID is a step towards the removal of
type root nodes. (See crbug.com/373869)
    This server implements these features by "faking" the existence of a parent
ID early on in the commit processing.
This function has no effect on non-client-tagged items.
"""
if not entry.HasField('client_defined_unique_tag'):
return # Skip this processing for non-client-tagged types.
data_type = GetEntryType(entry)
entry.parent_id_string = self._TypeToTypeRootId(data_type)
def TriggerMigration(self, datatypes):
"""Cause a migration to occur for a set of datatypes on this account.
Clients will see the MIGRATION_DONE error for these datatypes until they
resync them.
"""
versions_to_remap = self.migration_history.Bump(datatypes)
all_entries = self._entries.values()
self._entries.clear()
for entry in all_entries:
new_id = self._RewriteVersionInId(entry.id_string)
entry.id_string = new_id
if entry.HasField('parent_id_string'):
entry.parent_id_string = self._RewriteVersionInId(
entry.parent_id_string)
self._entries[entry.id_string] = entry
def TriggerSyncTabFavicons(self):
"""Set the 'sync_tab_favicons' field to this account's nigori node.
If the field is not currently set, will write a new nigori node entry
with the field set. Else does nothing.
"""
nigori_tag = "google_chrome_nigori"
nigori_original = self._entries.get(self._ServerTagToId(nigori_tag))
if (nigori_original.specifics.nigori.sync_tab_favicons):
return
nigori_new = copy.deepcopy(nigori_original)
    nigori_new.specifics.nigori.sync_tab_favicons = True
self._SaveEntry(nigori_new)
def TriggerCreateSyncedBookmarks(self):
"""Create the Synced Bookmarks folder under the Bookmarks permanent item.
Clients will then receive the Synced Bookmarks folder on future
GetUpdates, and new bookmarks can be added within the Synced Bookmarks
folder.
"""
synced_bookmarks_spec, = [spec for spec in self._PERMANENT_ITEM_SPECS
if spec.name == "Synced Bookmarks"]
self._CreatePermanentItem(synced_bookmarks_spec)
def TriggerEnableKeystoreEncryption(self):
"""Create the keystore_encryption experiment entity and enable it.
A new entity within the EXPERIMENTS datatype is created with the unique
client tag "keystore_encryption" if it doesn't already exist. The
keystore_encryption message is then filled with |enabled| set to true.
"""
experiment_id = self._ServerTagToId("google_chrome_experiments")
keystore_encryption_id = self._ClientTagToId(
EXPERIMENTS,
KEYSTORE_ENCRYPTION_EXPERIMENT_TAG)
keystore_entry = self._entries.get(keystore_encryption_id)
if keystore_entry is None:
keystore_entry = sync_pb2.SyncEntity()
keystore_entry.id_string = keystore_encryption_id
keystore_entry.name = "Keystore Encryption"
keystore_entry.client_defined_unique_tag = (
KEYSTORE_ENCRYPTION_EXPERIMENT_TAG)
keystore_entry.folder = False
keystore_entry.deleted = False
keystore_entry.specifics.CopyFrom(GetDefaultEntitySpecifics(EXPERIMENTS))
self._WritePosition(keystore_entry, experiment_id)
keystore_entry.specifics.experiments.keystore_encryption.enabled = True
self._SaveEntry(keystore_entry)
def TriggerRotateKeystoreKeys(self):
"""Rotate the current set of keystore encryption keys.
|self._keys| will have a new random encryption key appended to it. We touch
the nigori node so that each client will receive the new encryption keys
only once.
"""
# Add a new encryption key.
self._keys += [MakeNewKeystoreKey(), ]
# Increment the nigori node's timestamp, so clients will get the new keys
# on their next GetUpdates (any time the nigori node is sent back, we also
# send back the keystore keys).
nigori_tag = "google_chrome_nigori"
self._SaveEntry(self._entries.get(self._ServerTagToId(nigori_tag)))
def TriggerAcknowledgeManagedUsers(self):
"""Set the "acknowledged" flag for any managed user entities that don't have
it set already.
"""
if not self.acknowledge_managed_users:
return
managed_users = [copy.deepcopy(entry) for entry in self._entries.values()
if entry.specifics.HasField('managed_user')
and not entry.specifics.managed_user.acknowledged]
for user in managed_users:
user.specifics.managed_user.acknowledged = True
self._SaveEntry(user)
def TriggerEnablePreCommitGetUpdateAvoidance(self):
"""Sets the experiment to enable pre-commit GetUpdate avoidance."""
experiment_id = self._ServerTagToId("google_chrome_experiments")
pre_commit_gu_avoidance_id = self._ClientTagToId(
EXPERIMENTS,
PRE_COMMIT_GU_AVOIDANCE_EXPERIMENT_TAG)
entry = self._entries.get(pre_commit_gu_avoidance_id)
if entry is None:
entry = sync_pb2.SyncEntity()
entry.id_string = pre_commit_gu_avoidance_id
entry.name = "Pre-commit GU avoidance"
entry.client_defined_unique_tag = PRE_COMMIT_GU_AVOIDANCE_EXPERIMENT_TAG
entry.folder = False
entry.deleted = False
entry.specifics.CopyFrom(GetDefaultEntitySpecifics(EXPERIMENTS))
self._WritePosition(entry, experiment_id)
entry.specifics.experiments.pre_commit_update_avoidance.enabled = True
self._SaveEntry(entry)
def SetInducedError(self, error, error_frequency,
sync_count_before_errors):
self.induced_error = error
self.induced_error_frequency = error_frequency
self.sync_count_before_errors = sync_count_before_errors
def GetInducedError(self):
return self.induced_error
def _GetNextVersionNumber(self):
"""Set the version to one more than the greatest version number seen."""
entries = sorted(self._entries.values(), key=operator.attrgetter('version'))
if len(entries) < 1:
raise ClientNotConnectedError
return entries[-1].version + 1
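# Minimal standalone sketch (not part of the original mock server) of the
# change-log scheme SyncDataModel uses: a single global version counter serves
# both as each entry's version and as the progress timestamp handed back to
# clients.  The batch size and entry payloads below are made up for
# illustration, and no protobuf machinery is involved.
class _ExampleChangeLog(object):
  def __init__(self, batch_size=10):
    self._version = 0
    self._entries = {}  # id_string -> (version, payload)
    self._batch_size = batch_size
  def Save(self, id_string, payload):
    """Give the entry the next global version, as _SaveEntry does."""
    self._version += 1
    self._entries[id_string] = (self._version, payload)
  def GetChanges(self, min_timestamp):
    """Return (new_timestamp, batch, changes_remaining), as GetChanges does."""
    changed = sorted(v for v in self._entries.values() if v[0] > min_timestamp)
    batch = changed[:self._batch_size]
    if not batch:
      return (min_timestamp, [], 0)
    return (batch[-1][0], batch, len(changed) - len(batch))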
class TestServer(object):
"""An object to handle requests for one (and only one) Chrome Sync account.
TestServer consumes the sync command messages that are the outermost
layers of the protocol, performs the corresponding actions on its
SyncDataModel, and constructs an appropriate response message.
"""
def __init__(self):
# The implementation supports exactly one account; its state is here.
self.account = SyncDataModel()
self.account_lock = threading.Lock()
# Clients that have talked to us: a map from the full client ID
# to its nickname.
self.clients = {}
self.client_name_generator = ('+' * times + chr(c)
for times in xrange(0, sys.maxint) for c in xrange(ord('A'), ord('Z')))
self.transient_error = False
self.sync_count = 0
# Gaia OAuth2 Token fields and their default values.
self.response_code = 200
self.request_token = 'rt1'
self.access_token = 'at1'
self.expires_in = 3600
self.token_type = 'Bearer'
# The ClientCommand to send back on each ServerToClientResponse. If set to
# None, no ClientCommand should be sent.
self._client_command = None
def GetShortClientName(self, query):
parsed = cgi.parse_qs(query[query.find('?')+1:])
client_id = parsed.get('client_id')
if not client_id:
return '?'
client_id = client_id[0]
if client_id not in self.clients:
self.clients[client_id] = self.client_name_generator.next()
return self.clients[client_id]
def CheckStoreBirthday(self, request):
"""Raises StoreBirthdayError if the request's birthday is a mismatch."""
if not request.HasField('store_birthday'):
return
if self.account.StoreBirthday() != request.store_birthday:
raise StoreBirthdayError
def CheckTransientError(self):
"""Raises TransientError if transient_error variable is set."""
if self.transient_error:
raise TransientError
def CheckSendError(self):
"""Raises SyncInducedError if needed."""
if (self.account.induced_error.error_type !=
sync_enums_pb2.SyncEnums.UNKNOWN):
# Always means return the given error for all requests.
if self.account.induced_error_frequency == ERROR_FREQUENCY_ALWAYS:
raise SyncInducedError
# This means the FIRST 2 requests of every 3 requests
# return an error. Don't switch the order of failures. There are
# test cases that rely on the first 2 being the failure rather than
# the last 2.
elif (self.account.induced_error_frequency ==
ERROR_FREQUENCY_TWO_THIRDS):
if (((self.sync_count -
self.account.sync_count_before_errors) % 3) != 0):
raise SyncInducedError
else:
raise InducedErrorFrequencyNotDefined
def HandleMigrate(self, path):
query = urlparse.urlparse(path)[4]
code = 200
self.account_lock.acquire()
try:
datatypes = [DataTypeStringToSyncTypeLoose(x)
for x in urlparse.parse_qs(query).get('type',[])]
if datatypes:
self.account.TriggerMigration(datatypes)
response = 'Migrated datatypes %s' % (
' and '.join(SyncTypeToString(x).upper() for x in datatypes))
else:
response = 'Please specify one or more <i>type=name</i> parameters'
code = 400
except DataTypeIdNotRecognized, error:
response = 'Could not interpret datatype name'
code = 400
finally:
self.account_lock.release()
return (code, '<html><title>Migration: %d</title><H1>%d %s</H1></html>' %
(code, code, response))
def HandleSetInducedError(self, path):
query = urlparse.urlparse(path)[4]
self.account_lock.acquire()
code = 200
response = 'Success'
error = sync_pb2.ClientToServerResponse.Error()
try:
error_type = urlparse.parse_qs(query)['error']
action = urlparse.parse_qs(query)['action']
error.error_type = int(error_type[0])
error.action = int(action[0])
try:
error.url = (urlparse.parse_qs(query)['url'])[0]
except KeyError:
error.url = ''
try:
        error.error_description = (
(urlparse.parse_qs(query)['error_description'])[0])
except KeyError:
error.error_description = ''
try:
error_frequency = int((urlparse.parse_qs(query)['frequency'])[0])
except KeyError:
error_frequency = ERROR_FREQUENCY_ALWAYS
self.account.SetInducedError(error, error_frequency, self.sync_count)
response = ('Error = %d, action = %d, url = %s, description = %s' %
(error.error_type, error.action,
error.url,
error.error_description))
    except (KeyError, ValueError):  # missing or malformed query parameters
response = 'Could not parse url'
code = 400
finally:
self.account_lock.release()
return (code, '<html><title>SetError: %d</title><H1>%d %s</H1></html>' %
(code, code, response))
def HandleCreateBirthdayError(self):
self.account.ResetStoreBirthday()
return (
200,
'<html><title>Birthday error</title><H1>Birthday error</H1></html>')
def HandleSetTransientError(self):
self.transient_error = True
return (
200,
'<html><title>Transient error</title><H1>Transient error</H1></html>')
def HandleSetSyncTabFavicons(self):
"""Set 'sync_tab_favicons' field of the nigori node for this account."""
self.account.TriggerSyncTabFavicons()
return (
200,
'<html><title>Tab Favicons</title><H1>Tab Favicons</H1></html>')
def HandleCreateSyncedBookmarks(self):
"""Create the Synced Bookmarks folder under Bookmarks."""
self.account.TriggerCreateSyncedBookmarks()
return (
200,
'<html><title>Synced Bookmarks</title><H1>Synced Bookmarks</H1></html>')
def HandleEnableKeystoreEncryption(self):
"""Enables the keystore encryption experiment."""
self.account.TriggerEnableKeystoreEncryption()
return (
200,
'<html><title>Enable Keystore Encryption</title>'
'<H1>Enable Keystore Encryption</H1></html>')
def HandleRotateKeystoreKeys(self):
"""Rotate the keystore encryption keys."""
self.account.TriggerRotateKeystoreKeys()
return (
200,
'<html><title>Rotate Keystore Keys</title>'
'<H1>Rotate Keystore Keys</H1></html>')
def HandleEnableManagedUserAcknowledgement(self):
"""Enable acknowledging newly created managed users."""
self.account.acknowledge_managed_users = True
return (
200,
'<html><title>Enable Managed User Acknowledgement</title>'
'<h1>Enable Managed User Acknowledgement</h1></html>')
def HandleEnablePreCommitGetUpdateAvoidance(self):
"""Enables the pre-commit GU avoidance experiment."""
self.account.TriggerEnablePreCommitGetUpdateAvoidance()
return (
200,
'<html><title>Enable pre-commit GU avoidance</title>'
'<H1>Enable pre-commit GU avoidance</H1></html>')
def HandleCommand(self, query, raw_request):
"""Decode and handle a sync command from a raw input of bytes.
This is the main entry point for this class. It is safe to call this
method from multiple threads.
Args:
      query: The query string of the request URL; used only to identify the
        client for logging (see GetShortClientName).
      raw_request: An iterable byte sequence to be interpreted as a sync
        protocol command.
Returns:
A tuple (response_code, raw_response); the first value is an HTTP
result code, while the second value is a string of bytes which is the
serialized reply to the command.
"""
self.account_lock.acquire()
self.sync_count += 1
def print_context(direction):
print '[Client %s %s %s.py]' % (self.GetShortClientName(query), direction,
__name__),
try:
request = sync_pb2.ClientToServerMessage()
request.MergeFromString(raw_request)
contents = request.message_contents
response = sync_pb2.ClientToServerResponse()
response.error_code = sync_enums_pb2.SyncEnums.SUCCESS
if self._client_command:
response.client_command.CopyFrom(self._client_command)
self.CheckStoreBirthday(request)
response.store_birthday = self.account.store_birthday
self.CheckTransientError()
self.CheckSendError()
print_context('->')
if contents == sync_pb2.ClientToServerMessage.AUTHENTICATE:
print 'Authenticate'
# We accept any authentication token, and support only one account.
# TODO(nick): Mock out the GAIA authentication as well; hook up here.
response.authenticate.user.email = 'syncjuser@chromium'
response.authenticate.user.display_name = 'Sync J User'
elif contents == sync_pb2.ClientToServerMessage.COMMIT:
print 'Commit %d item(s)' % len(request.commit.entries)
self.HandleCommit(request.commit, response.commit)
elif contents == sync_pb2.ClientToServerMessage.GET_UPDATES:
print 'GetUpdates',
self.HandleGetUpdates(request.get_updates, response.get_updates)
print_context('<-')
print '%d update(s)' % len(response.get_updates.entries)
else:
print 'Unrecognizable sync request!'
return (400, None) # Bad request.
return (200, response.SerializeToString())
except MigrationDoneError, error:
print_context('<-')
print 'MIGRATION_DONE: <%s>' % (ShortDatatypeListSummary(error.datatypes))
response = sync_pb2.ClientToServerResponse()
response.store_birthday = self.account.store_birthday
response.error_code = sync_enums_pb2.SyncEnums.MIGRATION_DONE
response.migrated_data_type_id[:] = [
SyncTypeToProtocolDataTypeId(x) for x in error.datatypes]
return (200, response.SerializeToString())
except StoreBirthdayError, error:
print_context('<-')
print 'NOT_MY_BIRTHDAY'
response = sync_pb2.ClientToServerResponse()
response.store_birthday = self.account.store_birthday
response.error_code = sync_enums_pb2.SyncEnums.NOT_MY_BIRTHDAY
return (200, response.SerializeToString())
except TransientError, error:
      ### This is deprecated now. Will be removed once the test cases are removed.
print_context('<-')
print 'TRANSIENT_ERROR'
response.store_birthday = self.account.store_birthday
response.error_code = sync_enums_pb2.SyncEnums.TRANSIENT_ERROR
return (200, response.SerializeToString())
except SyncInducedError, error:
print_context('<-')
print 'INDUCED_ERROR'
response.store_birthday = self.account.store_birthday
error = self.account.GetInducedError()
response.error.error_type = error.error_type
response.error.url = error.url
response.error.error_description = error.error_description
response.error.action = error.action
return (200, response.SerializeToString())
finally:
self.account_lock.release()
def HandleCommit(self, commit_message, commit_response):
"""Respond to a Commit request by updating the user's account state.
Commit attempts stop after the first error, returning a CONFLICT result
for any unattempted entries.
Args:
commit_message: A sync_pb.CommitMessage protobuf holding the content
of the client's request.
commit_response: A sync_pb.CommitResponse protobuf into which a reply
to the client request will be written.
"""
commit_response.SetInParent()
batch_failure = False
session = {} # Tracks ID renaming during the commit operation.
guid = commit_message.cache_guid
self.account.ValidateCommitEntries(commit_message.entries)
for entry in commit_message.entries:
server_entry = None
if not batch_failure:
# Try to commit the change to the account.
server_entry = self.account.CommitEntry(entry, guid, session)
# An entryresponse is returned in both success and failure cases.
reply = commit_response.entryresponse.add()
if not server_entry:
reply.response_type = sync_pb2.CommitResponse.CONFLICT
reply.error_message = 'Conflict.'
batch_failure = True # One failure halts the batch.
else:
reply.response_type = sync_pb2.CommitResponse.SUCCESS
# These are the properties that the server is allowed to override
# during commit; the client wants to know their values at the end
# of the operation.
reply.id_string = server_entry.id_string
if not server_entry.deleted:
# Note: the production server doesn't actually send the
# parent_id_string on commit responses, so we don't either.
reply.position_in_parent = server_entry.position_in_parent
reply.version = server_entry.version
reply.name = server_entry.name
reply.non_unique_name = server_entry.non_unique_name
else:
reply.version = entry.version + 1
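  # Illustrative note (not part of the original file): if a batch of three
  # entries fails on the second one, the replies carry SUCCESS, CONFLICT,
  # CONFLICT - the third entry is never attempted because batch_failure stays
  # True for the rest of the loop.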
def HandleGetUpdates(self, update_request, update_response):
"""Respond to a GetUpdates request by querying the user's account.
Args:
update_request: A sync_pb.GetUpdatesMessage protobuf holding the content
of the client's request.
update_response: A sync_pb.GetUpdatesResponse protobuf into which a reply
to the client request will be written.
"""
update_response.SetInParent()
update_sieve = UpdateSieve(update_request, self.account.migration_history)
print CallerInfoToString(update_request.caller_info.source),
print update_sieve.SummarizeRequest()
update_sieve.CheckMigrationState()
new_timestamp, entries, remaining = self.account.GetChanges(update_sieve)
update_response.changes_remaining = remaining
sending_nigori_node = False
for entry in entries:
if entry.name == 'Nigori':
sending_nigori_node = True
reply = update_response.entries.add()
reply.CopyFrom(entry)
update_sieve.SaveProgress(new_timestamp, update_response)
if update_request.need_encryption_key or sending_nigori_node:
update_response.encryption_keys.extend(self.account.GetKeystoreKeys())
def HandleGetOauth2Token(self):
return (int(self.response_code),
'{\n'
' \"refresh_token\": \"' + self.request_token + '\",\n'
' \"access_token\": \"' + self.access_token + '\",\n'
' \"expires_in\": ' + str(self.expires_in) + ',\n'
' \"token_type\": \"' + self.token_type +'\"\n'
'}')
def HandleSetOauth2Token(self, response_code, request_token, access_token,
expires_in, token_type):
if response_code != 0:
self.response_code = response_code
if request_token != '':
self.request_token = request_token
if access_token != '':
self.access_token = access_token
if expires_in != 0:
self.expires_in = expires_in
if token_type != '':
self.token_type = token_type
return (200,
'<html><title>Set OAuth2 Token</title>'
'<H1>This server will now return the OAuth2 Token:</H1>'
'<p>response_code: ' + str(self.response_code) + '</p>'
'<p>request_token: ' + self.request_token + '</p>'
'<p>access_token: ' + self.access_token + '</p>'
'<p>expires_in: ' + str(self.expires_in) + '</p>'
'<p>token_type: ' + self.token_type + '</p>'
'</html>')
def CustomizeClientCommand(self, sessions_commit_delay_seconds):
"""Customizes the value of the ClientCommand of ServerToClientResponse.
Currently, this only allows for changing the sessions_commit_delay_seconds
field.
Args:
sessions_commit_delay_seconds: The desired sync delay time for sessions.
"""
if not self._client_command:
self._client_command = client_commands_pb2.ClientCommand()
self._client_command.sessions_commit_delay_seconds = \
sessions_commit_delay_seconds
return self._client_command
| bsd-3-clause |
T-J-Teru/synopsys-binutils-gdb | gdb/python/lib/gdb/command/prompt.py | 107 | 2135 | # Extended prompt.
# Copyright (C) 2011-2012 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""GDB command for working with extended prompts."""
import gdb
import gdb.prompt
class _ExtendedPrompt(gdb.Parameter):
"""Set the extended prompt.
Usage: set extended-prompt VALUE
Substitutions are applied to VALUE to compute the real prompt.
The currently defined substitutions are:
"""
# Add the prompt library's dynamically generated help to the
# __doc__ string.
__doc__ = __doc__ + gdb.prompt.prompt_help()
set_doc = "Set the extended prompt."
show_doc = "Show the extended prompt."
def __init__(self):
super(_ExtendedPrompt, self).__init__("extended-prompt",
gdb.COMMAND_SUPPORT,
gdb.PARAM_STRING_NOESCAPE)
self.value = ''
self.hook_set = False
def get_show_string (self, pvalue):
        if self.value != '':
return "The extended prompt is: " + self.value
else:
return "The extended prompt is not set."
def get_set_string (self):
if self.hook_set == False:
gdb.prompt_hook = self.before_prompt_hook
self.hook_set = True
return ""
def before_prompt_hook(self, current):
        if self.value != '':
newprompt = gdb.prompt.substitute_prompt(self.value)
return newprompt.replace('\\', '\\\\')
else:
return None
_ExtendedPrompt()
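# Usage sketch (not part of the original file).  The available substitutions
# come from gdb.prompt.prompt_help() and vary between GDB versions; '\w'
# (working directory) is assumed here, and the path shown is made up:
#   (gdb) set extended-prompt \w>
#   /home/user/src>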
| gpl-2.0 |
savi-dev/keystone | tests/test_ssl.py | 3 | 3769 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import httplib
import os
import ssl
from keystone import config
from keystone import test
CONF = config.CONF
CERTDIR = test.rootdir("examples/ssl/certs")
KEYDIR = test.rootdir("examples/ssl/private")
CERT = os.path.join(CERTDIR, 'keystone.pem')
KEY = os.path.join(KEYDIR, 'keystonekey.pem')
CA = os.path.join(CERTDIR, 'ca.pem')
CLIENT = os.path.join(CERTDIR, 'middleware.pem')
class SSLTestCase(test.TestCase):
def setUp(self):
super(SSLTestCase, self).setUp()
self.load_backends()
def test_1way_ssl_ok(self):
"""
Make sure both public and admin API work with 1-way SSL.
"""
self.public_server = self.serveapp('keystone', name='main',
cert=CERT, key=KEY, ca=CA)
self.admin_server = self.serveapp('keystone', name='admin',
cert=CERT, key=KEY, ca=CA)
# Verify Admin
conn = httplib.HTTPSConnection('127.0.0.1', CONF.admin_port)
conn.request('GET', '/')
resp = conn.getresponse()
self.assertEqual(resp.status, 300)
# Verify Public
conn = httplib.HTTPSConnection('127.0.0.1', CONF.public_port)
conn.request('GET', '/')
resp = conn.getresponse()
self.assertEqual(resp.status, 300)
def test_2way_ssl_ok(self):
"""
Make sure both public and admin API work with 2-way SSL. Requires
client certificate.
"""
self.public_server = self.serveapp(
'keystone', name='main', cert=CERT,
key=KEY, ca=CA, cert_required=True)
self.admin_server = self.serveapp(
'keystone', name='admin', cert=CERT,
key=KEY, ca=CA, cert_required=True)
# Verify Admin
conn = httplib.HTTPSConnection(
'127.0.0.1', CONF.admin_port, CLIENT, CLIENT)
conn.request('GET', '/')
resp = conn.getresponse()
self.assertEqual(resp.status, 300)
# Verify Public
conn = httplib.HTTPSConnection(
'127.0.0.1', CONF.public_port, CLIENT, CLIENT)
conn.request('GET', '/')
resp = conn.getresponse()
self.assertEqual(resp.status, 300)
def test_2way_ssl_fail(self):
"""
Expect to fail when client does not present proper certificate.
"""
self.public_server = self.serveapp(
'keystone', name='main', cert=CERT,
key=KEY, ca=CA, cert_required=True)
self.admin_server = self.serveapp(
'keystone', name='admin', cert=CERT,
key=KEY, ca=CA, cert_required=True)
# Verify Admin
conn = httplib.HTTPSConnection('127.0.0.1', CONF.admin_port)
try:
conn.request('GET', '/')
            self.fail('Admin API should have failed the SSL handshake!')
except ssl.SSLError:
pass
# Verify Public
conn = httplib.HTTPSConnection('127.0.0.1', CONF.public_port)
try:
conn.request('GET', '/')
            self.fail('Public API should have failed the SSL handshake!')
except ssl.SSLError:
pass
| apache-2.0 |
danyaqp/gr-baz | python/borip_server.py | 3 | 45491 | #!/usr/bin/env python
"""
BorIP Server
By Balint Seeber
Part of gr-baz (http://wiki.spench.net/wiki/gr-baz)
Protocol specification: http://wiki.spench.net/wiki/BorIP
"""
from __future__ import with_statement
import time, os, sys, threading, thread, traceback # , gc
from string import split, join
from optparse import OptionParser
import socket
import threading
import SocketServer
from gnuradio import gr
try:
	from gnuradio import blocks	# newer GNU Radio releases provide multiply_const_cc here
except ImportError:
	blocks = None	# fall back to gr.multiply_const_cc in 'server' below
import baz
class server(gr.hier_block2): # Stand-alone block
def __init__(self, size=gr.sizeof_gr_complex, mul=1, server=False, parent=None, verbose=False, debug=False, show_commands=False): # 'parent' should be flowgraph in which this is the 'BorIP Sink' block
gr.hier_block2.__init__(self, "borip_server",
gr.io_signature(1, 1, size),
gr.io_signature(0, 0, 0))
src = self
udp_type_size = gr.sizeof_short
if size == gr.sizeof_gr_complex:
if mul != 1:
if verbose:
print "**> Applying constant multiplication:", mul
try: self.mul = blocks.multiply_const_cc(mul)
except: self.mul = gr.multiply_const_cc(mul)
self.connect(src, self.mul)
src = self.mul
self.c2is = gr.complex_to_interleaved_short()
self.connect(src, self.c2is)
src = self.c2is
elif (size == (2 * gr.sizeof_short)): # Short * 2 (vlen = 2)
udp_type_size = gr.sizeof_short * 2
elif not (size == gr.sizeof_short): # not IShort
raise Exception, "Invalid input size (must be gr_complex or interleaved short)"
self.sink = baz.udp_sink(udp_type_size, None, 28888, 1024, False, True)
self.connect(src, self.sink)
self.verbose = verbose
self.parent = None
if server:
self.parent = parent
self.server = borip_server(block=self)
self.server.options.verbose = verbose
self.server.options.debug = debug
self.server.options.command = show_commands
self.server.options.fixed_flowgraph = True
self.server.options.lock = True
self.server.start()
def set_status_msgq(self, msgq):
self.sink.set_status_msgq(msgq)
class ServerWrapper(gr.top_block): # Wrapper for hier_block with output pad
def __init__(self, flowgraph, options):
gr.top_block.__init__(self, "ServerWrapper for " + flowgraph.name())
self.flowgraph = flowgraph
self.options = options
mul = getattr(flowgraph, 'rescale', 1.0)
if hasattr(mul, '__call__'):
mul = mul()
self.server = server(size=self.flowgraph.output_signature().sizeof_stream_item(0), mul=mul, server=False, verbose=options.verbose)
self.connect((self.flowgraph, 0), (self.server, 0))
self.sink = self.server.sink
if hasattr(self.flowgraph, 'source'):
if options.verbose:
print "**> Using internal source:", self.flowgraph.source
self.source = self.flowgraph.source
if hasattr(self.flowgraph, 'status') and hasattr(self.flowgraph.status, 'msgq'):
#if isinstance(self.flowgraph.status, message_callback.message_callback): # Doesn't work, need to 'import baz' and prepend 'baz.'
if options.verbose:
print "**> Using status msgq from:", self.flowgraph.status
if hasattr(self.flowgraph.status.msgq, '__call__'):
msgq = self.flowgraph.status.msgq()
else:
msgq = self.flowgraph.status.msgq
self.server.sink.set_status_msgq(msgq)
def _default_device_hint_mapper(hint):
if hint is None or len(hint) == 0 or hint == "-":
return "usrp_uhd"
parts = hint.split(" ")
try:
idx = int(parts[0])
return "usrp_legacy"
except:
pass
try:
kv = parts[0].index('=') # key=value
parts = hint.split(",") # Assumes no use of "
args = []
subdev = None
for part in parts:
part = part.strip()
if len(part) == 1 and part[0].lower() >= 'a' and part[0].lower() <= 'z':
subdev = part + ":0"
continue
idx = part.find(':')
if idx > 0 and part[0].lower() >= 'a' and part[0].lower() <= 'z': # "?:"
subdev = part
continue
args += [part]
combined = ["addr=\"" + ",".join(args) + "\""]
if subdev is not None:
combined += ["subdev=\"" + subdev + "\""]
return {'module': "usrp_uhd", 'args': combined}
except:
pass
return parts[0].upper() # FCD, RTL
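# Illustrative examples (not part of the original file) of how device hints map
# when no borip_server_devmap override is installed; the serial number and
# subdevice below are made-up values:
#   "" or "-"           -> "usrp_uhd"     (loaded later as module borip_usrp_uhd)
#   "0"                 -> "usrp_legacy"  (a leading integer selects the legacy USRP)
#   "fcd"               -> "FCD"          (unknown single-word hints are upper-cased)
#   "serial=ABC123,A:0" -> roughly {'module': "usrp_uhd",
#                                   'args': ['addr="serial=ABC123"', 'subdev="A:0"']}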
_device_hint_mapper = _default_device_hint_mapper
try:
import borip_server_devmap
_device_hint_mapper = borip_server_devmap.device_hint_mapper
except:
pass
class TuneResult():
def __init__(self, target_rf_freq=0.0, actual_rf_freq=0.0, target_dsp_freq=0.0, actual_dsp_freq=0.0):
self.target_rf_freq = target_rf_freq
self.actual_rf_freq = actual_rf_freq
self.target_dsp_freq = target_dsp_freq
self.actual_dsp_freq = actual_dsp_freq
def __str__(self):
return str(self.target_rf_freq) + ": " + str(self.actual_rf_freq) + " + " + str(self.target_dsp_freq) + " (" + str(self.actual_dsp_freq) + ")"
def duck_copy(self, src):
try:
elems = filter(lambda x: (x[0] != "_") and hasattr(self, x), dir(src))
map(lambda x: setattr(self, x, getattr(src, x)), elems)
#for v in elems:
# print v
# setattr(self, v, getattr(src, v))
return len(elems) > 0
except Exception, e:
print "~~> Failed to duck copy tune result:", src
traceback.print_exc()
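	# Illustrative note (not part of the original file): duck_copy() lets a
	# flowgraph return any object with matching public attribute names, e.g.
	# something shaped like UHD's tune result exposing target_rf_freq,
	# actual_rf_freq, target_dsp_freq and actual_dsp_freq, and have those
	# fields copied onto this TuneResult without an explicit conversion.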
class GainRange():
def __init__(self, start=0.0, stop=1.0, step=1.0):
self.start = start
self.stop = stop
self.step = step
class Device():
def __init__(self):
self._running = False
self._frequency = 0.0
self._frequency_requested = 0.0
self._gain = 0.0
self._sample_rate = 0.0
self._antenna = None
self._last_error = None
def is_running(self):
return self._running
def last_error(self):
return self._last_error
def start(self):
if self.is_running():
return True
self._running = True
return True
def stop(self):
if self.is_running() == False:
return True
self._running = False
return True
def open(self):
self._antenna = self.antennas()[0]
return True
def close(self):
if self.is_running():
self.stop()
def name(self):
return "(no name)"
def serial(self):
return self.name()
def gain_range(self):
return GainRange()
def master_clock(self):
return 0
def samples_per_packet(self):
return 1024
def antennas(self):
return ["(Default)"]
def gain(self, gain=None):
if gain is not None:
self._gain = gain
return True
return self._gain
def sample_rate(self, rate=None):
if rate is not None:
if rate <= 0:
return False
self._sample_rate = rate
return True
return self._sample_rate
def freq(self, freq=None, requested=None):
if freq is not None:
if freq > 0:
if requested is not None and requested > 0:
self._frequency_requested = requested
else:
self._frequency_requested = freq
self._frequency = freq
return True
else:
return False
return self._frequency
def was_tune_successful(self):
return 0
def last_tune_result(self):
return TuneResult(self._frequency_requested, self.freq())
def antenna(self, antenna=None):
if antenna is not None:
if type(antenna) == str:
if len(antenna) == 0:
return False
self._antenna = antenna
elif type(antenna) == int:
num = len(self.antennas())
if antenna < 0 or antenna >= num:
return False
self._antenna = self.antennas()[antenna]
else:
return False
return True
return self._antenna
#def set_antenna(self, antenna):
# return True
class NetworkTransport():
def __init__(self, default_port=28888):
self._header = True
self._default_port = default_port
self._destination = ("127.0.0.1", default_port)
self._payload_size = 0
def destination(self, dest=None):
if dest:
if type(dest) == str:
idx = dest.find(":")
if idx > -1:
try:
port = int(dest[idx+1:])
except:
return False
dest = dest[0:idx].strip()
if len(dest) == 0 or port <= 0:
return False
self._destination = (dest, port)
else:
self._destination = (dest, self._default_port)
elif type(dest) == tuple:
self._destination = dest
else:
return False
return True
return self._destination
def header(self, enable=None):
if enable is not None:
self._header = enable
return self._header
def payload_size(self,size=None):
if size is not None:
self._payload_size = size
return self._payload_size
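# Illustrative note (not part of the original file): NetworkTransport.destination()
# accepts "host", "host:port" or a (host, port) tuple.  With the default port of
# 28888 (addresses below are made up):
#   destination("10.0.0.5:5000") -> stores ("10.0.0.5", 5000)
#   destination("10.0.0.5")      -> stores ("10.0.0.5", 28888)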
def _delete_blocks(tb, done=[], verbose=False):
tb.disconnect_all()
for i in dir(tb):
#if i == "_tb": # Must delete this too!
# continue
obj = getattr(tb, i)
if str(obj) in done:
continue
delete = False
if issubclass(obj.__class__, gr.hier_block2):
if verbose:
print ">>> Descending:", i
_delete_blocks(obj, done + [str(obj)]) # Prevent self-referential loops
delete = True
if delete or '__swig_destroy__' in dir(obj):
done += [str(obj)]
if verbose:
print ">>> Deleting:", i
exec "del tb." + i
#while len(done) > 0: # Necessary
# del done[0]
class GnuRadioDevice(Device, NetworkTransport):
def __init__(self, flowgraph, options, sink=None, fixed_flowgraph=False):
Device.__init__(self)
NetworkTransport.__init__(self, options.port)
self.flowgraph = flowgraph
self.options = options
self.sink = sink
self.no_delete = fixed_flowgraph
self.no_stop = fixed_flowgraph
if fixed_flowgraph:
self._running = True
self._last_tune_result = None
def open(self): # Can raise exceptions
#try:
if self.sink is None:
if hasattr(self.flowgraph, 'sink') == False:
print "~~> Failed to find 'sink' block in flowgraph"
return False
self.sink = self.flowgraph.sink
payload_size = self.samples_per_packet() * 2 * 2 # short I/Q
max_payload_size = (65536 - 29) # Max UDP payload
max_payload_size -= (max_payload_size % 512)
if payload_size > max_payload_size:
print "!!> Restricting calculated payload size:", payload_size, "to maximum:", max_payload_size
payload_size = max_payload_size
self.payload_size(payload_size)
return Device.open(self)
#except Exception, e:
# print "Exception while initialising GNU Radio wrapper:", str(e)
#return False
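	# Illustrative arithmetic (not part of the original file): a device
	# reporting 4096 samples per packet yields a requested payload of
	# 4096 * 2 * 2 = 16384 bytes; one reporting 32768 samples would want
	# 131072 bytes, which exceeds the 65536 - 29 = 65507 byte UDP limit and is
	# therefore clamped to 65024 (the largest multiple of 512 below the limit).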
def close(self):
Device.close(self)
if self.no_delete == False:
_delete_blocks(self.flowgraph, verbose=self.options.debug)
# Helpers
def _get_helper(self, names, fallback=None, _targets=[]):
if isinstance(names, str):
names = [names]
targets = _targets + ["", ".source", ".flowgraph"] # Second 'flowgraph' for 'ServerWrapper'
target = None
name = None
for n in names:
for t in targets:
#parts = t.split('.')
#accum = []
#next = False
#for part in parts:
# if hasattr(eval("self.flowgraph" + ".".join(accum)), part) == False:
# next = True
# break
# accum += [part]
#if next:
# continue
try:
if hasattr(eval("self.flowgraph" + t), n) == False:
#print " Not found:", t, "in:", n
continue
except AttributeError:
#print " AttributeError:", t, "in:", n
continue
#print " Found:", t, "in:", n
target = t
name = n
break
if target is not None:
#print " Using:", t, "in:", n
break
if target is None:
if self.options.debug:
print "##> Helper fallback:", names, fallback
return fallback
helper_name = "self.flowgraph" + target + "." + name
helper = eval(helper_name)
if self.options.debug:
print "##> Helper found:", helper_name, helper
if hasattr(helper, '__call__'):
return helper
return lambda: helper
# Device
def start(self):
if self.is_running():
return True
try:
self.flowgraph.start()
except Exception, e:
self._last_error = str(e)
return False
return Device.start(self)
def stop(self):
if self.is_running() == False:
return True
if self.no_stop == False:
try:
self.flowgraph.stop()
self.flowgraph.wait()
except Exception, e:
self._last_error = str(e)
return False
return Device.stop(self)
def name(self): # Raises
try:
fn = self._get_helper('source_name')
if fn:
return fn()
return self._get_helper('name', self.flowgraph.name, [".source"])()
except Exception, e:
self._last_error = str(e)
raise Exception, e
def serial(self): # Raises
try:
return self._get_helper(['serial', 'serial_number'], lambda: Device.serial(self))()
except Exception, e:
self._last_error = str(e)
raise Exception, e
def gain_range(self): # Raises
try:
fn = self._get_helper(['gain_range', 'get_gain_range'])
if fn is None:
return Device.gain_range(self)
_gr = fn()
if isinstance(_gr, GainRange):
return _gr
try:
gr = GainRange(_gr[0], _gr[1])
if len(_gr) > 2:
gr.step = _gr[2]
return gr
except:
pass
try:
gr = GainRange(_gr.start(), _gr.stop())
if hasattr(_gr, 'step'):
gr.step = _gr.step()
return gr
except:
pass
raise Exception, "Unknown type returned from gain_range"
except Exception, e:
self._last_error = str(e)
#return Device.gain_range(self)
raise Exception, e
def master_clock(self): # Raises
return self._get_helper(['master_clock', 'get_clock_rate'], lambda: Device.master_clock(self))()
def samples_per_packet(self):
return self._get_helper(['samples_per_packet', 'recv_samples_per_packet'], lambda: Device.samples_per_packet(self))()
def antennas(self):
return self._get_helper(['antennas', 'get_antennas'], lambda: Device.antennas(self))()
def gain(self, gain=None):
if gain is not None:
fn = self._get_helper('set_gain')
if fn:
res = fn(gain)
if res is not None and res == False:
return False
return Device.gain(self, gain)
return self._get_helper(['gain', 'get_gain'], lambda: Device.gain(self))()
def sample_rate(self, rate=None):
if rate is not None:
fn = self._get_helper(['set_sample_rate', 'set_samp_rate'])
if fn:
res = fn(rate)
if res is not None and res == False:
return False
return Device.sample_rate(self, rate)
return self._get_helper(['sample_rate', 'samp_rate', 'get_sample_rate', 'get_samp_rate'], lambda: Device.sample_rate(self))()
def freq(self, freq=None):
if freq is not None:
fn = self._get_helper(['set_freq', 'set_frequency', 'set_center_freq'])
res = None
if fn:
res = fn(freq)
if res is not None and res == False:
return False
self._last_tune_result = None
#if type(res) is int or type(res) is float:
tuned = freq
if res is not None and type(res) is not bool:
try:
if self.options.debug:
print "##> Frequency set returned:", res
if type(res) is list or type(res) is tuple:
self._last_tune_result = TuneResult(freq, tuned) # Should be same as res[0]
try:
#self._last_tune_result.target_rf_freq = res[0]
self._last_tune_result.actual_rf_freq = res[1]
self._last_tune_result.target_dsp_freq = res[2]
self._last_tune_result.actual_dsp_freq = res[3]
if self.options.debug:
print "##> Stored tune result:", self._last_tune_result
except Exception, e:
if self.options.debug:
print "##> Error while storing tune result:", e
tuned = self._last_tune_result.actual_rf_freq + self._last_tune_result.actual_dsp_freq
else:
temp_tune_result = TuneResult(freq, tuned)
if temp_tune_result.duck_copy(res):
if self.options.debug:
print "##> Duck copied tune result"
self._last_tune_result = temp_tune_result
else:
if self.options.debug:
print "##> Casting tune result to float"
tuned = float(res)
self._last_tune_result = None # Will be created in call to Device
except Exception, e:
if self.options.verbose:
print "##> Unknown exception while using response from frequency set:", str(res), "-", str(e)
return Device.freq(self, tuned, freq)
return self._get_helper(['freq', 'frequency', 'get_center_freq'], lambda: Device.freq(self))()
def was_tune_successful(self):
fn = self._get_helper(['was_tune_successful', 'was_tuning_successful'])
if fn:
return fn()
tolerance = None
fn = self._get_helper(['tune_tolerance', 'tuning_tolerance'])
if fn:
tolerance = fn()
if tolerance is not None:
tr = self.last_tune_result()
diff = self.freq() - (tr.actual_rf_freq + tr.actual_dsp_freq) # self.actual_dsp_freq
if abs(diff) > tolerance:
print " Difference", diff, ">", tolerance, "for", tr
if diff > 0:
return 1
else:
return -1
return Device.was_tune_successful(self)
def last_tune_result(self):
fn = self._get_helper(['last_tune_result'])
if fn:
return fn()
if self._last_tune_result is not None:
return self._last_tune_result
return Device.last_tune_result(self)
def antenna(self, antenna=None):
if antenna is not None:
fn = self._get_helper('set_antenna')
if fn:
if type(antenna) == int:
num = len(self.antennas())
if antenna < 0 or antenna >= num:
return False
antenna = self.antennas()[antenna]
if len(antenna) == 0:
return False
try:
res = fn(antenna)
if res is not None and res == False:
return False
except:
return False
return Device.antenna(self, antenna)
return self._get_helper(['antenna', 'get_antenna'], lambda: Device.antenna(self))()
# Network Transport
def destination(self, dest=None):
if dest is not None:
prev = self._destination
if NetworkTransport.destination(self, dest) == False:
return False
try:
#print "--> Connecting UDP Sink:", self._destination[0], self._destination[1]
self.sink.connect(self._destination[0], self._destination[1])
except Exception, e:
NetworkTransport.destination(self, prev)
self._last_error = str(e)
return False
return True
return NetworkTransport.destination(self)
def header(self, enable=None):
if enable is not None:
self.sink.set_borip(enable)
return NetworkTransport.header(self, enable)
def payload_size(self,size=None):
if size is not None:
self.sink.set_payload_size(size)
return NetworkTransport.payload_size(self, size)
def _format_error(error, pad=True):
if error is None or len(error) == 0:
return ""
error = error.strip()
error.replace("\\", "\\\\")
error.replace("\r", "\\r")
error.replace("\n", "\\n")
if pad:
error = " " + error
return error
def _format_device(device, transport):
if device is None or transport is None:
return "-"
return "%s|%f|%f|%f|%f|%d|%s|%s" % (
device.name(),
device.gain_range().start,
device.gain_range().stop,
device.gain_range().step,
device.master_clock(),
#device.samples_per_packet(),
(transport.payload_size()/2/2),
",".join(device.antennas()),
device.serial()
)
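# Note (added for clarity): _format_device builds the pipe-delimited device line
# used in protocol replies: name|gain_min|gain_max|gain_step|master_clock|
# samples_per_packet|antenna_list|serial, or just "-" when no device is open.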
def _create_device(hint, options):
if options.verbose:
print "--> Creating device with hint:", hint
id = None
if (options.default is not None) and (hint is None or len(hint) == 0 or hint == "-"):
id = options.default
if (id is None) or (len(id) == 0):
id = _device_hint_mapper(hint)
if options.debug:
print "--> ID:", id
if id is None or len(id) == 0:
#return None
raise Exception, "Empty ID"
if isinstance(id, dict):
args = []
for arg in id['args']:
check = _remove_quoted(arg)
if check.count("(") != check.count(")"):
continue
args += [arg]
id = id['module'] # Must come last
else:
parts = hint.split(" ")
if len(parts) > 0 and parts[0].lower() == id.lower():
parts = parts[1:]
if len(parts) > 0:
if options.debug:
print "--> Hint parts:", parts
args = []
append = False
accum = ""
for part in parts:
quote_list = _find_quotes(part)
if (len(quote_list) % 2) != 0: # Doesn't handle "a""b" as separate args
if append == False:
append = True
accum = part
continue
else:
part = accum + part
accum = ""
append = False
quote_list = _find_quotes(part)
elif append == True:
accum += part
continue
quotes = True
key = None
value = part
idx = part.find("=")
if idx > -1 and (len(quote_list) == 0 or idx < quote_list[0]):
key = part[0:idx]
value = part[idx + 1:]
if len(quote_list) >= 2 and quote_list[0] == 0 and quote_list[-1] == (len(part) - 1):
quotes = False
else:
if quotes:
try:
dummy = float(value)
quotes = False
except:
pass
if quotes:
try:
dummy = int(value, 16)
quotes = False
dummy = value.lower()
if len(dummy) < 2 or dummy[0:2] != "0x":
value = "0x" + value
except:
pass
arg = ""
if key:
arg = key + "="
if quotes:
value = value.replace("\"", "\\\"")
arg += "\"" + value + "\""
else:
arg += value
check = _remove_quoted(arg)
if check.count("(") != check.count(")"):
continue
args += [arg]
args_str = ",".join(args)
if len(args_str) > 0:
if options.debug:
print "--> Args:", args_str
if sys.modules.has_key("borip_" + id):
if options.no_reload == False:
try:
#exec "reload(borip_" + id + ")"
module = sys.modules["borip_" + id]
print "--> Reloading:", module
reload(module)
except Exception, e:
print "~~> Failed to reload:", str(e)
#return None
raise Exception, e
# try:
# device = module["borip_" + id + "(" + args_str + ")")
# except Exception, e:
# print "~~> Failed to create from module:", str(e)
# #return None
# raise Exception, e
#else:
try:
exec "import borip_" + id
except Exception, e:
print "~~> Failed to import:", str(e)
traceback.print_exc()
#return None
raise Exception, e
try:
device = eval("borip_" + id + ".borip_" + id + "(" + args_str + ")")
except Exception, e:
print "~~> Failed to create:", str(e)
traceback.print_exc()
#return None
raise Exception, e
device = _wrap_device(device, options)
print "--> Created device:", device
return device
def _wrap_device(device, options):
#parents = device.__class__.__bases__
#if Device not in parents:
if issubclass(device.__class__, Device) == False:
try:
if isinstance(device, server) and device.parent is not None:
print "--> Using GnuRadioDevice wrapper for parent", device.parent, "of", device
device = GnuRadioDevice(device.parent, options, device.sink, options.fixed_flowgraph)
#if gr.top_block in parents:
elif issubclass(device.__class__, gr.top_block):
print "--> Using GnuRadioDevice wrapper for", device
device = GnuRadioDevice(device, options)
elif issubclass(device.__class__, gr.hier_block2):
print "--> Using Server and GnuRadioDevice wrapper for", device
device = GnuRadioDevice(ServerWrapper(device, options), options)
else:
				print "~~> Device interface in", device, "not found:", device.__class__.__bases__
#return None
raise Exception, "Device interface not found"
except Exception, e:
print "~~> Failed to wrap:", str(e)
traceback.print_exc()
#return None
raise Exception, e
#parents = device.__class__.__bases__
#if NetworkTransport not in parents:
if issubclass(device.__class__, NetworkTransport) == False:
		print "~~> NetworkTransport interface in", device, "not found:", device.__class__.__bases__
#return None
raise Exception, "NetworkTransport interface not found"
try:
if device.open() == False:
print "~~> Failed to initialise device:", device
device.close()
#return None
raise Exception, "Failed to initialise device"
except Exception, e:
print "~~> Failed to open:", str(e)
traceback.print_exc()
#return None
raise Exception, e
return device
def _find_quotes(s):
b = False
e = False
list = []
i = -1
for c in s:
i += 1
if c == '\\':
e = True
continue
elif e:
e = False
continue
if c == '"':
list += [i]
return list
def _remove_quoted(data):
r = ""
list = _find_quotes(data)
if len(list) == 0:
return data
last = 0
b = False
for l in list:
if b == False:
r += data[last:l]
b = True
else:
last = l + 1
b = False
if b == False:
r += data[last:]
return r
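# Illustrative example (not part of the original code): _find_quotes returns the
# indices of unescaped double quotes, and _remove_quoted drops the quoted spans,
# e.g. _remove_quoted('freq="1e6" gain=10') -> 'freq= gain=10'.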
class ThreadedTCPRequestHandler(SocketServer.StreamRequestHandler): # BaseRequestHandler
# No __init__
def setup(self):
SocketServer.StreamRequestHandler.setup(self)
print "==> Connection from:", self.client_address
self.device = None
self.transport = NetworkTransport() # Create dummy to avoid 'None' checks in code
device = None
if self.server.block:
if self.server.device:
self.server.device.close()
self.server.device = None
device = _wrap_device(self.server.block, self.server.options)
elif self.server.device:
device = self.server.device
if device:
transport = device # MAGIC
if transport.destination(self.client_address[0]):
self.device = device
self.transport = transport
else:
device.close()
with self.server.clients_lock:
self.server.clients.append(self)
first_response = "DEVICE " + _format_device(self.device, self.transport)
if self.server.options.command:
print "< " + first_response
self.send(first_response)
def handle(self):
buffer = ""
while True:
data = "" # Initialise to nothing so if there's an exception it'll disconnect
try:
data = self.request.recv(1024) # 4096
except socket.error, (e, msg):
if e != 104: # Connection reset by peer
print "==>", self.client_address, "-", msg
#data = self.rfile.readline().strip()
if len(data) == 0:
break
#cur_thread = threading.currentThread()
#response = "%s: %s" % (cur_thread.getName(), data)
#self.request.send(response)
buffer += data
lines = buffer.splitlines(True)
for line in lines:
if line[-1] != '\n':
buffer = line
break
line = line.strip()
if self.process_client_command(line) == False:
break
else:
buffer = ""
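	# Note (added for clarity): handle() accumulates raw socket data and only
	# dispatches complete newline-terminated lines; a trailing partial line is
	# kept in 'buffer' until more data arrives.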
def finish(self):
print "==> Disconnection from:", self.client_address
if self.device:
print "--> Closing device:", self.device
self.close_device()
with self.server.clients_lock:
self.server.clients.remove(self)
try:
SocketServer.StreamRequestHandler.finish(self)
except socket.error, (e, msg):
if e != 32: # Broken pipe
print "==>", self.client_address, "-", msg
def close_device(self):
if self.device:
self.device.close()
#del self.device
#gc.collect()
self.device = None
self.transport = NetworkTransport() # MAGIC
def process_client_command(self, command):
if self.server.options.command:
print ">", command
data = None
idx = command.find(" ")
if idx > -1:
data = command[idx+1:].strip();
command = command[0:idx]
command = command.upper()
result = "OK"
try:
if command == "GO":
if self.device:
if self.device.is_running():
result += " RUNNING"
else:
if self.device.start() != True:
result = "FAIL" + _format_error(self.device.last_error())
else:
result = "DEVICE"
elif command == "STOP":
if self.device:
if self.device.is_running():
result += " STOPPED"
self.device.stop()
else:
result = "DEVICE"
elif command == "DEVICE":
error = ""
if (self.server.options.lock == False) and data is not None and (len(data) > 0):
self.close_device()
if data != "!":
try:
device = _create_device(data, self.server.options)
if device:
transport = device # MAGIC
if transport.destination(self.client_address[0]) == False:
error = "Failed to initialise NetworkTransport"
device.close()
else:
error = "Failed to create device"
except Exception, e:
traceback.print_exc()
error = str(e)
if len(error) == 0:
self.device = device
self.transport = transport
result = _format_device(self.device, self.transport) + _format_error(error)
elif command == "FREQ":
if self.device:
if data is None:
result = str(self.device.freq())
else:
freq = 0.0
try:
freq = float(data)
except:
pass
if self.device.freq(freq):
success = self.device.was_tune_successful()
if success < 0:
result = "LOW"
elif success > 0:
result = "HIGH"
tune_result = self.device.last_tune_result()
result += " %f %f %f %f" % (tune_result.target_rf_freq, tune_result.actual_rf_freq, tune_result.target_dsp_freq, tune_result.actual_dsp_freq)
else:
result = "DEVICE"
elif command == "ANTENNA":
if self.device:
if data is None:
result = str(self.device.antenna())
if result is None or len(result) == 0:
result = "UNKNOWN"
else:
if self.device.antenna(data) == False:
result = "FAIL" + _format_error(self.device.last_error())
else:
result = "DEVICE"
elif command == "GAIN":
if self.device:
if data is None:
result = str(self.device.gain())
else:
gain = 0.0
try:
gain = float(data)
except:
pass
if self.device.gain(gain):
#result += " " + str(self.device.gain())
pass
else:
result = "FAIL" + _format_error(self.device.last_error())
else:
result = "DEVICE"
elif command == "RATE":
if self.device:
if data is None:
result = str(self.device.sample_rate())
else:
rate = 0.0
try:
rate = float(data)
except:
pass
if self.device.sample_rate(rate):
result += " " + str(self.device.sample_rate())
else:
result = "FAIL" + _format_error(self.device.last_error())
else:
result = "DEVICE"
elif command == "DEST":
if data is None:
result = self.transport.destination()[0] + ":" + str(self.transport.destination()[1])
else:
if data == "-":
data = self.client_address[0]
if self.transport.destination(data):
result += " " + self.transport.destination()[0] + ":" + str(self.transport.destination()[1])
else:
result = "FAIL Failed to set destination"
elif command == "HEADER":
if data is None:
if self.transport.header():
result = "ON"
else:
result = "OFF"
else:
self.transport.header(data.upper() == "OFF")
#########################################
#elif command == "SHUTDOWN":
# quit_event.set()
# return False
#########################################
else:
result = "UNKNOWN"
except Exception, e:
if command == "DEVICE":
result = "-"
else:
result = "FAIL"
result += " " + str(e)
traceback.print_exc()
if result is None or len(result) == 0:
return True
result = command + " " + result
if self.server.options.command:
print "<", result
return self.send(result)
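	# Note (added for clarity): every reply echoes the command name followed by a
	# status or payload, e.g. "GO OK", "FREQ <value>", "RATE FAIL <error>", or
	# "FREQ DEVICE" when no device has been created yet.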
def send(self, data):
try:
self.wfile.write(data + "\n")
return True
except socket.error, (e, msg):
if e != 32: # Broken pipe
print "==>", self.client_address, "-", msg
return False
class ThreadedTCPServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer):
allow_reuse_address = True
def _generate_options(args=None):
usage="%prog: [options]"
parser = OptionParser(usage=usage) #option_class=eng_option,
parser.add_option("-l", "--listen", type="int", help="server listen port", default=28888) #, metavar="SUBDEV"
parser.add_option("-p", "--port", type="int", help="default data port", default=28888)
parser.add_option("-v", "--verbose", action="store_true", help="verbose output", default=False)
parser.add_option("-g", "--debug", action="store_true", help="debug output", default=False)
parser.add_option("-c", "--command", action="store_true", help="command output", default=False)
parser.add_option("-r", "--realtime", action="store_true", help="realtime scheduling", default=False)
parser.add_option("-R", "--no-reload", action="store_true", help="disable dynamic reload", default=False)
parser.add_option("-d", "--device", type="string", help="immediately create device", default=None)
parser.add_option("-L", "--lock", action="store_true", help="lock device", default=False)
parser.add_option("-D", "--default", type="string", help="device to create when default hint supplied", default=None)
if args:
if not isinstance(args, list):
args = [args]
(options, args) = parser.parse_args(args)
else:
(options, args) = parser.parse_args()
options.fixed_flowgraph = False
return (options, args)
class borip_server():
def __init__(self, block=None, options=None):
self.server = None
self.server_thread = None
if options is None or isinstance(options, str):
(options, args) = _generate_options(options)
self.options = options
self.block = block
def __del__(self):
self.stop()
def start(self):
if self.server is not None:
return True
device = None
if self.block is None and self.options.device is not None:
try:
device = _create_device(self.options.device, self.options)
except Exception, e:
print "!!> Failed to create initial device with hint:", self.options.device
HOST, PORT = "", self.options.listen
print "==> Starting TCP server on port:", PORT
while True:
try:
self.server = ThreadedTCPServer((HOST, PORT), ThreadedTCPRequestHandler)
self.server.options = self.options
self.server.block = self.block
self.server.device = device
self.server.clients_lock = threading.Lock()
self.server.clients = []
#ip, port = self.server.server_address
self.server_thread = threading.Thread(target=self.server.serve_forever)
self.server_thread.setDaemon(True)
self.server_thread.start()
print "==> TCP server running on", self.server.server_address, "in thread:", self.server_thread.getName()
except socket.error, (e, msg):
print " Socket error:", msg
if (e == 98): # Still in use
print " Waiting, then trying again..."
try:
time.sleep(5)
except KeyboardInterrupt:
print " Aborting"
sys.exit(1)
continue
sys.exit(1)
break
if self.options.realtime:
if gr.enable_realtime_scheduling() == gr.RT_OK:
print "==> Enabled realtime scheduling"
else:
print "!!> Failed to enable realtime scheduling"
return True
def stop(self):
if self.server is None:
return True
print "==> Closing server..."
self.server.server_close() # Clients still exist
with self.server.clients_lock:
# Calling 'shutdown' here causes clients not to handle close via 'finish'
for client in self.server.clients:
print "<<< Closing:", client.client_address
try:
client.request.shutdown(socket.SHUT_RDWR)
client.request.close()
except:
pass
self.server.shutdown()
self.server_thread.join()
self.server = None
return True
def main():
server = borip_server()
server.start()
try:
while True:
raw_input()
except KeyboardInterrupt:
print
except EOFError:
pass
except Exception, e:
print "==> Unhandled exception:", str(e)
server.stop()
print "==> Done"
if __name__ == "__main__":
main()
| gpl-3.0 |
suutari-ai/shoop | shuup/core/models/_currencies.py | 3 | 2858 | # -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
import decimal
import babel
from django.core.exceptions import ValidationError
from django.core.validators import MaxValueValidator, MinLengthValidator
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from shuup.core import cache
from shuup.utils.analog import define_log_model
@python_2_unicode_compatible
class Currency(models.Model):
identifier_attr = 'code'
code = models.CharField(verbose_name=_("code"),
max_length=3, unique=True,
editable=True,
validators=[MinLengthValidator(3)],
help_text=_("The ISO-4217 code of the currency"))
decimal_places = models.PositiveSmallIntegerField(verbose_name=_("decimal places"),
validators=[MaxValueValidator(10)],
default=2,
help_text=_(
"The number of decimal places supported by this currency."))
def clean(self):
super(Currency, self).clean()
# make sure the code is a valid ISO-4217 currency
if self.code not in babel.Locale("en").currencies:
raise ValidationError(_('Enter a valid ISO-4217 currency code'))
def save(self, *args, **kwargs):
super(Currency, self).save(*args, **kwargs)
cache.bump_version('currency_precision')
class Meta:
verbose_name = _("currency")
verbose_name_plural = _("currencies")
def __str__(self):
return self.code
def get_currency_precision(currency):
"""
Get precision by currency code.
Precision values will be populated from the ``decimal_places``
fields of the `Currency` objects in the database.
:type currency: str
:param currency: Currency code as 3-letter string (ISO-4217)
:rtype: decimal.Decimal|None
:return: Precision value for given currency code or None for unknown
"""
cache_key = 'currency_precision:' + currency
precision = cache.get(cache_key)
if precision is None:
currency_obj = Currency.objects.filter(code=currency).first()
precision = (
decimal.Decimal('0.1') ** currency_obj.decimal_places
if currency_obj else None)
cache.set(cache_key, precision)
return precision
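# Illustrative example (not part of the original module): with a row
# Currency(code="USD", decimal_places=2) in the database,
# get_currency_precision("USD") returns Decimal('0.01'); unknown codes return None.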
CurrencyLogEntry = define_log_model(Currency)
| agpl-3.0 |
DalikarFT/CFVOP | venv/Lib/site-packages/pip/_vendor/requests/structures.py | 615 | 3012 | # -*- coding: utf-8 -*-
"""
requests.structures
~~~~~~~~~~~~~~~~~~~
Data structures that power Requests.
"""
import collections
from .compat import OrderedDict
class CaseInsensitiveDict(collections.MutableMapping):
"""A case-insensitive ``dict``-like object.
Implements all methods and operations of
``collections.MutableMapping`` as well as dict's ``copy``. Also
provides ``lower_items``.
All keys are expected to be strings. The structure remembers the
case of the last key to be set, and ``iter(instance)``,
``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()``
will contain case-sensitive keys. However, querying and contains
testing is case insensitive::
cid = CaseInsensitiveDict()
cid['Accept'] = 'application/json'
cid['aCCEPT'] == 'application/json' # True
list(cid) == ['Accept'] # True
For example, ``headers['content-encoding']`` will return the
value of a ``'Content-Encoding'`` response header, regardless
of how the header name was originally stored.
If the constructor, ``.update``, or equality comparison
operations are given keys that have equal ``.lower()``s, the
behavior is undefined.
"""
def __init__(self, data=None, **kwargs):
self._store = OrderedDict()
if data is None:
data = {}
self.update(data, **kwargs)
def __setitem__(self, key, value):
# Use the lowercased key for lookups, but store the actual
# key alongside the value.
self._store[key.lower()] = (key, value)
def __getitem__(self, key):
return self._store[key.lower()][1]
def __delitem__(self, key):
del self._store[key.lower()]
def __iter__(self):
return (casedkey for casedkey, mappedvalue in self._store.values())
def __len__(self):
return len(self._store)
def lower_items(self):
"""Like iteritems(), but with all lowercase keys."""
return (
(lowerkey, keyval[1])
for (lowerkey, keyval)
in self._store.items()
)
def __eq__(self, other):
if isinstance(other, collections.Mapping):
other = CaseInsensitiveDict(other)
else:
return NotImplemented
# Compare insensitively
return dict(self.lower_items()) == dict(other.lower_items())
# Copy is required
def copy(self):
return CaseInsensitiveDict(self._store.values())
def __repr__(self):
return str(dict(self.items()))
class LookupDict(dict):
"""Dictionary lookup object."""
def __init__(self, name=None):
self.name = name
super(LookupDict, self).__init__()
def __repr__(self):
return '<lookup \'%s\'>' % (self.name)
def __getitem__(self, key):
# We allow fall-through here, so values default to None
return self.__dict__.get(key, None)
def get(self, key, default=None):
return self.__dict__.get(key, default)
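    # Illustrative example (not part of the original module):
    #   codes = LookupDict(name='status')
    #   codes.ok = 200
    #   codes['ok'] -> 200, while codes['missing'] -> None (no KeyError).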
| gpl-3.0 |
PUM-9/mickwald_client | scripts/mickClient.py | 1 | 1457 | #!/usr/bin/env python
import rospy
from chat_server.msg import Message
import sys
def select_mode():
    mode = sys.argv[-1]
    if mode == 'read':
        read_client()
    elif mode == 'send':
        run_client()
    else:
        print("Usage: use \"read\" or \"send\" as arguments for respective mode")
    return
def run_client():
pub = rospy.Publisher('chat_out', Message, queue_size=10)
rospy.init_node('mick_client', anonymous=True)
rospy.Subscriber('chat_in', Message, callback)
name = raw_input('Choose name: ')
message = 0
shutdown = False
while not shutdown:
message = raw_input(name + ': ')
if message[0:5] == '/name':
name = message[6:]
elif message[0:5] == '/exit':
shutdown = not shutdown
else:
pub.publish(name, message)
return
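# Note (added for clarity): in send mode "/name <newname>" changes the display
# name, "/exit" quits, and any other input is published to the chat_out topic.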
def callback(message):
    if message.sender == "":
        print("\n" + "Noname" + " says: " + message.message)
    else:
        print("\n" + message.sender + " says: " + message.message)
def read_client():
rospy.init_node('mickChatListener', anonymous=True)
rospy.Subscriber('chat_in', Message, callback)
print("Now accepting messages\n")
rate = rospy.Rate(10)
rospy.spin()
return
if __name__ == "__main__":
try:
run_client()
except rospy.ROSInterruptException:
pass
| gpl-3.0 |
nomaro/SickBeard_Backup | sickbeard/clients/requests/packages/urllib3/request.py | 245 | 5873 | # urllib3/request.py
# Copyright 2008-2012 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
try:
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
from .filepost import encode_multipart_formdata
__all__ = ['RequestMethods']
class RequestMethods(object):
"""
Convenience mixin for classes who implement a :meth:`urlopen` method, such
as :class:`~urllib3.connectionpool.HTTPConnectionPool` and
:class:`~urllib3.poolmanager.PoolManager`.
Provides behavior for making common types of HTTP request methods and
decides which type of request field encoding to use.
Specifically,
:meth:`.request_encode_url` is for sending requests whose fields are encoded
in the URL (such as GET, HEAD, DELETE).
:meth:`.request_encode_body` is for sending requests whose fields are
    encoded in the *body* of the request using multipart or www-form-urlencoded
(such as for POST, PUT, PATCH).
:meth:`.request` is for making any kind of request, it will look up the
appropriate encoding format and use one of the above two methods to make
the request.
Initializer parameters:
:param headers:
Headers to include with all requests, unless other headers are given
explicitly.
"""
_encode_url_methods = set(['DELETE', 'GET', 'HEAD', 'OPTIONS'])
_encode_body_methods = set(['PATCH', 'POST', 'PUT', 'TRACE'])
def __init__(self, headers=None):
self.headers = headers or {}
def urlopen(self, method, url, body=None, headers=None,
encode_multipart=True, multipart_boundary=None,
**kw): # Abstract
        raise NotImplementedError("Classes extending RequestMethods must implement "
"their own ``urlopen`` method.")
def request(self, method, url, fields=None, headers=None, **urlopen_kw):
"""
Make a request using :meth:`urlopen` with the appropriate encoding of
``fields`` based on the ``method`` used.
This is a convenience method that requires the least amount of manual
effort. It can be used in most situations, while still having the option
to drop down to more specific methods when necessary, such as
:meth:`request_encode_url`, :meth:`request_encode_body`,
or even the lowest level :meth:`urlopen`.
"""
method = method.upper()
if method in self._encode_url_methods:
return self.request_encode_url(method, url, fields=fields,
headers=headers,
**urlopen_kw)
else:
return self.request_encode_body(method, url, fields=fields,
headers=headers,
**urlopen_kw)
def request_encode_url(self, method, url, fields=None, **urlopen_kw):
"""
Make a request using :meth:`urlopen` with the ``fields`` encoded in
the url. This is useful for request methods like GET, HEAD, DELETE, etc.
"""
if fields:
url += '?' + urlencode(fields)
return self.urlopen(method, url, **urlopen_kw)
def request_encode_body(self, method, url, fields=None, headers=None,
encode_multipart=True, multipart_boundary=None,
**urlopen_kw):
"""
Make a request using :meth:`urlopen` with the ``fields`` encoded in
the body. This is useful for request methods like POST, PUT, PATCH, etc.
When ``encode_multipart=True`` (default), then
:meth:`urllib3.filepost.encode_multipart_formdata` is used to encode the
payload with the appropriate content type. Otherwise
:meth:`urllib.urlencode` is used with the
'application/x-www-form-urlencoded' content type.
Multipart encoding must be used when posting files, and it's reasonably
safe to use it in other times too. However, it may break request signing,
such as with OAuth.
Supports an optional ``fields`` parameter of key/value strings AND
key/filetuple. A filetuple is a (filename, data, MIME type) tuple where
the MIME type is optional. For example: ::
fields = {
'foo': 'bar',
'fakefile': ('foofile.txt', 'contents of foofile'),
'realfile': ('barfile.txt', open('realfile').read()),
'typedfile': ('bazfile.bin', open('bazfile').read(),
'image/jpeg'),
'nonamefile': 'contents of nonamefile field',
}
When uploading a file, providing a filename (the first parameter of the
        tuple) is optional but recommended to best mimic the behavior of browsers.
Note that if ``headers`` are supplied, the 'Content-Type' header will be
overwritten because it depends on the dynamic random boundary string
which is used to compose the body of the request. The random boundary
string can be explicitly set with the ``multipart_boundary`` parameter.
"""
if encode_multipart:
body, content_type = encode_multipart_formdata(fields or {},
boundary=multipart_boundary)
else:
body, content_type = (urlencode(fields or {}),
'application/x-www-form-urlencoded')
if headers is None:
headers = self.headers
headers_ = {'Content-Type': content_type}
headers_.update(headers)
return self.urlopen(method, url, body=body, headers=headers_,
**urlopen_kw)
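# Illustrative usage sketch (not part of this module; assumes urllib3's
# PoolManager, which mixes in RequestMethods):
#   http = urllib3.PoolManager()
#   r = http.request('GET', 'http://example.com/', fields={'q': 'test'})
#   r = http.request('POST', 'http://example.com/upload',
#                    fields={'file': ('report.txt', 'file contents')})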
| gpl-3.0 |
cvegaj/ElectriCERT | venv3/lib/python3.6/site-packages/pip/vcs/__init__.py | 344 | 12374 | """Handles all VCS (version control) support"""
from __future__ import absolute_import
import errno
import logging
import os
import shutil
import sys
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip.exceptions import BadCommand
from pip.utils import (display_path, backup_dir, call_subprocess,
rmtree, ask_path_exists)
__all__ = ['vcs', 'get_src_requirement']
logger = logging.getLogger(__name__)
class VcsSupport(object):
_registry = {}
schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn']
def __init__(self):
# Register more schemes with urlparse for various version control
# systems
urllib_parse.uses_netloc.extend(self.schemes)
# Python >= 2.7.4, 3.3 doesn't have uses_fragment
if getattr(urllib_parse, 'uses_fragment', None):
urllib_parse.uses_fragment.extend(self.schemes)
super(VcsSupport, self).__init__()
def __iter__(self):
return self._registry.__iter__()
@property
def backends(self):
return list(self._registry.values())
@property
def dirnames(self):
return [backend.dirname for backend in self.backends]
@property
def all_schemes(self):
schemes = []
for backend in self.backends:
schemes.extend(backend.schemes)
return schemes
def register(self, cls):
if not hasattr(cls, 'name'):
logger.warning('Cannot register VCS %s', cls.__name__)
return
if cls.name not in self._registry:
self._registry[cls.name] = cls
logger.debug('Registered VCS backend: %s', cls.name)
def unregister(self, cls=None, name=None):
if name in self._registry:
del self._registry[name]
elif cls in self._registry.values():
del self._registry[cls.name]
else:
logger.warning('Cannot unregister because no class or name given')
def get_backend_name(self, location):
"""
Return the name of the version control backend if found at given
location, e.g. vcs.get_backend_name('/path/to/vcs/checkout')
"""
for vc_type in self._registry.values():
if vc_type.controls_location(location):
                logger.debug('Determined that %s uses VCS: %s',
location, vc_type.name)
return vc_type.name
return None
def get_backend(self, name):
name = name.lower()
if name in self._registry:
return self._registry[name]
def get_backend_from_location(self, location):
vc_type = self.get_backend_name(location)
if vc_type:
return self.get_backend(vc_type)
return None
vcs = VcsSupport()
class VersionControl(object):
name = ''
dirname = ''
# List of supported schemes for this Version Control
schemes = ()
def __init__(self, url=None, *args, **kwargs):
self.url = url
super(VersionControl, self).__init__(*args, **kwargs)
def _is_local_repository(self, repo):
"""
posix absolute paths start with os.path.sep,
win32 ones start with drive (like c:\\folder)
"""
drive, tail = os.path.splitdrive(repo)
return repo.startswith(os.path.sep) or drive
# See issue #1083 for why this method was introduced:
# https://github.com/pypa/pip/issues/1083
def translate_egg_surname(self, surname):
# For example, Django has branches of the form "stable/1.7.x".
return surname.replace('/', '_')
def export(self, location):
"""
Export the repository at the url to the destination location
        i.e. only download the files, without vcs information
"""
raise NotImplementedError
def get_url_rev(self):
"""
Returns the correct repository URL and revision by parsing the given
repository URL
"""
error_message = (
"Sorry, '%s' is a malformed VCS url. "
"The format is <vcs>+<protocol>://<url>, "
"e.g. svn+http://myrepo/svn/MyApp#egg=MyApp"
)
assert '+' in self.url, error_message % self.url
url = self.url.split('+', 1)[1]
scheme, netloc, path, query, frag = urllib_parse.urlsplit(url)
rev = None
if '@' in path:
path, rev = path.rsplit('@', 1)
url = urllib_parse.urlunsplit((scheme, netloc, path, query, ''))
return url, rev
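    # Illustrative example (added for clarity): for a requirement URL such as
    # 'git+https://github.com/pypa/pip.git@1.5#egg=pip', get_url_rev() returns
    # ('https://github.com/pypa/pip.git', '1.5'); the fragment is dropped.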
def get_info(self, location):
"""
Returns (url, revision), where both are strings
"""
assert not location.rstrip('/').endswith(self.dirname), \
'Bad directory: %s' % location
return self.get_url(location), self.get_revision(location)
def normalize_url(self, url):
"""
Normalize a URL for comparison by unquoting it and removing any
trailing slash.
"""
return urllib_parse.unquote(url).rstrip('/')
def compare_urls(self, url1, url2):
"""
Compare two repo URLs for identity, ignoring incidental differences.
"""
return (self.normalize_url(url1) == self.normalize_url(url2))
def obtain(self, dest):
"""
Called when installing or updating an editable package, takes the
source path of the checkout.
"""
raise NotImplementedError
def switch(self, dest, url, rev_options):
"""
Switch the repo at ``dest`` to point to ``URL``.
"""
raise NotImplementedError
def update(self, dest, rev_options):
"""
Update an already-existing repo to the given ``rev_options``.
"""
raise NotImplementedError
def check_version(self, dest, rev_options):
"""
Return True if the version is identical to what exists and
doesn't need to be updated.
"""
raise NotImplementedError
def check_destination(self, dest, url, rev_options, rev_display):
"""
Prepare a location to receive a checkout/clone.
Return True if the location is ready for (and requires) a
checkout/clone, False otherwise.
"""
checkout = True
prompt = False
if os.path.exists(dest):
checkout = False
if os.path.exists(os.path.join(dest, self.dirname)):
existing_url = self.get_url(dest)
if self.compare_urls(existing_url, url):
logger.debug(
'%s in %s exists, and has correct URL (%s)',
self.repo_name.title(),
display_path(dest),
url,
)
if not self.check_version(dest, rev_options):
logger.info(
'Updating %s %s%s',
display_path(dest),
self.repo_name,
rev_display,
)
self.update(dest, rev_options)
else:
logger.info(
'Skipping because already up-to-date.')
else:
logger.warning(
'%s %s in %s exists with URL %s',
self.name,
self.repo_name,
display_path(dest),
existing_url,
)
prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ',
('s', 'i', 'w', 'b'))
else:
logger.warning(
'Directory %s already exists, and is not a %s %s.',
dest,
self.name,
self.repo_name,
)
prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b'))
if prompt:
logger.warning(
'The plan is to install the %s repository %s',
self.name,
url,
)
response = ask_path_exists('What to do? %s' % prompt[0],
prompt[1])
if response == 's':
logger.info(
'Switching %s %s to %s%s',
self.repo_name,
display_path(dest),
url,
rev_display,
)
self.switch(dest, url, rev_options)
elif response == 'i':
# do nothing
pass
elif response == 'w':
logger.warning('Deleting %s', display_path(dest))
rmtree(dest)
checkout = True
elif response == 'b':
dest_dir = backup_dir(dest)
logger.warning(
'Backing up %s to %s', display_path(dest), dest_dir,
)
shutil.move(dest, dest_dir)
checkout = True
elif response == 'a':
sys.exit(-1)
return checkout
def unpack(self, location):
"""
Clean up current location and download the url repository
(and vcs infos) into location
"""
if os.path.exists(location):
rmtree(location)
self.obtain(location)
def get_src_requirement(self, dist, location):
"""
Return a string representing the requirement needed to
redownload the files currently present in location, something
like:
{repository_url}@{revision}#egg={project_name}-{version_identifier}
"""
raise NotImplementedError
def get_url(self, location):
"""
Return the url used at location
Used in get_info or check_destination
"""
raise NotImplementedError
def get_revision(self, location):
"""
Return the current revision of the files at location
Used in get_info
"""
raise NotImplementedError
def run_command(self, cmd, show_stdout=True, cwd=None,
on_returncode='raise',
command_desc=None,
extra_environ=None, spinner=None):
"""
Run a VCS subcommand
This is simply a wrapper around call_subprocess that adds the VCS
command name, and checks that the VCS is available
"""
cmd = [self.name] + cmd
try:
return call_subprocess(cmd, show_stdout, cwd,
on_returncode,
command_desc, extra_environ,
spinner)
except OSError as e:
# errno.ENOENT = no such file or directory
# In other words, the VCS executable isn't available
if e.errno == errno.ENOENT:
raise BadCommand('Cannot find command %r' % self.name)
else:
raise # re-raise exception if a different error occurred
@classmethod
def controls_location(cls, location):
"""
Check if a location is controlled by the vcs.
It is meant to be overridden to implement smarter detection
mechanisms for specific vcs.
"""
logger.debug('Checking in %s for %s (%s)...',
location, cls.dirname, cls.name)
path = os.path.join(location, cls.dirname)
return os.path.exists(path)
def get_src_requirement(dist, location):
version_control = vcs.get_backend_from_location(location)
if version_control:
try:
return version_control().get_src_requirement(dist,
location)
except BadCommand:
logger.warning(
'cannot determine version of editable source in %s '
'(%s command not found in path)',
location,
version_control.name,
)
return dist.as_requirement()
logger.warning(
'cannot determine version of editable source in %s (is not SVN '
'checkout, Git clone, Mercurial clone or Bazaar branch)',
location,
)
return dist.as_requirement()
| gpl-3.0 |
timoreimann/kubernetes | vendor/github.com/ugorji/go/codec/test.py | 1516 | 4019 | #!/usr/bin/env python
# This will create golden files in a directory passed to it.
# A Test calls this internally to create the golden files
# So it can process them (so we don't have to checkin the files).
# Ensure msgpack-python and cbor are installed first, using:
# sudo apt-get install python-dev
# sudo apt-get install python-pip
# pip install --user msgpack-python msgpack-rpc-python cbor
# Ensure all "string" keys are utf strings (else encoded as bytes)
import cbor, msgpack, msgpackrpc, sys, os, threading
def get_test_data_list():
# get list with all primitive types, and a combo type
l0 = [
-8,
-1616,
-32323232,
-6464646464646464,
192,
1616,
32323232,
6464646464646464,
192,
-3232.0,
-6464646464.0,
3232.0,
6464.0,
6464646464.0,
False,
True,
u"null",
None,
u"someday",
1328176922000002000,
u"",
-2206187877999998000,
u"bytestring",
270,
u"none",
-2013855847999995777,
#-6795364578871345152,
]
l1 = [
{ "true": True,
"false": False },
{ "true": u"True",
"false": False,
"uint16(1616)": 1616 },
{ "list": [1616, 32323232, True, -3232.0, {"TRUE":True, "FALSE":False}, [True, False] ],
"int32":32323232, "bool": True,
"LONG STRING": u"123456789012345678901234567890123456789012345678901234567890",
"SHORT STRING": u"1234567890" },
{ True: "true", 138: False, "false": 200 }
]
l = []
l.extend(l0)
l.append(l0)
l.append(1)
l.extend(l1)
return l
def build_test_data(destdir):
l = get_test_data_list()
for i in range(len(l)):
# packer = msgpack.Packer()
serialized = msgpack.dumps(l[i])
f = open(os.path.join(destdir, str(i) + '.msgpack.golden'), 'wb')
f.write(serialized)
f.close()
serialized = cbor.dumps(l[i])
f = open(os.path.join(destdir, str(i) + '.cbor.golden'), 'wb')
f.write(serialized)
f.close()
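# Note (added for clarity): build_test_data writes one pair of golden files per
# test value, named "<index>.msgpack.golden" and "<index>.cbor.golden", into the
# destination directory given on the command line.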
def doRpcServer(port, stopTimeSec):
class EchoHandler(object):
def Echo123(self, msg1, msg2, msg3):
return ("1:%s 2:%s 3:%s" % (msg1, msg2, msg3))
def EchoStruct(self, msg):
return ("%s" % msg)
addr = msgpackrpc.Address('localhost', port)
server = msgpackrpc.Server(EchoHandler())
server.listen(addr)
# run thread to stop it after stopTimeSec seconds if > 0
if stopTimeSec > 0:
def myStopRpcServer():
server.stop()
t = threading.Timer(stopTimeSec, myStopRpcServer)
t.start()
server.start()
def doRpcClientToPythonSvc(port):
address = msgpackrpc.Address('localhost', port)
client = msgpackrpc.Client(address, unpack_encoding='utf-8')
print client.call("Echo123", "A1", "B2", "C3")
print client.call("EchoStruct", {"A" :"Aa", "B":"Bb", "C":"Cc"})
def doRpcClientToGoSvc(port):
# print ">>>> port: ", port, " <<<<<"
address = msgpackrpc.Address('localhost', port)
client = msgpackrpc.Client(address, unpack_encoding='utf-8')
print client.call("TestRpcInt.Echo123", ["A1", "B2", "C3"])
print client.call("TestRpcInt.EchoStruct", {"A" :"Aa", "B":"Bb", "C":"Cc"})
def doMain(args):
if len(args) == 2 and args[0] == "testdata":
build_test_data(args[1])
elif len(args) == 3 and args[0] == "rpc-server":
doRpcServer(int(args[1]), int(args[2]))
elif len(args) == 2 and args[0] == "rpc-client-python-service":
doRpcClientToPythonSvc(int(args[1]))
elif len(args) == 2 and args[0] == "rpc-client-go-service":
doRpcClientToGoSvc(int(args[1]))
else:
print("Usage: test.py " +
"[testdata|rpc-server|rpc-client-python-service|rpc-client-go-service] ...")
if __name__ == "__main__":
doMain(sys.argv[1:])
| apache-2.0 |
Red-M/CloudBot-legacy | plugins/horoscope.py | 6 | 1731 | # Plugin by Infinity - <https://github.com/infinitylabs/UguuBot>
from util import hook, http, text
db_ready = False
def db_init(db):
"""check to see that our db has the horoscope table and return a connection."""
global db_ready
if not db_ready:
db.execute("create table if not exists horoscope(nick primary key, sign)")
db.commit()
db_ready = True
@hook.command(autohelp=False)
def horoscope(inp, db=None, notice=None, nick=None):
"""horoscope <sign> -- Get your horoscope."""
db_init(db)
# check if the user asked us not to save his details
dontsave = inp.endswith(" dontsave")
if dontsave:
sign = inp[:-9].strip().lower()
else:
sign = inp
db.execute("create table if not exists horoscope(nick primary key, sign)")
if not sign:
sign = db.execute("select sign from horoscope where nick=lower(?)",
(nick,)).fetchone()
if not sign:
notice("horoscope <sign> -- Get your horoscope")
return
sign = sign[0]
url = "http://my.horoscope.com/astrology/free-daily-horoscope-{}.html".format(sign)
soup = http.get_soup(url)
title = soup.find_all('h1', {'class': 'h1b'})[1]
horoscope_text = soup.find('div', {'class': 'fontdef1'})
result = u"\x02%s\x02 %s" % (title, horoscope_text)
result = text.strip_html(result)
#result = unicode(result, "utf8").replace('flight ','')
if not title:
return "Could not get the horoscope for {}.".format(inp)
if inp and not dontsave:
db.execute("insert or replace into horoscope(nick, sign) values (?,?)",
(nick.lower(), sign))
db.commit()
return result
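# Note (added for clarity): "horoscope <sign>" fetches today's text and remembers
# the sign per nick; appending " dontsave" skips the database write.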
| gpl-3.0 |
ZuluPro/libcloud | libcloud/test/compute/test_medone.py | 12 | 1295 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from libcloud.compute.drivers.medone import MedOneNodeDriver
from libcloud.test.compute.test_dimensiondata_v2_3 import DimensionDataMockHttp, DimensionData_v2_3_Tests
class MedOneTests(DimensionData_v2_3_Tests, unittest.TestCase):
def setUp(self):
MedOneNodeDriver.connectionCls.conn_class = DimensionDataMockHttp
MedOneNodeDriver.connectionCls.active_api_version = '2.3'
DimensionDataMockHttp.type = None
self.driver = MedOneNodeDriver('user', 'password')
| apache-2.0 |
fengsp/flask-snippets | utilities/content_negotiated_error.py | 2 | 1167 | # -*- coding: utf-8 -*-
"""
utilities.context_global_socketio
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Using Context Globals with Gevent-Socketio
http://flask.pocoo.org/snippets/105/
"""
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
from flask import Response, current_app, request
from socketio import socketio_manage
# NOTE (assumption): "app" below refers to an existing Flask application object
# defined elsewhere in the project this snippet is dropped into.
@app.route('/socket.io/<path:path>')
def run_socketio(path):
real_request = request._get_current_object()
socketio_manage(request.environ, {'': FlaskNamespace},
request=real_request)
return Response()
from socketio.namespace import BaseNamespace
class FlaskNamespace(BaseNamespace):
def __init__(self, *args, **kwargs):
request = kwargs.get('request', None)
self.ctx = None
if request:
self.ctx = current_app.request_context(request.environ)
self.ctx.push()
current_app.preprocess_request()
del kwargs['request']
super(BaseNamespace, self).__init__(*args, **kwargs)
def disconnect(self, *args, **kwargs):
if self.ctx:
self.ctx.pop()
super(BaseNamespace, self).disconnect(*args, **kwargs)
| bsd-3-clause |
Floobits/plugin-common-python | repo.py | 8 | 1963 | import os
import stat
import subprocess
from xml.etree import ElementTree
try:
from . import api, msg
from . import shared as G
from .exc_fmt import str_e
assert api and G and msg and str_e
except ImportError:
import api
import msg
import shared as G
from exc_fmt import str_e
REPO_MAPPING = {
'git': {
'dir': '.git',
'cmd': ['git', 'config', '--get', 'remote.origin.url'],
},
'svn': {
'dir': '.svn',
'cmd': ['svn', 'info', '--xml'],
},
'hg': {
'dir': '.hg',
'cmd': ['hg', 'paths', 'default'],
},
}
def detect_type(d):
for repo_type, v in REPO_MAPPING.items():
repo_path = os.path.join(d, v['dir'])
try:
s = os.stat(repo_path)
except Exception:
continue
if stat.S_ISDIR(s.st_mode):
return repo_type
def parse_svn_xml(d):
root = ElementTree.XML(d)
repo_url = root.find('info/entry/url')
return repo_url and repo_url.text
def get_info(workspace_url, project_dir):
repo_type = detect_type(project_dir)
if not repo_type:
return
msg.debug('Detected ', repo_type, ' repo in ', project_dir)
data = {
'type': repo_type,
}
cmd = REPO_MAPPING[repo_type]['cmd']
try:
p = subprocess.Popen(cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
cwd=project_dir)
result = p.communicate()
repo_url = result[0].decode('utf-8').strip()
if repo_type == 'svn':
repo_url = parse_svn_xml(repo_url)
msg.log(repo_type, ' url is ', repo_url)
if not repo_url:
msg.error('Error getting ', repo_type, ' url:', result[1])
return
except Exception as e:
msg.error('Error getting ', repo_type, ' url:', str_e(e))
return
data['url'] = repo_url
return data
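# Illustrative example (not part of the original module): for a git checkout
# whose "origin" remote is git@github.com:Floobits/floobits-sublime.git,
# get_info(workspace_url, project_dir) returns
# {'type': 'git', 'url': 'git@github.com:Floobits/floobits-sublime.git'}.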
| apache-2.0 |
jzt5132/scikit-learn | examples/svm/plot_rbf_parameters.py | 132 | 8096 | '''
==================
RBF SVM parameters
==================
This example illustrates the effect of the parameters ``gamma`` and ``C`` of
the Radial Basis Function (RBF) kernel SVM.
Intuitively, the ``gamma`` parameter defines how far the influence of a single
training example reaches, with low values meaning 'far' and high values meaning
'close'. The ``gamma`` parameter can be seen as the inverse of the radius of
influence of samples selected by the model as support vectors.
The ``C`` parameter trades off misclassification of training examples against
simplicity of the decision surface. A low ``C`` makes the decision surface
smooth, while a high ``C`` aims at classifying all training examples correctly
by giving the model freedom to select more samples as support vectors.
The first plot is a visualization of the decision function for a variety of
parameter values on a simplified classification problem involving only 2 input
features and 2 possible target classes (binary classification). Note that this
kind of plot is not possible to do for problems with more features or target
classes.
The second plot is a heatmap of the classifier's cross-validation accuracy as a
function of ``C`` and ``gamma``. For this example we explore a relatively large
grid for illustration purposes. In practice, a logarithmic grid from
:math:`10^{-3}` to :math:`10^3` is usually sufficient. If the best parameters
lie on the boundaries of the grid, it can be extended in that direction in a
subsequent search.
Note that the heat map plot has a special colorbar with a midpoint value close
to the score values of the best performing models so as to make it easy to tell
them apart in the blink of an eye.
The behavior of the model is very sensitive to the ``gamma`` parameter. If
``gamma`` is too large, the radius of the area of influence of the support
vectors only includes the support vector itself and no amount of
regularization with ``C`` will be able to prevent overfitting.
When ``gamma`` is very small, the model is too constrained and cannot capture
the complexity or "shape" of the data. The region of influence of any selected
support vector would include the whole training set. The resulting model will
behave similarly to a linear model with a set of hyperplanes that separate the
centers of high density of any pair of two classes.
For intermediate values, we can see on the second plot that good models can
be found on a diagonal of ``C`` and ``gamma``. Smooth models (lower ``gamma``
values) can be made more complex by selecting a larger number of support
vectors (larger ``C`` values) hence the diagonal of good performing models.
Finally one can also observe that for some intermediate values of ``gamma`` we
get equally performing models when ``C`` becomes very large: it is not
necessary to regularize by limiting the number of support vectors. The radius of
the RBF kernel alone acts as a good structural regularizer. In practice though
it might still be interesting to limit the number of support vectors with a
lower value of ``C`` so as to favor models that use less memory and that are
faster to predict.
We should also note that small differences in scores results from the random
splits of the cross-validation procedure. Those spurious variations can be
smoothed out by increasing the number of CV iterations ``n_iter`` at the
expense of compute time. Increasing the value number of ``C_range`` and
``gamma_range`` steps will increase the resolution of the hyper-parameter heat
map.
'''
print(__doc__)
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import Normalize
from sklearn.svm import SVC
from sklearn.preprocessing import StandardScaler
from sklearn.datasets import load_iris
from sklearn.cross_validation import StratifiedShuffleSplit
from sklearn.grid_search import GridSearchCV
# Utility function to move the midpoint of a colormap to be around
# the values of interest.
class MidpointNormalize(Normalize):
def __init__(self, vmin=None, vmax=None, midpoint=None, clip=False):
self.midpoint = midpoint
Normalize.__init__(self, vmin, vmax, clip)
def __call__(self, value, clip=None):
x, y = [self.vmin, self.midpoint, self.vmax], [0, 0.5, 1]
return np.ma.masked_array(np.interp(value, x, y))
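# Note (added for clarity): MidpointNormalize linearly maps [vmin, midpoint, vmax]
# onto [0, 0.5, 1], so the colormap's middle color lands on the chosen midpoint
# score rather than on the arithmetic mean of vmin and vmax.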
##############################################################################
# Load and prepare data set
#
# dataset for grid search
iris = load_iris()
X = iris.data
y = iris.target
# Dataset for decision function visualization: we only keep the first two
# features in X and sub-sample the dataset to keep only 2 classes and
# make it a binary classification problem.
X_2d = X[:, :2]
X_2d = X_2d[y > 0]
y_2d = y[y > 0]
y_2d -= 1
# It is usually a good idea to scale the data for SVM training.
# We are cheating a bit in this example in scaling all of the data,
# instead of fitting the transformation on the training set and
# just applying it on the test set.
scaler = StandardScaler()
X = scaler.fit_transform(X)
X_2d = scaler.fit_transform(X_2d)
##############################################################################
# Train classifiers
#
# For an initial search, a logarithmic grid with basis
# 10 is often helpful. Using a basis of 2, a finer
# tuning can be achieved but at a much higher cost.
C_range = np.logspace(-2, 10, 13)
gamma_range = np.logspace(-9, 3, 13)
param_grid = dict(gamma=gamma_range, C=C_range)
cv = StratifiedShuffleSplit(y, n_iter=5, test_size=0.2, random_state=42)
grid = GridSearchCV(SVC(), param_grid=param_grid, cv=cv)
grid.fit(X, y)
print("The best parameters are %s with a score of %0.2f"
% (grid.best_params_, grid.best_score_))
# Now we need to fit a classifier for all parameters in the 2d version
# (we use a smaller set of parameters here because it takes a while to train)
C_2d_range = [1e-2, 1, 1e2]
gamma_2d_range = [1e-1, 1, 1e1]
classifiers = []
for C in C_2d_range:
for gamma in gamma_2d_range:
clf = SVC(C=C, gamma=gamma)
clf.fit(X_2d, y_2d)
classifiers.append((C, gamma, clf))
##############################################################################
# visualization
#
# draw visualization of parameter effects
plt.figure(figsize=(8, 6))
xx, yy = np.meshgrid(np.linspace(-3, 3, 200), np.linspace(-3, 3, 200))
for (k, (C, gamma, clf)) in enumerate(classifiers):
# evaluate decision function in a grid
Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)
# visualize decision function for these parameters
plt.subplot(len(C_2d_range), len(gamma_2d_range), k + 1)
plt.title("gamma=10^%d, C=10^%d" % (np.log10(gamma), np.log10(C)),
size='medium')
# visualize parameter's effect on decision function
plt.pcolormesh(xx, yy, -Z, cmap=plt.cm.RdBu)
plt.scatter(X_2d[:, 0], X_2d[:, 1], c=y_2d, cmap=plt.cm.RdBu_r)
plt.xticks(())
plt.yticks(())
plt.axis('tight')
# plot the scores of the grid
# grid_scores_ contains parameter settings and scores
# We extract just the scores
scores = [x[1] for x in grid.grid_scores_]
scores = np.array(scores).reshape(len(C_range), len(gamma_range))
# Draw heatmap of the validation accuracy as a function of gamma and C
#
# The score are encoded as colors with the hot colormap which varies from dark
# red to bright yellow. As the most interesting scores are all located in the
# 0.92 to 0.97 range we use a custom normalizer to set the mid-point to 0.92 so
# as to make it easier to visualize the small variations of score values in the
# interesting range while not brutally collapsing all the low score values to
# the same color.
plt.figure(figsize=(8, 6))
plt.subplots_adjust(left=.2, right=0.95, bottom=0.15, top=0.95)
plt.imshow(scores, interpolation='nearest', cmap=plt.cm.hot,
norm=MidpointNormalize(vmin=0.2, midpoint=0.92))
plt.xlabel('gamma')
plt.ylabel('C')
plt.colorbar()
plt.xticks(np.arange(len(gamma_range)), gamma_range, rotation=45)
plt.yticks(np.arange(len(C_range)), C_range)
plt.title('Validation accuracy')
plt.show()
| bsd-3-clause |
margaritis/iTerm2 | tests/esctest/escargs.py | 31 | 2509 | import argparse
# To enable this option, add the following line to ~/.Xresources:
# xterm*disallowedWindowOps:
# Then run "xrdb -merge ~/.Xresources" and restart xterm.
XTERM_WINOPS_ENABLED = "xtermWinopsEnabled"
DISABLE_WIDE_CHARS = "disableWideChars"
ACTION_RUN="run"
ACTION_LIST_KNOWN_BUGS="list-known-bugs"
parser = argparse.ArgumentParser()
parser.add_argument("--disable-xterm-checksum-bug",
help="Don't use buggy parameter order for DECRQCRA",
action="store_true")
parser.add_argument("--include",
help="Regex for names of tests to run.",
default=".*")
parser.add_argument("--expected-terminal",
help="Terminal in use. Modifies tests for known differences.",
choices=("iTerm2", "xterm"),
default="iTerm2")
parser.add_argument("--no-print-logs",
help="Print logs after finishing?",
action="store_true")
parser.add_argument("--test-case-dir",
help="Create files with test cases in the specified directory",
default=None)
parser.add_argument("--stop-on-failure",
help="Stop running tests after a failure.",
action="store_true")
parser.add_argument("--force",
help="If set, assertions won't stop execution.",
action="store_true")
parser.add_argument("--options",
help="Space-separated options that are enabled.",
nargs="+",
choices=[ XTERM_WINOPS_ENABLED, DISABLE_WIDE_CHARS ])
parser.add_argument("--max-vt-level",
help="Do not run tests requiring a higher VT level than this.",
type=int,
default=5)
parser.add_argument("--logfile",
help="Log file to write output to",
default="/tmp/esctest.log")
parser.add_argument("--v",
help="Verbosity level. 1=errors, 2=errors and info, 3=debug, errors, and info",
default=2,
type=int)
parser.add_argument("--action",
help="Action to perform.",
default=ACTION_RUN,
choices=[ ACTION_RUN, ACTION_LIST_KNOWN_BUGS ])
parser.add_argument("--timeout",
help="Timeout for reading reports from terminal.",
default=1,
type=float)
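# Illustrative invocation (hypothetical values; the runner script name is assumed):
#   esctest.py --expected-terminal xterm --include 'DECSET' --max-vt-level 4 --v 3
# where --include is matched as a regex against test names.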
| gpl-2.0 |
splav/servo | tests/wpt/web-platform-tests/tools/third_party/pytest/testing/python/integration.py | 30 | 13137 | import pytest
from _pytest import python
from _pytest import runner
class TestOEJSKITSpecials(object):
def test_funcarg_non_pycollectobj(self, testdir): # rough jstests usage
testdir.makeconftest(
"""
import pytest
def pytest_pycollect_makeitem(collector, name, obj):
if name == "MyClass":
return MyCollector(name, parent=collector)
class MyCollector(pytest.Collector):
def reportinfo(self):
return self.fspath, 3, "xyz"
"""
)
modcol = testdir.getmodulecol(
"""
import pytest
@pytest.fixture
def arg1(request):
return 42
class MyClass(object):
pass
"""
)
# this hook finds funcarg factories
rep = runner.collect_one_node(collector=modcol)
clscol = rep.result[0]
clscol.obj = lambda arg1: None
clscol.funcargs = {}
pytest._fillfuncargs(clscol)
assert clscol.funcargs["arg1"] == 42
def test_autouse_fixture(self, testdir): # rough jstests usage
testdir.makeconftest(
"""
import pytest
def pytest_pycollect_makeitem(collector, name, obj):
if name == "MyClass":
return MyCollector(name, parent=collector)
class MyCollector(pytest.Collector):
def reportinfo(self):
return self.fspath, 3, "xyz"
"""
)
modcol = testdir.getmodulecol(
"""
import pytest
@pytest.fixture(autouse=True)
def hello():
pass
@pytest.fixture
def arg1(request):
return 42
class MyClass(object):
pass
"""
)
# this hook finds funcarg factories
rep = runner.collect_one_node(modcol)
clscol = rep.result[0]
clscol.obj = lambda: None
clscol.funcargs = {}
pytest._fillfuncargs(clscol)
assert not clscol.funcargs
def test_wrapped_getfslineno():
def func():
pass
def wrap(f):
func.__wrapped__ = f
func.patchings = ["qwe"]
return func
@wrap
def wrapped_func(x, y, z):
pass
fs, lineno = python.getfslineno(wrapped_func)
fs2, lineno2 = python.getfslineno(wrap)
assert lineno > lineno2, "getfslineno does not unwrap correctly"
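# Note (added for clarity): the wrap decorator replaces wrapped_func with func but
# sets func.__wrapped__ to the original; getfslineno should follow __wrapped__ to
# the real function, which is defined after wrap, hence lineno > lineno2.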
class TestMockDecoration(object):
def test_wrapped_getfuncargnames(self):
from _pytest.compat import getfuncargnames
def wrap(f):
def func():
pass
func.__wrapped__ = f
return func
@wrap
def f(x):
pass
values = getfuncargnames(f)
assert values == ("x",)
@pytest.mark.xfail(
strict=False, reason="getfuncargnames breaks if mock is imported"
)
def test_wrapped_getfuncargnames_patching(self):
from _pytest.compat import getfuncargnames
def wrap(f):
def func():
pass
func.__wrapped__ = f
func.patchings = ["qwe"]
return func
@wrap
def f(x, y, z):
pass
values = getfuncargnames(f)
assert values == ("y", "z")
def test_unittest_mock(self, testdir):
pytest.importorskip("unittest.mock")
testdir.makepyfile(
"""
import unittest.mock
class T(unittest.TestCase):
@unittest.mock.patch("os.path.abspath")
def test_hello(self, abspath):
import os
os.path.abspath("hello")
abspath.assert_any_call("hello")
"""
)
reprec = testdir.inline_run()
reprec.assertoutcome(passed=1)
def test_unittest_mock_and_fixture(self, testdir):
pytest.importorskip("unittest.mock")
testdir.makepyfile(
"""
import os.path
import unittest.mock
import pytest
@pytest.fixture
def inject_me():
pass
@unittest.mock.patch.object(os.path, "abspath",
new=unittest.mock.MagicMock)
def test_hello(inject_me):
import os
os.path.abspath("hello")
"""
)
reprec = testdir.inline_run()
reprec.assertoutcome(passed=1)
def test_unittest_mock_and_pypi_mock(self, testdir):
pytest.importorskip("unittest.mock")
pytest.importorskip("mock", "1.0.1")
testdir.makepyfile(
"""
import mock
import unittest.mock
class TestBoth(object):
@unittest.mock.patch("os.path.abspath")
def test_hello(self, abspath):
import os
os.path.abspath("hello")
abspath.assert_any_call("hello")
@mock.patch("os.path.abspath")
def test_hello_mock(self, abspath):
import os
os.path.abspath("hello")
abspath.assert_any_call("hello")
"""
)
reprec = testdir.inline_run()
reprec.assertoutcome(passed=2)
def test_mock(self, testdir):
pytest.importorskip("mock", "1.0.1")
testdir.makepyfile(
"""
import os
import unittest
import mock
class T(unittest.TestCase):
@mock.patch("os.path.abspath")
def test_hello(self, abspath):
os.path.abspath("hello")
abspath.assert_any_call("hello")
def mock_basename(path):
return "mock_basename"
@mock.patch("os.path.abspath")
@mock.patch("os.path.normpath")
@mock.patch("os.path.basename", new=mock_basename)
def test_someting(normpath, abspath, tmpdir):
abspath.return_value = "this"
os.path.normpath(os.path.abspath("hello"))
normpath.assert_any_call("this")
assert os.path.basename("123") == "mock_basename"
"""
)
reprec = testdir.inline_run()
reprec.assertoutcome(passed=2)
calls = reprec.getcalls("pytest_runtest_logreport")
funcnames = [
call.report.location[2] for call in calls if call.report.when == "call"
]
assert funcnames == ["T.test_hello", "test_someting"]
def test_mock_sorting(self, testdir):
pytest.importorskip("mock", "1.0.1")
testdir.makepyfile(
"""
import os
import mock
@mock.patch("os.path.abspath")
def test_one(abspath):
pass
@mock.patch("os.path.abspath")
def test_two(abspath):
pass
@mock.patch("os.path.abspath")
def test_three(abspath):
pass
"""
)
reprec = testdir.inline_run()
calls = reprec.getreports("pytest_runtest_logreport")
calls = [x for x in calls if x.when == "call"]
names = [x.nodeid.split("::")[-1] for x in calls]
assert names == ["test_one", "test_two", "test_three"]
def test_mock_double_patch_issue473(self, testdir):
pytest.importorskip("mock", "1.0.1")
testdir.makepyfile(
"""
from mock import patch
from pytest import mark
@patch('os.getcwd')
@patch('os.path')
@mark.slow
class TestSimple(object):
def test_simple_thing(self, mock_path, mock_getcwd):
pass
"""
)
reprec = testdir.inline_run()
reprec.assertoutcome(passed=1)
class TestReRunTests(object):
def test_rerun(self, testdir):
testdir.makeconftest(
"""
from _pytest.runner import runtestprotocol
def pytest_runtest_protocol(item, nextitem):
runtestprotocol(item, log=False, nextitem=nextitem)
runtestprotocol(item, log=True, nextitem=nextitem)
"""
)
testdir.makepyfile(
"""
import pytest
count = 0
req = None
@pytest.fixture
def fix(request):
global count, req
assert request != req
req = request
print ("fix count %s" % count)
count += 1
def test_fix(fix):
pass
"""
)
result = testdir.runpytest("-s")
result.stdout.fnmatch_lines(
"""
*fix count 0*
*fix count 1*
"""
)
result.stdout.fnmatch_lines(
"""
*2 passed*
"""
)
def test_pytestconfig_is_session_scoped():
from _pytest.fixtures import pytestconfig
assert pytestconfig._pytestfixturefunction.scope == "session"
class TestNoselikeTestAttribute(object):
def test_module_with_global_test(self, testdir):
testdir.makepyfile(
"""
__test__ = False
def test_hello():
pass
"""
)
reprec = testdir.inline_run()
assert not reprec.getfailedcollections()
calls = reprec.getreports("pytest_runtest_logreport")
assert not calls
def test_class_and_method(self, testdir):
testdir.makepyfile(
"""
__test__ = True
def test_func():
pass
test_func.__test__ = False
class TestSome(object):
__test__ = False
def test_method(self):
pass
"""
)
reprec = testdir.inline_run()
assert not reprec.getfailedcollections()
calls = reprec.getreports("pytest_runtest_logreport")
assert not calls
def test_unittest_class(self, testdir):
testdir.makepyfile(
"""
import unittest
class TC(unittest.TestCase):
def test_1(self):
pass
class TC2(unittest.TestCase):
__test__ = False
def test_2(self):
pass
"""
)
reprec = testdir.inline_run()
assert not reprec.getfailedcollections()
call = reprec.getcalls("pytest_collection_modifyitems")[0]
assert len(call.items) == 1
assert call.items[0].cls.__name__ == "TC"
def test_class_with_nasty_getattr(self, testdir):
"""Make sure we handle classes with a custom nasty __getattr__ right.
With a custom __getattr__ which e.g. returns a function (like with a
RPC wrapper), we shouldn't assume this meant "__test__ = True".
"""
# https://github.com/pytest-dev/pytest/issues/1204
testdir.makepyfile(
"""
class MetaModel(type):
def __getattr__(cls, key):
return lambda: None
BaseModel = MetaModel('Model', (), {})
class Model(BaseModel):
__metaclass__ = MetaModel
def test_blah(self):
pass
"""
)
reprec = testdir.inline_run()
assert not reprec.getfailedcollections()
call = reprec.getcalls("pytest_collection_modifyitems")[0]
assert not call.items
@pytest.mark.issue351
class TestParameterize(object):
def test_idfn_marker(self, testdir):
testdir.makepyfile(
"""
import pytest
def idfn(param):
if param == 0:
return 'spam'
elif param == 1:
return 'ham'
else:
return None
@pytest.mark.parametrize('a,b', [(0, 2), (1, 2)], ids=idfn)
def test_params(a, b):
pass
"""
)
res = testdir.runpytest("--collect-only")
res.stdout.fnmatch_lines(["*spam-2*", "*ham-2*"])
def test_idfn_fixture(self, testdir):
testdir.makepyfile(
"""
import pytest
def idfn(param):
if param == 0:
return 'spam'
elif param == 1:
return 'ham'
else:
return None
@pytest.fixture(params=[0, 1], ids=idfn)
def a(request):
return request.param
@pytest.fixture(params=[1, 2], ids=idfn)
def b(request):
return request.param
def test_params(a, b):
pass
"""
)
res = testdir.runpytest("--collect-only")
res.stdout.fnmatch_lines(["*spam-2*", "*ham-2*"])
| mpl-2.0 |
2013Commons/hue | desktop/core/ext-py/tablib-develop/tablib/packages/markup3.py | 65 | 19129 | # This code is in the public domain, it comes
# with absolutely no warranty and you can do
# absolutely whatever you want with it.
__date__ = '17 May 2007'
__version__ = '1.7'
__doc__= """
This is markup.py - a Python module that attempts to
make it easier to generate HTML/XML from a Python program
in an intuitive, lightweight, customizable and pythonic way.
The code is in the public domain.
Version: %s as of %s.
Documentation and further info is at http://markup.sourceforge.net/
Please send bug reports, feature requests, enhancement
ideas or questions to nogradi at gmail dot com.
Installation: drop markup.py somewhere into your Python path.
""" % ( __version__, __date__ )
import string
class element:
"""This class handles the addition of a new element."""
def __init__( self, tag, case='lower', parent=None ):
self.parent = parent
if case == 'lower':
self.tag = tag.lower( )
else:
self.tag = tag.upper( )
def __call__( self, *args, **kwargs ):
if len( args ) > 1:
raise ArgumentError( self.tag )
# if class_ was defined in parent it should be added to every element
if self.parent is not None and self.parent.class_ is not None:
if 'class_' not in kwargs:
kwargs['class_'] = self.parent.class_
if self.parent is None and len( args ) == 1:
x = [ self.render( self.tag, False, myarg, mydict ) for myarg, mydict in _argsdicts( args, kwargs ) ]
return '\n'.join( x )
elif self.parent is None and len( args ) == 0:
x = [ self.render( self.tag, True, myarg, mydict ) for myarg, mydict in _argsdicts( args, kwargs ) ]
return '\n'.join( x )
if self.tag in self.parent.twotags:
for myarg, mydict in _argsdicts( args, kwargs ):
self.render( self.tag, False, myarg, mydict )
elif self.tag in self.parent.onetags:
if len( args ) == 0:
for myarg, mydict in _argsdicts( args, kwargs ):
self.render( self.tag, True, myarg, mydict ) # here myarg is always None, because len( args ) = 0
else:
raise ClosingError( self.tag )
elif self.parent.mode == 'strict_html' and self.tag in self.parent.deptags:
raise DeprecationError( self.tag )
else:
raise InvalidElementError( self.tag, self.parent.mode )
def render( self, tag, single, between, kwargs ):
"""Append the actual tags to content."""
out = "<%s" % tag
for key, value in kwargs.items( ):
if value is not None: # when value is None that means stuff like <... checked>
key = key.strip('_') # strip this so class_ will mean class, etc.
if key == 'http_equiv': # special cases, maybe change _ to - overall?
key = 'http-equiv'
elif key == 'accept_charset':
key = 'accept-charset'
out = "%s %s=\"%s\"" % ( out, key, escape( value ) )
else:
out = "%s %s" % ( out, key )
if between is not None:
out = "%s>%s</%s>" % ( out, between, tag )
else:
if single:
out = "%s />" % out
else:
out = "%s>" % out
if self.parent is not None:
self.parent.content.append( out )
else:
return out
def close( self ):
"""Append a closing tag unless element has only opening tag."""
if self.tag in self.parent.twotags:
self.parent.content.append( "</%s>" % self.tag )
elif self.tag in self.parent.onetags:
raise ClosingError( self.tag )
elif self.parent.mode == 'strict_html' and self.tag in self.parent.deptags:
raise DeprecationError( self.tag )
def open( self, **kwargs ):
"""Append an opening tag."""
if self.tag in self.parent.twotags or self.tag in self.parent.onetags:
self.render( self.tag, False, None, kwargs )
        elif self.parent.mode == 'strict_html' and self.tag in self.parent.deptags:
raise DeprecationError( self.tag )
class page:
"""This is our main class representing a document. Elements are added
as attributes of an instance of this class."""
def __init__( self, mode='strict_html', case='lower', onetags=None, twotags=None, separator='\n', class_=None ):
"""Stuff that effects the whole document.
mode -- 'strict_html' for HTML 4.01 (default)
'html' alias for 'strict_html'
'loose_html' to allow some deprecated elements
'xml' to allow arbitrary elements
case -- 'lower' element names will be printed in lower case (default)
'upper' they will be printed in upper case
onetags -- list or tuple of valid elements with opening tags only
twotags -- list or tuple of valid elements with both opening and closing tags
these two keyword arguments may be used to select
the set of valid elements in 'xml' mode
invalid elements will raise appropriate exceptions
separator -- string to place between added elements, defaults to newline
class_ -- a class that will be added to every element if defined"""
valid_onetags = [ "AREA", "BASE", "BR", "COL", "FRAME", "HR", "IMG", "INPUT", "LINK", "META", "PARAM" ]
valid_twotags = [ "A", "ABBR", "ACRONYM", "ADDRESS", "B", "BDO", "BIG", "BLOCKQUOTE", "BODY", "BUTTON",
"CAPTION", "CITE", "CODE", "COLGROUP", "DD", "DEL", "DFN", "DIV", "DL", "DT", "EM", "FIELDSET",
"FORM", "FRAMESET", "H1", "H2", "H3", "H4", "H5", "H6", "HEAD", "HTML", "I", "IFRAME", "INS",
"KBD", "LABEL", "LEGEND", "LI", "MAP", "NOFRAMES", "NOSCRIPT", "OBJECT", "OL", "OPTGROUP",
"OPTION", "P", "PRE", "Q", "SAMP", "SCRIPT", "SELECT", "SMALL", "SPAN", "STRONG", "STYLE",
"SUB", "SUP", "TABLE", "TBODY", "TD", "TEXTAREA", "TFOOT", "TH", "THEAD", "TITLE", "TR",
"TT", "UL", "VAR" ]
deprecated_onetags = [ "BASEFONT", "ISINDEX" ]
deprecated_twotags = [ "APPLET", "CENTER", "DIR", "FONT", "MENU", "S", "STRIKE", "U" ]
self.header = [ ]
self.content = [ ]
self.footer = [ ]
self.case = case
self.separator = separator
# init( ) sets it to True so we know that </body></html> has to be printed at the end
self._full = False
self.class_= class_
if mode == 'strict_html' or mode == 'html':
self.onetags = valid_onetags
self.onetags += list(map( str.lower, self.onetags ))
self.twotags = valid_twotags
self.twotags += list(map( str.lower, self.twotags ))
self.deptags = deprecated_onetags + deprecated_twotags
self.deptags += list(map( str.lower, self.deptags ))
self.mode = 'strict_html'
elif mode == 'loose_html':
self.onetags = valid_onetags + deprecated_onetags
self.onetags += list(map( str.lower, self.onetags ))
self.twotags = valid_twotags + deprecated_twotags
self.twotags += list(map( str.lower, self.twotags ))
self.mode = mode
elif mode == 'xml':
if onetags and twotags:
self.onetags = onetags
self.twotags = twotags
elif ( onetags and not twotags ) or ( twotags and not onetags ):
raise CustomizationError( )
else:
self.onetags = russell( )
self.twotags = russell( )
self.mode = mode
else:
raise ModeError( mode )
def __getattr__( self, attr ):
if attr.startswith("__") and attr.endswith("__"):
raise AttributeError(attr)
return element( attr, case=self.case, parent=self )
def __str__( self ):
if self._full and ( self.mode == 'strict_html' or self.mode == 'loose_html' ):
end = [ '</body>', '</html>' ]
else:
end = [ ]
return self.separator.join( self.header + self.content + self.footer + end )
def __call__( self, escape=False ):
"""Return the document as a string.
escape -- False print normally
                    True    replace < and > by &lt; and &gt;
the default escape sequences in most browsers"""
if escape:
return _escape( self.__str__( ) )
else:
return self.__str__( )
def add( self, text ):
"""This is an alias to addcontent."""
self.addcontent( text )
def addfooter( self, text ):
"""Add some text to the bottom of the document"""
self.footer.append( text )
def addheader( self, text ):
"""Add some text to the top of the document"""
self.header.append( text )
def addcontent( self, text ):
"""Add some text to the main part of the document"""
self.content.append( text )
def init( self, lang='en', css=None, metainfo=None, title=None, header=None,
footer=None, charset=None, encoding=None, doctype=None, bodyattrs=None, script=None ):
"""This method is used for complete documents with appropriate
doctype, encoding, title, etc information. For an HTML/XML snippet
omit this method.
lang -- language, usually a two character string, will appear
as <html lang='en'> in html mode (ignored in xml mode)
css -- Cascading Style Sheet filename as a string or a list of
strings for multiple css files (ignored in xml mode)
metainfo -- a dictionary in the form { 'name':'content' } to be inserted
into meta element(s) as <meta name='name' content='content'>
(ignored in xml mode)
bodyattrs --a dictionary in the form { 'key':'value', ... } which will be added
as attributes of the <body> element as <body key='value' ... >
(ignored in xml mode)
script -- dictionary containing src:type pairs, <script type='text/type' src=src></script>
title -- the title of the document as a string to be inserted into
a title element as <title>my title</title> (ignored in xml mode)
header -- some text to be inserted right after the <body> element
(ignored in xml mode)
footer -- some text to be inserted right before the </body> element
(ignored in xml mode)
charset -- a string defining the character set, will be inserted into a
<meta http-equiv='Content-Type' content='text/html; charset=myset'>
element (ignored in xml mode)
encoding -- a string defining the encoding, will be put into to first line of
the document as <?xml version='1.0' encoding='myencoding' ?> in
xml mode (ignored in html mode)
doctype -- the document type string, defaults to
<!DOCTYPE HTML PUBLIC '-//W3C//DTD HTML 4.01 Transitional//EN'>
in html mode (ignored in xml mode)"""
self._full = True
if self.mode == 'strict_html' or self.mode == 'loose_html':
if doctype is None:
doctype = "<!DOCTYPE HTML PUBLIC '-//W3C//DTD HTML 4.01 Transitional//EN'>"
self.header.append( doctype )
self.html( lang=lang )
self.head( )
if charset is not None:
self.meta( http_equiv='Content-Type', content="text/html; charset=%s" % charset )
if metainfo is not None:
self.metainfo( metainfo )
if css is not None:
self.css( css )
if title is not None:
self.title( title )
if script is not None:
self.scripts( script )
self.head.close()
if bodyattrs is not None:
self.body( **bodyattrs )
else:
self.body( )
if header is not None:
self.content.append( header )
if footer is not None:
self.footer.append( footer )
elif self.mode == 'xml':
if doctype is None:
if encoding is not None:
doctype = "<?xml version='1.0' encoding='%s' ?>" % encoding
else:
doctype = "<?xml version='1.0' ?>"
self.header.append( doctype )
def css( self, filelist ):
"""This convenience function is only useful for html.
It adds css stylesheet(s) to the document via the <link> element."""
if isinstance( filelist, str ):
self.link( href=filelist, rel='stylesheet', type='text/css', media='all' )
else:
for file in filelist:
self.link( href=file, rel='stylesheet', type='text/css', media='all' )
def metainfo( self, mydict ):
"""This convenience function is only useful for html.
It adds meta information via the <meta> element, the argument is
a dictionary of the form { 'name':'content' }."""
if isinstance( mydict, dict ):
for name, content in mydict.items( ):
self.meta( name=name, content=content )
else:
raise TypeError("Metainfo should be called with a dictionary argument of name:content pairs.")
def scripts( self, mydict ):
"""Only useful in html, mydict is dictionary of src:type pairs will
be rendered as <script type='text/type' src=src></script>"""
if isinstance( mydict, dict ):
for src, type in mydict.items( ):
self.script( '', src=src, type='text/%s' % type )
else:
raise TypeError("Script should be given a dictionary of src:type pairs.")
class _oneliner:
"""An instance of oneliner returns a string corresponding to one element.
This class can be used to write 'oneliners' that return a string
immediately so there is no need to instantiate the page class."""
def __init__( self, case='lower' ):
self.case = case
def __getattr__( self, attr ):
if attr.startswith("__") and attr.endswith("__"):
raise AttributeError(attr)
return element( attr, case=self.case, parent=None )
oneliner = _oneliner( case='lower' )
upper_oneliner = _oneliner( case='upper' )
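# Illustrative use of the oneliner instances (assumed example): unlike page,
# they return the rendered markup immediately as a string.
#
#   oneliner.a( 'home', href='index.html' )   # '<a href="index.html">home</a>'
#   oneliner.hr( )                            # '<hr />'
#   upper_oneliner.br( )                      # '<BR />'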
def _argsdicts( args, mydict ):
"""A utility generator that pads argument list and dictionary values, will only be called with len( args ) = 0, 1."""
if len( args ) == 0:
args = None,
elif len( args ) == 1:
args = _totuple( args[0] )
else:
raise Exception("We should have never gotten here.")
mykeys = list(mydict.keys( ))
myvalues = list(map( _totuple, list(mydict.values( )) ))
maxlength = max( list(map( len, [ args ] + myvalues )) )
for i in range( maxlength ):
thisdict = { }
for key, value in zip( mykeys, myvalues ):
try:
thisdict[ key ] = value[i]
except IndexError:
thisdict[ key ] = value[-1]
try:
thisarg = args[i]
except IndexError:
thisarg = args[-1]
yield thisarg, thisdict
def _totuple( x ):
"""Utility stuff to convert string, int, float, None or anything to a usable tuple."""
if isinstance( x, str ):
out = x,
elif isinstance( x, ( int, float ) ):
out = str( x ),
elif x is None:
out = None,
else:
out = tuple( x )
return out
def escape( text, newline=False ):
    """Escape special html characters."""
    if isinstance( text, str ):
        if '&' in text:
            text = text.replace( '&', '&amp;' )
        if '>' in text:
            text = text.replace( '>', '&gt;' )
        if '<' in text:
            text = text.replace( '<', '&lt;' )
        if '\"' in text:
            text = text.replace( '\"', '&quot;' )
        if '\'' in text:
            text = text.replace( '\'', '&quot;' )
        if newline:
            if '\n' in text:
                text = text.replace( '\n', '<br>' )
    return text
_escape = escape
def unescape( text ):
    """Inverse of escape."""
    if isinstance( text, str ):
        if '&amp;' in text:
            text = text.replace( '&amp;', '&' )
        if '&gt;' in text:
            text = text.replace( '&gt;', '>' )
        if '&lt;' in text:
            text = text.replace( '&lt;', '<' )
        if '&quot;' in text:
            text = text.replace( '&quot;', '\"' )
    return text
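# Worked examples for escape/unescape (assumed, not from the original source):
#
#   escape( '<b>"x" & y</b>' )   ->  '&lt;b&gt;&quot;x&quot; &amp; y&lt;/b&gt;'
#   unescape( '&lt;br&gt;' )     ->  '<br>'
#
# escape maps both quote characters to &quot;, so unescape is not an exact
# inverse for single quotes.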
class dummy:
"""A dummy class for attaching attributes."""
pass
doctype = dummy( )
doctype.frameset = "<!DOCTYPE HTML PUBLIC '-//W3C//DTD HTML 4.01 Frameset//EN' 'http://www.w3.org/TR/html4/frameset.dtd'>"
doctype.strict = "<!DOCTYPE HTML PUBLIC '-//W3C//DTD HTML 4.01//EN' 'http://www.w3.org/TR/html4/strict.dtd'>"
doctype.loose = "<!DOCTYPE HTML PUBLIC '-//W3C//DTD HTML 4.01 Transitional//EN' 'http://www.w3.org/TR/html4/loose.dtd'>"
class russell:
"""A dummy class that contains anything."""
def __contains__( self, item ):
return True
class MarkupError( Exception ):
"""All our exceptions subclass this."""
def __str__( self ):
return self.message
class ClosingError( MarkupError ):
def __init__( self, tag ):
self.message = "The element '%s' does not accept non-keyword arguments (has no closing tag)." % tag
class OpeningError( MarkupError ):
def __init__( self, tag ):
self.message = "The element '%s' can not be opened." % tag
class ArgumentError( MarkupError ):
def __init__( self, tag ):
self.message = "The element '%s' was called with more than one non-keyword argument." % tag
class InvalidElementError( MarkupError ):
def __init__( self, tag, mode ):
self.message = "The element '%s' is not valid for your mode '%s'." % ( tag, mode )
class DeprecationError( MarkupError ):
def __init__( self, tag ):
self.message = "The element '%s' is deprecated, instantiate markup.page with mode='loose_html' to allow it." % tag
class ModeError( MarkupError ):
def __init__( self, mode ):
self.message = "Mode '%s' is invalid, possible values: strict_html, loose_html, xml." % mode
class CustomizationError( MarkupError ):
def __init__( self ):
self.message = "If you customize the allowed elements, you must define both types 'onetags' and 'twotags'."
if __name__ == '__main__':
print(__doc__)
| apache-2.0 |
igors10099/ilona | p2pool/util/deferral.py | 45 | 5679 | from __future__ import division
import itertools
import random
import sys
from twisted.internet import defer, reactor
from twisted.python import failure, log
def sleep(t):
d = defer.Deferred()
reactor.callLater(t, d.callback, None)
return d
def run_repeatedly(f, *args, **kwargs):
current_dc = [None]
def step():
delay = f(*args, **kwargs)
current_dc[0] = reactor.callLater(delay, step)
step()
def stop():
current_dc[0].cancel()
return stop
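# Illustrative use of run_repeatedly (assumed example): f is called immediately
# and must return the delay, in seconds, before its next invocation; the
# returned callable cancels the loop.
#
#   stop = run_repeatedly(lambda: 60)   # poll now and then roughly once a minute
#   ...
#   stop()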
class RetrySilentlyException(Exception):
pass
def retry(message='Error:', delay=3, max_retries=None, traceback=True):
'''
@retry('Error getting block:', 1)
@defer.inlineCallbacks
def get_block(hash):
...
'''
def retry2(func):
@defer.inlineCallbacks
def f(*args, **kwargs):
for i in itertools.count():
try:
result = yield func(*args, **kwargs)
except Exception, e:
if i == max_retries:
raise
if not isinstance(e, RetrySilentlyException):
if traceback:
log.err(None, message)
else:
print >>sys.stderr, message, e
yield sleep(delay)
else:
defer.returnValue(result)
return f
return retry2
class ReplyMatcher(object):
'''
Converts request/got response interface to deferred interface
'''
def __init__(self, func, timeout=5):
self.func = func
self.timeout = timeout
self.map = {}
def __call__(self, id):
if id not in self.map:
self.func(id)
df = defer.Deferred()
def timeout():
self.map[id].remove((df, timer))
if not self.map[id]:
del self.map[id]
df.errback(failure.Failure(defer.TimeoutError('in ReplyMatcher')))
timer = reactor.callLater(self.timeout, timeout)
self.map.setdefault(id, set()).add((df, timer))
return df
def got_response(self, id, resp):
if id not in self.map:
return
for df, timer in self.map.pop(id):
df.callback(resp)
timer.cancel()
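# Illustrative use of ReplyMatcher (assumed names: send_request and reply are
# placeholders, not part of this module):
#
#   matcher = ReplyMatcher(lambda id: send_request(id))
#   d = matcher(some_id)                  # sends the request, returns a Deferred
#   # ... later, when the reply for some_id arrives:
#   matcher.got_response(some_id, reply)  # fires d with the reply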
class GenericDeferrer(object):
'''
Converts query with identifier/got response interface to deferred interface
'''
def __init__(self, max_id, func, timeout=5, on_timeout=lambda: None):
self.max_id = max_id
self.func = func
self.timeout = timeout
self.on_timeout = on_timeout
self.map = {}
def __call__(self, *args, **kwargs):
while True:
id = random.randrange(self.max_id)
if id not in self.map:
break
def cancel(df):
df, timer = self.map.pop(id)
timer.cancel()
try:
df = defer.Deferred(cancel)
except TypeError:
df = defer.Deferred() # handle older versions of Twisted
def timeout():
self.map.pop(id)
df.errback(failure.Failure(defer.TimeoutError('in GenericDeferrer')))
self.on_timeout()
timer = reactor.callLater(self.timeout, timeout)
self.map[id] = df, timer
self.func(id, *args, **kwargs)
return df
def got_response(self, id, resp):
if id not in self.map:
return
df, timer = self.map.pop(id)
timer.cancel()
df.callback(resp)
def respond_all(self, resp):
while self.map:
id, (df, timer) = self.map.popitem()
timer.cancel()
df.errback(resp)
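# Illustrative use of GenericDeferrer (assumed names: write_frame is a
# placeholder for whatever actually transmits the request):
#
#   send = GenericDeferrer(2**32, lambda id, msg: write_frame(id, msg))
#   d = send('ping')                      # picks a fresh id, transmits, returns a Deferred
#   # ... when the peer answers with the same id:
#   send.got_response(the_id, 'pong')     # fires d with 'pong'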
class NotNowError(Exception):
pass
class DeferredCacher(object):
'''
like memoize, but for functions that return Deferreds
@DeferredCacher
def f(x):
...
return df
@DeferredCacher.with_backing(bsddb.hashopen(...))
def f(x):
...
return df
'''
@classmethod
def with_backing(cls, backing):
return lambda func: cls(func, backing)
def __init__(self, func, backing=None):
if backing is None:
backing = {}
self.func = func
self.backing = backing
self.waiting = {}
@defer.inlineCallbacks
def __call__(self, key):
if key in self.waiting:
yield self.waiting[key]
if key in self.backing:
defer.returnValue(self.backing[key])
else:
self.waiting[key] = defer.Deferred()
try:
value = yield self.func(key)
finally:
self.waiting.pop(key).callback(None)
self.backing[key] = value
defer.returnValue(value)
_nothing = object()
def call_now(self, key, default=_nothing):
if key in self.backing:
return self.backing[key]
if key not in self.waiting:
self.waiting[key] = defer.Deferred()
def cb(value):
self.backing[key] = value
self.waiting.pop(key).callback(None)
def eb(fail):
self.waiting.pop(key).callback(None)
if fail.check(RetrySilentlyException):
return
print
print 'Error when requesting noncached value:'
fail.printTraceback()
print
self.func(key).addCallback(cb).addErrback(eb)
if default is not self._nothing:
return default
raise NotNowError(key)
| gpl-3.0 |