signature | body | docstring | id |
---|---|---|---|
def load_library(self,libname): | paths = self.getpaths(libname)<EOL>for path in paths:<EOL><INDENT>if os.path.exists(path):<EOL><INDENT>return self.load(path)<EOL><DEDENT><DEDENT>raise ImportError("<STR_LIT>" % libname)<EOL> | Given the name of a library, load it. | f2107:c5:m1 |
def load(self,path): | try:<EOL><INDENT>if sys.platform == '<STR_LIT>':<EOL><INDENT>return ctypes.CDLL(path, ctypes.RTLD_GLOBAL)<EOL><DEDENT>else:<EOL><INDENT>return ctypes.cdll.LoadLibrary(path)<EOL><DEDENT><DEDENT>except OSError as e:<EOL><INDENT>raise ImportError(e)<EOL><DEDENT> | Given a path to a library, load it. | f2107:c5:m2 |
def getpaths(self,libname): | if os.path.isabs(libname):<EOL><INDENT>yield libname<EOL><DEDENT>else:<EOL><INDENT>for path in self.getplatformpaths(libname):<EOL><INDENT>yield path<EOL><DEDENT>path = ctypes.util.find_library(libname)<EOL>if path: yield path<EOL><DEDENT> | Yield paths where the library might be found. | f2107:c5:m3 |
def getdirs(self,libname): | dyld_fallback_library_path = _environ_path("<STR_LIT>")<EOL>if not dyld_fallback_library_path:<EOL><INDENT>dyld_fallback_library_path = [os.path.expanduser('<STR_LIT>'),<EOL>'<STR_LIT>', '<STR_LIT>']<EOL><DEDENT>dirs = []<EOL>if '<STR_LIT:/>' in libname:<EOL><INDENT>dirs.extend(_environ_path("<STR_LIT>"))<EOL><DEDENT>else:<EOL><INDENT>dirs.extend(_environ_path("<STR_LIT>"))<EOL>dirs.extend(_environ_path("<STR_LIT>"))<EOL><DEDENT>dirs.extend(self.other_dirs)<EOL>dirs.append("<STR_LIT:.>")<EOL>dirs.append(os.path.dirname(__file__))<EOL>if hasattr(sys, '<STR_LIT>') and sys.frozen == '<STR_LIT>':<EOL><INDENT>dirs.append(os.path.join(<EOL>os.environ['<STR_LIT>'],<EOL>'<STR_LIT:..>',<EOL>'<STR_LIT>'))<EOL><DEDENT>dirs.extend(dyld_fallback_library_path)<EOL>return dirs<EOL> | Implements the dylib search as specified in Apple documentation:
http://developer.apple.com/documentation/DeveloperTools/Conceptual/DynamicLibraries/Articles/DynamicLibraryUsageGuidelines.html
Before commencing the standard search, the method first checks
the bundle's ``Frameworks`` directory if the application is running
within a bundle (OS X .app). | f2107:c6:m1 |
def get_foo_bar(): | def on_foo(*args, **kwargs):<EOL><INDENT>"""<STR_LIT>"""<EOL><DEDENT>def on_bar():<EOL><INDENT>"""<STR_LIT>"""<EOL><DEDENT>return on_foo, on_bar<EOL> | Helper function that returns two functions, on_foo() and on_bar(), similar
to what :class:`C1` and :class:`C2` define internally. | f2110:m1 |
def M1(): | on_foo, on_bar = get_foo_bar()<EOL>ns = NS()<EOL>ns.on_foo_func = on_foo<EOL>ns.on_foo = Signal.define(on_foo)<EOL>ns.on_bar = Signal.define(on_bar)<EOL>return ns<EOL> | Helper function that returns a module-like thing with two signals defined
using :meth:`Signal.define` | f2110:m2 |
def M2(): | on_foo, on_bar = get_foo_bar()<EOL>ns = NS()<EOL>ns.on_foo_func = on_foo<EOL>ns.on_foo = signal(on_foo)<EOL>ns.on_bar = signal(on_bar)<EOL>return ns<EOL> | Helper function that returns a module-like thing with two signals defined
using :class:`signal` | f2110:m3 |
def runTest(self): | | An empty test method | f2110:c0:m0 |
def on_foo(self, *args, **kwargs): | | A signal accepting (ignoring) arbitrary arguments | f2110:c2:m0 |
@Signal.define<EOL><INDENT>def on_bar(self):<DEDENT> | | A signal accepting no arguments | f2110:c2:m1 |
def on_foo(self, *args, **kwargs): | | A signal accepting (ignoring) arbitrary arguments | f2110:c3:m0 |
@signal<EOL><INDENT>def on_bar(self):<DEDENT> | | A signal accepting no arguments | f2110:c3:m1 |
def remove_signals_listeners(instance): | if hasattr(instance, "<STR_LIT>"):<EOL><INDENT>for listener in list(instance.__listeners__):<EOL><INDENT>for signal in instance.__listeners__[listener]:<EOL><INDENT>signal.disconnect(listener)<EOL><DEDENT><DEDENT><DEDENT> | utility function that disconnects all listeners from all signals on an
object | f2111:m1 |
def __init__(self, name_or_first_responder, pass_signal=False,<EOL>signal_name=None): | if isinstance(name_or_first_responder, self._str_bases):<EOL><INDENT>first_responder = None<EOL>name = name_or_first_responder<EOL><DEDENT>else:<EOL><INDENT>first_responder = name_or_first_responder<EOL>name = signal_name or _get_fn_name(first_responder)<EOL><DEDENT>self._name = name<EOL>self._first_responder = first_responder<EOL>self._listeners = []<EOL>if first_responder is not None:<EOL><INDENT>self._listeners.append(listenerinfo(first_responder, pass_signal))<EOL><DEDENT> | Construct a signal with the given name
:param name_or_first_responder:
Either the name of the signal to construct or a callable which
will be the first responder. In the latter case the callable is
used to obtain the name of the signal.
:param pass_signal:
An optional flag that instructs morris to pass the signal object
itself to the first responder (as the ``signal`` argument). This is
only used in the case where ``name_or_first_responder`` is a
callable.
:param signal_name:
Optional name of the signal. This is meaningful only when the first
argument ``name_or_first_responder`` is a callable. When that
happens this argument is used and no guessing based on ``__qualname__``
or ``__name__`` is performed. | f2111:c0:m0 |
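For context, here is a minimal sketch of the two construction modes the docstring above describes; the ``morris`` import is inferred from the doctests later in this table, so treat it as an assumption.

```python
from morris import Signal  # library name inferred from the doctests below

# 1. Construct by name: an anonymous signal with no listeners yet.
on_change = Signal('on_change')

# 2. Construct from a first responder: the callable supplies the name
#    and becomes the signal's first listener.
def on_save(*args, **kwargs):
    """First responder; called on every fire()."""

on_save_sig = Signal(on_save, pass_signal=False)
```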
def __repr__(self): | if (len(self._listeners) > <NUM_LIT:0><EOL>and isinstance(self.listeners[<NUM_LIT:0>].listener, boundmethod)):<EOL><INDENT>return "<STR_LIT>".format(<EOL>str(self._name), self._listeners[<NUM_LIT:0>].listener.instance)<EOL><DEDENT>else:<EOL><INDENT>return "<STR_LIT>".format(str(self._name))<EOL><DEDENT> | A representation of the signal.
There are two possible representations:
- a signal object created via a signal descriptor on an object
- a signal object acting as a descriptor or function decorator | f2111:c0:m1 |
def __get__(self, instance, owner): | if instance is None:<EOL><INDENT>return self<EOL><DEDENT>if not hasattr(instance, "<STR_LIT>"):<EOL><INDENT>instance.__signals__ = {}<EOL><DEDENT>if self._name not in instance.__signals__:<EOL><INDENT>signal = Signal(self._name)<EOL>signal.connect(boundmethod(instance, self._first_responder))<EOL>instance.__signals__[self._name] = signal<EOL><DEDENT>return instance.__signals__[self._name]<EOL> | Descriptor __get__ method
This method is called when a signal-decorated method is being accessed
via an object or a class. It is never called for decorated functions.
:param instance:
Instance of the object the descriptor is being used on.
This is None when the descriptor is accessed on a class.
:param owner:
The class that the descriptor is defined on.
:returns:
If ``instance`` is None we return ourselves, this is what
descriptors typically do. If ``instance`` is not None we return a
unique :class:`Signal` instance that is specific to that object and
signal. This is implemented by storing the signal inside the
object's __signals__ attribute. | f2111:c0:m2 |
@property<EOL><INDENT>def name(self):<DEDENT> | return self._name<EOL> | Name of the signal
For signals constructed manually (i.e. by calling :class:`Signal()`)
the name is arbitrary. For signals constructed using either
:meth:`Signal.define()` or :class:`signal` the name is obtained
from the decorated function.
On Python 3.3+ the qualified name is used (see :pep:`3155`), on earlier
versions the plain name is used (without the class name). The name is
identical regardless of how the signal is being accessed:
>>> class C(object):
... @signal
... def on_meth(self):
... pass
As a descriptor on a class:
>>> C.on_meth.name # doctest: +ELLIPSIS
'...on_meth'
As a descriptor on an object:
>>> C().on_meth.name # doctest: +ELLIPSIS
'...on_meth'
As a decorated function:
>>> @signal
... def on_func():
... pass
>>> on_func.name
'on_func' | f2111:c0:m5 |
@property<EOL><INDENT>def listeners(self):<DEDENT> | return self._listeners<EOL> | List of :class:`listenerinfo` objects associated with this signal
The list of listeners is considered part of an implementation detail
but is exposed for convenience. This is always the real list. Keep
this in mind while connecting and disconnecting listeners. During
the time :meth:`fire()` is called the list of listeners can be changed
but won't take effect until after ``fire()`` returns. | f2111:c0:m6 |
@property<EOL><INDENT>def first_responder(self):<DEDENT> | return self._first_responder<EOL> | The first responder function.
This is the function that the ``signal`` may have been instantiated
with. It is only relevant if the signal itself is used as a
*descriptor* in a class (where it decorates a method).
For example, contrast the access of the signal on the class and on a
class instance:
>>> class C(object):
... @signal
... def on_foo(self):
... pass
Class access uses the descriptor protocol to expose the
actual signal object.
>>> C.on_foo # doctest: +ELLIPSIS
<signal name:'...on_foo'>
Here we can use the ``first_responder`` property to see the actual
function.
>>> C.on_foo.first_responder # doctest: +ELLIPSIS
<function ...on_foo at ...>
Object access is different as now the signal instance is specific to
the object:
>>> C().on_foo # doctest: +ELLIPSIS
<signal name:'...on_foo' (specific to <morris.C object at ...)>
And now the first responder is gone (it is now buried inside the
:meth:`listeners` list):
>>> C().on_foo.first_responder | f2111:c0:m7 |
def connect(self, listener, pass_signal=False): | info = listenerinfo(listener, pass_signal)<EOL>self._listeners.append(info)<EOL>_logger.debug("<STR_LIT>", str(listener), self._name)<EOL>if inspect.ismethod(listener):<EOL><INDENT>listener_object = listener.__self__<EOL>if not hasattr(listener_object, "<STR_LIT>"):<EOL><INDENT>listener_object.__listeners__ = collections.defaultdict(list)<EOL><DEDENT>listener_object.__listeners__[listener].append(self)<EOL><DEDENT> | Connect a new listener to this signal
:param listener:
The listener (callable) to add
:param pass_signal:
An optional argument that controls if the signal object is
explicitly passed to this listener when it is being fired.
If enabled, a ``signal=`` keyword argument is passed to the
listener function.
:returns:
None
The listener will be called whenever :meth:`fire()` or
:meth:`__call__()` are called. The listener is appended to the list of
listeners. Duplicates are not checked and if a listener is added twice
it gets called twice. | f2111:c0:m8 |
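A minimal usage sketch of ``connect()`` together with ``fire()``/``__call__()``; the listener names are illustrative.

```python
from morris import Signal  # library name inferred from the doctests below

sig = Signal('on_event')

def plain_listener(*args, **kwargs):
    print('fired with', args, kwargs)

def aware_listener(*args, signal=None, **kwargs):
    # pass_signal=True makes the signal object arrive as `signal=`
    print('fired by', signal)

sig.connect(plain_listener)
sig.connect(aware_listener, pass_signal=True)
sig('hello', count=1)  # __call__ forwards to fire(args, kwargs)
```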
def disconnect(self, listener, pass_signal=False): | info = listenerinfo(listener, pass_signal)<EOL>self._listeners.remove(info)<EOL>_logger.debug(<EOL>"<STR_LIT>", str(listener), self._name)<EOL>if inspect.ismethod(listener):<EOL><INDENT>listener_object = listener.__self__<EOL>if hasattr(listener_object, "<STR_LIT>"):<EOL><INDENT>listener_object.__listeners__[listener].remove(self)<EOL>if (len(listener_object.__listeners__[listener])) == <NUM_LIT:0>:<EOL><INDENT>del listener_object.__listeners__[listener]<EOL><DEDENT><DEDENT><DEDENT> | Disconnect an existing listener from this signal
:param listener:
The listener (callable) to remove
:param pass_signal:
An optional argument that controls if the signal object is
explicitly passed to this listener when it is being fired.
If enabled, a ``signal=`` keyword argument is passed to the
listener function.
Here, this argument simply aids in disconnecting the right
listener. Make sure to pass the same value as was passed to
:meth:`connect()`
:raises ValueError:
If the listener (with the same value of pass_signal) is not present
:returns:
None | f2111:c0:m9 |
def fire(self, args, kwargs): | for info in self._listeners[:]:<EOL><INDENT>if info.pass_signal:<EOL><INDENT>info.listener(*args, signal=self, **kwargs)<EOL><DEDENT>else:<EOL><INDENT>info.listener(*args, **kwargs)<EOL><DEDENT><DEDENT> | Fire this signal with the specified arguments and keyword arguments.
Typically this is invoked via :meth:`__call__()` on this object, which
is more natural as it does all the argument packing/unpacking
transparently. | f2111:c0:m10 |
def __call__(self, *args, **kwargs): | self.fire(args, kwargs)<EOL> | Call fire() with all arguments forwarded transparently
This is provided for convenience so that a signal can be fired just
by a simple method or function call and so that signals can be passed
to other APIs that don't understand the :meth:`fire()` method. | f2111:c0:m11 |
def watchSignal(self, signal): | self._extend_state()<EOL>def signal_handler(*args, **kwargs):<EOL><INDENT>self._events_seen.append((signal, args, kwargs))<EOL><DEDENT>signal.connect(signal_handler)<EOL>if hasattr(self, '<STR_LIT>'):<EOL><INDENT>self.addCleanup(signal.disconnect, signal_handler)<EOL><DEDENT> | Setup provisions to watch a specified signal
:param signal:
The :class:`Signal` to watch for.
After calling this method you can use :meth:`assertSignalFired()`
and :meth:`assertSignalNotFired()` with the same signal. | f2111:c2:m1 |
def assertSignalFired(self, signal, *args, **kwargs): | event = (signal, args, kwargs)<EOL>self.assertIn(<EOL>event, self._events_seen,<EOL>"<STR_LIT>".format(event))<EOL>return event<EOL> | Assert that a signal was fired with appropriate arguments.
:param signal:
The :class:`Signal` that should have been fired.
Typically this is ``SomeClass.on_some_signal`` reference
:param args:
List of positional arguments passed to the signal handler
:param kwargs:
Keyword arguments passed to the signal handler
:returns:
A 3-tuple (signal, args, kwargs) that describes that event | f2111:c2:m2 |
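Putting ``watchSignal()`` and the assertions together in a test case; the ``SignalTestCase`` class name is an assumption based on the ``assertIn``/``addCleanup`` calls in the bodies above.

```python
from morris import Signal, SignalTestCase  # class name assumed

class FiringTests(SignalTestCase):

    def test_fired_with_args(self):
        sig = Signal('on_ping')
        self.watchSignal(sig)
        sig('payload', retries=2)
        # returns the (signal, args, kwargs) event tuple
        event = self.assertSignalFired(sig, 'payload', retries=2)
        self.assertSignalNotFired(sig, 'other-payload')
```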
def assertSignalNotFired(self, signal, *args, **kwargs): | event = (signal, args, kwargs)<EOL>self.assertNotIn(<EOL>event, self._events_seen,<EOL>"<STR_LIT>".format(event))<EOL> | Assert that a signal was not fired with the given arguments.
:param signal:
The :class:`Signal` that should not have been fired.
Typically this is ``SomeClass.on_some_signal`` reference
:param args:
List of positional arguments passed to the signal handler
:param kwargs:
Keyword arguments passed to the signal handler | f2111:c2:m3 |
def assertSignalOrdering(self, *expected_events): | expected_order = [self._events_seen.index(event)<EOL>for event in expected_events]<EOL>actual_order = sorted(expected_order)<EOL>self.assertEqual(<EOL>expected_order, actual_order,<EOL>"<STR_LIT>"<EOL>"<STR_LIT>".format(<EOL>"<STR_LIT:\n>".join(<EOL>"<STR_LIT>".format(i, event)<EOL>for i, event in enumerate(expected_events, <NUM_LIT:1>)),<EOL>"<STR_LIT:\n>".join(<EOL>"<STR_LIT>".format(i, event)<EOL>for i, event in enumerate(<EOL>(self._events_seen[idx] for idx in actual_order), <NUM_LIT:1>))))<EOL> | Assert that signals were fired in a specific sequence.
:param expected_events:
A (variadic) list of events describing the signals that were fired
Each element is a 3-tuple (signal, args, kwargs) that describes
the event.
.. note::
If you are using :meth:`assertSignalFired()` then the return value
of that method is a single event that can be passed to this method | f2111:c2:m4 |
def declare_selfvars(self): | self._dictErr = {<EOL>'<STR_LIT>' : {<EOL>'<STR_LIT:action>' : '<STR_LIT>',<EOL>'<STR_LIT:error>' : '<STR_LIT>',<EOL>'<STR_LIT>' : <NUM_LIT:1>},<EOL>'<STR_LIT>' : {<EOL>'<STR_LIT:action>' : '<STR_LIT>',<EOL>'<STR_LIT:error>' : '<STR_LIT>',<EOL>'<STR_LIT>' : <NUM_LIT:2>},<EOL>'<STR_LIT>' : {<EOL>'<STR_LIT:action>' : '<STR_LIT>',<EOL>'<STR_LIT:error>' : '<STR_LIT>',<EOL>'<STR_LIT>' : <NUM_LIT:3>},<EOL>'<STR_LIT>' : {<EOL>'<STR_LIT:action>' : '<STR_LIT>',<EOL>'<STR_LIT:error>' : '<STR_LIT>',<EOL>'<STR_LIT>' : <NUM_LIT:4>}<EOL>}<EOL>self.str_desc = '<STR_LIT>'<EOL>self.__name__ = "<STR_LIT>"<EOL>self.str_version = "<STR_LIT>"<EOL>self.within = None<EOL>self.numThreads = <NUM_LIT:1><EOL>self.str_inputDir = '<STR_LIT>'<EOL>self.str_inputFile = '<STR_LIT>'<EOL>self.str_outputDir = '<STR_LIT>'<EOL>self.d_inputTree = {}<EOL>self.d_inputTreeCallback = {}<EOL>self.d_outputTree = {}<EOL>self.str_outputLeafDir = '<STR_LIT>'<EOL>self.maxdepth = -<NUM_LIT:1><EOL>self.b_persistAnalysisResults = False<EOL>self.b_relativeDir = False<EOL>self.b_stats = False<EOL>self.b_statsReverse = False<EOL>self.b_jsonStats = False<EOL>self.b_json = False<EOL>self.b_test = False<EOL>self.b_followLinks = False<EOL>self.str_sleepLength = '<STR_LIT>'<EOL>self.f_sleepLength = <NUM_LIT:0.0><EOL>self.testType = <NUM_LIT:0><EOL>self.dp = None<EOL>self.log = None<EOL>self.tic_start = <NUM_LIT:0.0><EOL>self.pp = pprint.PrettyPrinter(indent=<NUM_LIT:4>)<EOL>self.verbosityLevel = <NUM_LIT:1><EOL> | A block to declare self variables | f2113:c0:m0 |
@staticmethod<EOL><INDENT>def walklevel(path, depth = -<NUM_LIT:1>, **kwargs):<DEDENT> | <EOL>if depth < <NUM_LIT:0>:<EOL><INDENT>for root, dirs, files in os.walk(path, **kwargs):<EOL><INDENT>yield root, dirs, files<EOL><DEDENT><DEDENT>path = path.rstrip(os.path.sep)<EOL>num_sep = path.count(os.path.sep)<EOL>for root, dirs, files in os.walk(path, **kwargs):<EOL><INDENT>yield root, dirs, files<EOL>num_sep_this = root.count(os.path.sep)<EOL>if num_sep + depth <= num_sep_this:<EOL><INDENT>del dirs[:]<EOL><DEDENT><DEDENT> | It works just like os.walk, but you can pass it a depth parameter
that indicates how deep the recursion will go.
If depth is -1 (or less than 0), the full depth is walked. | f2113:c0:m3 |
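A usage sketch of the depth-limited walk; the import path for the ``pftree`` class is an assumption.

```python
from pftree import pftree  # import path assumed

# depth=1 visits the root and one level below it; a negative depth
# (the default) walks the full tree exactly like os.walk.
for root, dirs, files in pftree.walklevel('/var/log', depth=1):
    print('%s: %d files' % (root, len(files)))
```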
def tree_probe(self, **kwargs): | str_topDir = "<STR_LIT:.>"<EOL>l_dirs = []<EOL>l_files = []<EOL>b_status = False<EOL>str_path = '<STR_LIT>'<EOL>l_dirsHere = []<EOL>l_filesHere = []<EOL>for k, v in kwargs.items():<EOL><INDENT>if k == '<STR_LIT:root>': str_topDir = v<EOL><DEDENT>for root, dirs, files in pftree.walklevel(str_topDir,<EOL>self.maxdepth, <EOL>followlinks = self.b_followLinks):<EOL><INDENT>b_status = True<EOL>str_path = root.split(os.sep)<EOL>if dirs:<EOL><INDENT>l_dirsHere = [root + '<STR_LIT:/>' + x for x in dirs]<EOL>l_dirs.append(l_dirsHere)<EOL>self.dp.qprint('<STR_LIT>', level = <NUM_LIT:3>)<EOL>self.dp.qprint("<STR_LIT:\n>" + self.pp.pformat(l_dirsHere), level = <NUM_LIT:3>)<EOL><DEDENT>if files:<EOL><INDENT>l_filesHere = [root + '<STR_LIT:/>' + y for y in files]<EOL>if len(self.str_inputFile):<EOL><INDENT>l_hit = [s for s in l_filesHere if self.str_inputFile in s]<EOL>if l_hit: <EOL><INDENT>l_filesHere = l_hit<EOL><DEDENT>else:<EOL><INDENT>l_filesHere = []<EOL><DEDENT><DEDENT>if l_filesHere:<EOL><INDENT>l_files.append(l_filesHere)<EOL><DEDENT>self.dp.qprint('<STR_LIT>', level = <NUM_LIT:3>)<EOL>self.dp.qprint("<STR_LIT:\n>" + self.pp.pformat(l_filesHere), level = <NUM_LIT:3>)<EOL><DEDENT><DEDENT>return {<EOL>'<STR_LIT:status>': b_status,<EOL>'<STR_LIT>': l_dirs,<EOL>'<STR_LIT>': l_files<EOL>}<EOL> | Perform an os walk down a file system tree, starting from
a **kwargs identified 'root', and return lists of files and
directories found.
kwargs:
root = '/some/path'
return {
'status': True,
'l_dir': l_dirs,
'l_files': l_files
} | f2113:c0:m4 |
def tree_construct(self, *args, **kwargs): | l_files = []<EOL>d_constructCallback = {}<EOL>fn_constructCallback = None<EOL>for k, v in kwargs.items():<EOL><INDENT>if k == '<STR_LIT>': l_files = v<EOL>if k == '<STR_LIT>': fn_constructCallback = v<EOL><DEDENT>index = <NUM_LIT:1><EOL>total = len(l_files)<EOL>for l_series in l_files:<EOL><INDENT>str_path = os.path.dirname(l_series[<NUM_LIT:0>])<EOL>l_series = [ os.path.basename(i) for i in l_series]<EOL>self.simpleProgress_show(index, total)<EOL>self.d_inputTree[str_path] = l_series<EOL>if fn_constructCallback:<EOL><INDENT>kwargs['<STR_LIT:path>'] = str_path<EOL>d_constructCallback = fn_constructCallback(l_series, **kwargs)<EOL>self.d_inputTreeCallback[str_path] = d_constructCallback<EOL><DEDENT>self.d_outputTree[str_path] = "<STR_LIT>"<EOL>index += <NUM_LIT:1><EOL><DEDENT>return {<EOL>'<STR_LIT:status>': True,<EOL>'<STR_LIT>': d_constructCallback, <EOL>'<STR_LIT>': index<EOL>}<EOL> | Processes the <l_files> list of files from the tree_probe()
and builds the input/output dictionary structures.
Optionally execute a constructCallback function, and return
results | f2113:c0:m5 |
@staticmethod<EOL><INDENT>def dirsize_get(l_filesWithoutPath, **kwargs):<DEDENT> | str_path = "<STR_LIT>"<EOL>for k,v in kwargs.items():<EOL><INDENT>if k == '<STR_LIT:path>': str_path = v<EOL><DEDENT>d_ret = {}<EOL>l_size = []<EOL>size = <NUM_LIT:0><EOL>for f in l_filesWithoutPath:<EOL><INDENT>str_f = '<STR_LIT>' % (str_path, f)<EOL>if not os.path.islink(str_f):<EOL><INDENT>try:<EOL><INDENT>size += os.path.getsize(str_f)<EOL><DEDENT>except:<EOL><INDENT>pass<EOL><DEDENT><DEDENT><DEDENT>str_size = pftree.sizeof_fmt(size)<EOL>return {<EOL>'<STR_LIT:status>': True,<EOL>'<STR_LIT>': size,<EOL>'<STR_LIT>': str_size<EOL>}<EOL> | Sample callback that determines a directory size. | f2113:c0:m7 |
def tree_process(self, *args, **kwargs): | str_applyResultsTo = "<STR_LIT>"<EOL>str_applyKey = "<STR_LIT>"<EOL>fn_inputReadCallback = None<EOL>fn_analysisCallback = None<EOL>fn_outputWriteCallback = None<EOL>b_persistAnalysisResults = False<EOL>d_tree = self.d_outputTree<EOL>str_processType = '<STR_LIT>'<EOL>dret_inputSet = {}<EOL>dret_analysis = {}<EOL>dret_outputSet = {}<EOL>filesRead = <NUM_LIT:0><EOL>filesAnalyzed = <NUM_LIT:0><EOL>filesSaved = <NUM_LIT:0><EOL>def thread_batch(l_threadFunc, outerLoop, innerLoop, offset):<EOL><INDENT>"""<STR_LIT>"""<EOL>start = <NUM_LIT:0><EOL>join = <NUM_LIT:0><EOL>il = lambda f, i, o, l : f + i + o * l<EOL>for t_o in range(<NUM_LIT:0>, outerLoop):<EOL><INDENT>for t_i in range(<NUM_LIT:0>, innerLoop):<EOL><INDENT>idx = il(offset, t_i, t_o, innerLoop)<EOL>l_threadFunc[idx].start()<EOL>start += <NUM_LIT:1><EOL><DEDENT>for t_i in range(<NUM_LIT:0>, innerLoop):<EOL><INDENT>idx = il(offset, t_i, t_o, innerLoop)<EOL>l_threadFunc[idx].join()<EOL>join += <NUM_LIT:1><EOL><DEDENT><DEDENT>return start<EOL><DEDENT>def inputSet_read(path, data):<EOL><INDENT>"""<STR_LIT>"""<EOL>nonlocal filesRead<EOL>nonlocal index<EOL>nonlocal d_tree<EOL>nonlocal fn_inputReadCallback<EOL>self.simpleProgress_show(index, total, '<STR_LIT>' % <EOL>('<STR_LIT>' %threading.currentThread().getName(), <EOL>'<STR_LIT>' % fn_inputReadCallback.__name__)<EOL>)<EOL>d_read = fn_inputReadCallback(<EOL>('<STR_LIT>' % (self.str_inputDir, path), data), **kwargs<EOL>)<EOL>if '<STR_LIT:status>' in d_read.keys():<EOL><INDENT>d_tree[path] = d_read<EOL>if '<STR_LIT>' in d_read.keys():<EOL><INDENT>filesRead += d_read['<STR_LIT>']<EOL><DEDENT><DEDENT>else:<EOL><INDENT>self.dp.qprint(<EOL>"<STR_LIT>",<EOL>comms = '<STR_LIT:error>',<EOL>level = <NUM_LIT:0><EOL>)<EOL>error.fatal(self, '<STR_LIT>', drawBox = True)<EOL><DEDENT>return d_read<EOL><DEDENT>def analysis_do(path, data, index, **kwargs):<EOL><INDENT>nonlocal filesAnalyzed<EOL>nonlocal d_tree<EOL>nonlocal fn_analysisCallback<EOL>self.simpleProgress_show(index, total, '<STR_LIT>' % <EOL>('<STR_LIT>' % threading.currentThread().getName(), <EOL>'<STR_LIT>' % fn_analysisCallback.__name__)<EOL>)<EOL>d_analysis = fn_analysisCallback(<EOL>('<STR_LIT>' % (self.str_inputDir, path), d_tree[path]), **kwargs<EOL>)<EOL>if '<STR_LIT:status>' in d_analysis.keys():<EOL><INDENT>if d_analysis['<STR_LIT:status>']:<EOL><INDENT>if len(str_applyKey):<EOL><INDENT>d_tree[path] = d_analysis[str_applyKey]<EOL><DEDENT>else:<EOL><INDENT>d_tree[path] = d_analysis<EOL><DEDENT>if '<STR_LIT>' in d_analysis.keys(): <EOL><INDENT>filesAnalyzed += d_analysis['<STR_LIT>']<EOL><DEDENT>elif '<STR_LIT>' in d_analysis.keys():<EOL><INDENT>filesAnalyzed += len(d_analysis['<STR_LIT>'])<EOL><DEDENT><DEDENT>else:<EOL><INDENT>d_tree[path] = None<EOL><DEDENT><DEDENT>else:<EOL><INDENT>self.dp.qprint(<EOL>"<STR_LIT>",<EOL>comms = '<STR_LIT:error>',<EOL>level = <NUM_LIT:0><EOL>)<EOL>error.fatal(self, '<STR_LIT>', drawBox = True)<EOL><DEDENT>return d_analysis<EOL><DEDENT>def tree_removeDeadBranches():<EOL><INDENT>"""<STR_LIT>"""<EOL>nonlocal d_tree<EOL>d_tree = { k : v for k, v in d_tree.items() if v}<EOL>self.d_inputTree = d_tree<EOL>self.d_outputTree = self.d_inputTree.copy()<EOL><DEDENT>def outputSet_write(path, data):<EOL><INDENT>"""<STR_LIT>"""<EOL>nonlocal filesSaved<EOL>nonlocal index<EOL>nonlocal d_tree<EOL>nonlocal fn_analysisCallback<EOL>nonlocal b_persistAnalysisResults<EOL>self.simpleProgress_show(index, total, '<STR_LIT>' % <EOL>('<STR_LIT>' % threading.currentThread().getName(), 
<EOL>'<STR_LIT>' % fn_outputWriteCallback.__name__)<EOL>)<EOL>if len(self.str_outputLeafDir):<EOL><INDENT>(dirname, basename) = os.path.split(path)<EOL>str_format = '<STR_LIT>' % self.str_outputLeafDir<EOL>new_basename = str_format + '<STR_LIT>'<EOL>str_eval = eval(new_basename)<EOL>path = '<STR_LIT>' % (dirname, str_eval)<EOL><DEDENT>d_output = fn_outputWriteCallback(<EOL>( '<STR_LIT>' % (self.str_outputDir, path), data), **kwargs<EOL>)<EOL>if '<STR_LIT:status>' in d_output.keys():<EOL><INDENT>if not b_persistAnalysisResults:<EOL><INDENT>d_tree[path] = d_output<EOL><DEDENT>filesSaved += d_output['<STR_LIT>']<EOL><DEDENT>else:<EOL><INDENT>self.dp.qprint(<EOL>"<STR_LIT>",<EOL>comms = '<STR_LIT:error>',<EOL>level = <NUM_LIT:0><EOL>)<EOL>error.fatal(self, '<STR_LIT>', drawBox = True)<EOL><DEDENT>return d_output<EOL><DEDENT>def loop_nonThreaded():<EOL><INDENT>"""<STR_LIT>"""<EOL>nonlocal index, total<EOL>nonlocal d_tree<EOL>nonlocal fn_inputReadCallback<EOL>nonlocal fn_analysisCallback<EOL>nonlocal fn_outputWriteCallback<EOL>nonlocal dret_inputSet<EOL>nonlocal dret_analysis<EOL>nonlocal dret_outputSet<EOL>for path, data in self.d_inputTree.items():<EOL><INDENT>if fn_inputReadCallback: dret_inputSet = inputSet_read(path, data)<EOL>if fn_analysisCallback: dret_analyze = analysis_do(path, d_tree[path], index)<EOL>if fn_outputWriteCallback: dret_outputSet = outputSet_write(path, d_tree[path])<EOL>index += <NUM_LIT:1><EOL><DEDENT>tree_removeDeadBranches()<EOL><DEDENT>def loop_threaded():<EOL><INDENT>"""<STR_LIT>"""<EOL>nonlocal index, total<EOL>nonlocal d_tree<EOL>nonlocal fn_inputReadCallback<EOL>nonlocal fn_analysisCallback<EOL>nonlocal fn_outputWriteCallback<EOL>nonlocal dret_inputSet<EOL>nonlocal dret_analysis<EOL>nonlocal dret_outputSet<EOL>def thread_createOnFunction(path, data, str_namePrefix, fn_thread):<EOL><INDENT>"""<STR_LIT>"""<EOL>nonlocal index<EOL>ta = threading.Thread(<EOL>name = '<STR_LIT>' % (str_namePrefix, index, self.numThreads),<EOL>target = fn_thread,<EOL>args = (path, data, index),<EOL>kwargs = kwargs<EOL>)<EOL>return ta<EOL><DEDENT>def threadsInBatches_run(l_threadAnalysis):<EOL><INDENT>"""<STR_LIT>"""<EOL>index = <NUM_LIT:1><EOL>if self.numThreads > total:<EOL><INDENT>self.numThreads = total<EOL><DEDENT>threadFullLoops = int(total / self.numThreads)<EOL>threadRem = total % self.numThreads<EOL>alreadyRunCount = thread_batch(<EOL>l_threadAnalysis,<EOL>threadFullLoops, <EOL>self.numThreads, <EOL><NUM_LIT:0>)<EOL>nextRunCount = thread_batch(<EOL>l_threadAnalysis,<EOL><NUM_LIT:1>, <EOL>threadRem, <EOL>alreadyRunCount)<EOL><DEDENT>if fn_inputReadCallback:<EOL><INDENT>index = <NUM_LIT:1><EOL>for path, data in self.d_inputTree.items():<EOL><INDENT>dret_inputSet = inputSet_read(path, data) <EOL>index += <NUM_LIT:1><EOL><DEDENT><DEDENT>if fn_analysisCallback:<EOL><INDENT>index = <NUM_LIT:1><EOL>l_threadAnalysis = []<EOL>for path, data in self.d_inputTree.items():<EOL><INDENT>l_threadAnalysis.append(thread_createOnFunction(<EOL>path, data,<EOL>'<STR_LIT>',<EOL>analysis_do<EOL>)<EOL>)<EOL>index += <NUM_LIT:1><EOL><DEDENT>threadsInBatches_run(l_threadAnalysis)<EOL>tree_removeDeadBranches()<EOL><DEDENT>if fn_outputWriteCallback:<EOL><INDENT>index = <NUM_LIT:1><EOL>for path, data in self.d_inputTree.items():<EOL><INDENT>dret_outputSet = outputSet_write(path, d_tree[path])<EOL>index += <NUM_LIT:1><EOL><DEDENT><DEDENT><DEDENT>for k, v in kwargs.items():<EOL><INDENT>if k == '<STR_LIT>': fn_inputReadCallback = v<EOL>if k == '<STR_LIT>': fn_analysisCallback = v<EOL>if k == '<STR_LIT>': 
fn_outputWriteCallback = v<EOL>if k == '<STR_LIT>': str_applyResultsTo = v<EOL>if k == '<STR_LIT>': str_applyKey = v<EOL>if k == '<STR_LIT>': b_persistAnalysisResults = v<EOL><DEDENT>if str_applyResultsTo == '<STR_LIT>': <EOL><INDENT>d_tree = self.d_inputTree<EOL><DEDENT>index = <NUM_LIT:1><EOL>total = len(self.d_inputTree.keys())<EOL>l_threadAnalysis = []<EOL>if not self.numThreads: <EOL><INDENT>loop_nonThreaded()<EOL>str_processType = "<STR_LIT>"<EOL><DEDENT>else:<EOL><INDENT>loop_threaded()<EOL>str_processType = "<STR_LIT>"<EOL><DEDENT>return {<EOL>'<STR_LIT:status>': True,<EOL>'<STR_LIT>': str_processType,<EOL>'<STR_LIT>': index,<EOL>'<STR_LIT>': filesRead,<EOL>'<STR_LIT>': filesAnalyzed,<EOL>'<STR_LIT>': filesSaved<EOL>}<EOL> | kwargs:
inputReadCallback = callback to perform inputIO (read)
analysisCallback = callback to perform analysis
outputWriteCallback = callback to perform outputIO (write)
applyResultsTo = 'inputTree'|'outputTree'
applyKey = <arbitrary key in analysis dictionary>
persistAnalysisResults = True|False
This method performs the actual work of this class. Operations are
divided into three callback groups:
* Input reading
* Actual processing
* Output writing
The method will loop over all the "paths" in <inputTree>, and for each
"path" call the inputRead/dataAnalysis/outputWrite callbacks in order.
If this pftree object is initialized as multi-threaded, only the
dataAnalysis callback is actually threaded. The read and write
file IO callbacks are run sequentially for efficiency (threaded
file IO is horribly inefficient and actually degrades in linear
proportion to the number of threads).
The results of the analysis are typically stored in the corresponding
path in the <outputTree> (unless 'persistAnalysisResults' == False);
however, results can also be applied to the <inputTree> (see below).
The results of the dataAnalysisCallback are usually stored in the
outputTree at a path corresponding to the inputTree. If
kwargs: applyResultsTo = 'inputTree'
is passed, then the results are saved to the <inputTree> instead.
Furthermore, if
kwargs: applyKey = 'someKey'
is passed, then only the results of 'someKey' in the returned
dictionary are saved.
Thus, an enclosing class can call this method to, for example, filter
the list of files at each path location by:
pftree.tree_process(
...
analysisCallback = fn_filterFileList,
applyResultsTo = 'inputTree',
applyKey = 'files'
)
will apply the callback function, fn_filterFileList and return some
filtered list in its return dictionary at key == 'files'. This
dictionary value is stored in the <inputTree>.
Finally, if either
self.b_persistAnalysisResults = True
or
kwargs: persistAnalysisResults = True
Then this method will save all output results at each location in the
<outputTree> path. This can become prohibitively large in memory if
operations are applied that seek to save large results at each
directory (like dicom anon, for example). In that case, passing/setting
a <False> will not save results in the <outputTree> (other than a
boolean status) and will immediately do a callback on the results
to process them. In this case, a kwargs
kwargs: outputcallback = self.fn_outputcallback
is called on the dictionary result of the analysiscallback method. The
result of this outputcallback is saved to the <outputTree> instead.
Note that threading the analysisCallback will effectively result in
output results being persistent across the entire tree (since the execution
loop finishes each step sequentially: all input IO, thread analysis, all
output IO). | f2113:c0:m8 |
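A sketch of how the three callback groups plug into ``tree_process()``, wired to the test callbacks defined further down in this class; the constructor keywords are assumptions.

```python
from pftree import pftree  # import path assumed

pf = pftree(
    inputDir='/data/in',    # constructor kwargs assumed
    outputDir='/data/out',
)
# The input tree is assumed to be populated first, e.g. via
# tree_probe() and tree_construct() as run() does elsewhere in this class.
d_result = pf.tree_process(
    inputReadCallback=pf.inputReadCallback,
    analysisCallback=pf.inputAnalyzeCallback,
    outputWriteCallback=pf.outputSaveCallback,
    persistAnalysisResults=False,
)
```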
def tree_analysisOutput(self, *args, **kwargs): | fn_outputcallback = None<EOL>for k, v in kwargs.items():<EOL><INDENT>if k == '<STR_LIT>': fn_outputcallback = v<EOL><DEDENT>index = <NUM_LIT:1><EOL>total = len(self.d_inputTree.keys())<EOL>for path, d_analysis in self.d_outputTree.items():<EOL><INDENT>self.simpleProgress_show(index, total)<EOL>self.dp.qprint("<STR_LIT>" % path)<EOL>d_output = fn_outputcallback((path, d_analysis), **kwargs)<EOL><DEDENT>return {<EOL>'<STR_LIT:status>': True<EOL>}<EOL> | An optional method for looping over the <outputTree> and
calling an outputcallback on the analysis results at each
path.
Only call this if self.b_persistAnalysisResults is True. | f2113:c0:m9 |
def stats_compute(self, *args, **kwargs): | totalElements = <NUM_LIT:0><EOL>totalKeys = <NUM_LIT:0><EOL>totalSize = <NUM_LIT:0><EOL>l_stats = []<EOL>d_report = {}<EOL>for k, v in sorted(self.d_inputTreeCallback.items(), <EOL>key = lambda kv: (kv[<NUM_LIT:1>]['<STR_LIT>']),<EOL>reverse = self.b_statsReverse):<EOL><INDENT>str_report = "<STR_LIT>" % (len(self.d_inputTree[k]), <EOL>self.d_inputTreeCallback[k]['<STR_LIT>'], <EOL>self.d_inputTreeCallback[k]['<STR_LIT>'], <EOL>k)<EOL>d_report = {<EOL>'<STR_LIT>': len(self.d_inputTree[k]),<EOL>'<STR_LIT>': self.d_inputTreeCallback[k]['<STR_LIT>'],<EOL>'<STR_LIT>': self.d_inputTreeCallback[k]['<STR_LIT>'],<EOL>'<STR_LIT:path>': k<EOL>}<EOL>self.dp.qprint(str_report, level = <NUM_LIT:1>)<EOL>l_stats.append(d_report)<EOL>totalElements += len(v)<EOL>totalKeys += <NUM_LIT:1><EOL>totalSize += self.d_inputTreeCallback[k]['<STR_LIT>']<EOL><DEDENT>str_totalSize_human = self.sizeof_fmt(totalSize)<EOL>return {<EOL>'<STR_LIT:status>': True,<EOL>'<STR_LIT>': totalKeys,<EOL>'<STR_LIT>': totalElements,<EOL>'<STR_LIT>': totalSize,<EOL>'<STR_LIT>': str_totalSize_human,<EOL>'<STR_LIT>': l_stats,<EOL>'<STR_LIT>': other.toc()<EOL>}<EOL> | Simply loop over the internal dictionary and
echo the list size at each key (i.e. the number
of files). | f2113:c0:m10 |
def inputReadCallback(self, *args, **kwargs): | b_status = True<EOL>filesRead = <NUM_LIT:0><EOL>for k, v in kwargs.items():<EOL><INDENT>if k == '<STR_LIT>': l_file = v<EOL>if k == '<STR_LIT:path>': str_path = v<EOL><DEDENT>if len(args):<EOL><INDENT>at_data = args[<NUM_LIT:0>]<EOL>str_path = at_data[<NUM_LIT:0>]<EOL>l_file = at_data[<NUM_LIT:1>]<EOL><DEDENT>self.dp.qprint("<STR_LIT>" % <EOL>(str_path, <EOL>self.pp.pformat(l_file)), <EOL>level = <NUM_LIT:5>)<EOL>filesRead = len(l_file)<EOL>if not len(l_file): b_status = False<EOL>return {<EOL>'<STR_LIT:status>': b_status,<EOL>'<STR_LIT>': l_file,<EOL>'<STR_LIT>': str_path,<EOL>'<STR_LIT>': filesRead<EOL>}<EOL> | Test for inputReadCallback
This method does not actually "read" the input files,
but simply returns the passed file list back to
the caller. | f2113:c0:m11 |
def inputAnalyzeCallback(self, *args, **kwargs): | b_status = False<EOL>filesRead = <NUM_LIT:0><EOL>filesAnalyzed = <NUM_LIT:0><EOL>for k, v in kwargs.items():<EOL><INDENT>if k == '<STR_LIT>': d_DCMRead = v<EOL>if k == '<STR_LIT:path>': str_path = v<EOL><DEDENT>if len(args):<EOL><INDENT>at_data = args[<NUM_LIT:0>]<EOL>str_path = at_data[<NUM_LIT:0>]<EOL>d_read = at_data[<NUM_LIT:1>]<EOL><DEDENT>b_status = True<EOL>self.dp.qprint("<STR_LIT>" % <EOL>self.pp.pformat(d_read['<STR_LIT>']), <EOL>level = <NUM_LIT:5>)<EOL>if int(self.f_sleepLength):<EOL><INDENT>self.dp.qprint("<STR_LIT>" % self.f_sleepLength, level = <NUM_LIT:5>)<EOL>time.sleep(self.f_sleepLength)<EOL><DEDENT>filesAnalyzed = len(d_read['<STR_LIT>'])<EOL>return {<EOL>'<STR_LIT:status>': b_status,<EOL>'<STR_LIT>': filesAnalyzed,<EOL>'<STR_LIT>': d_read['<STR_LIT>']<EOL>}<EOL> | Test method for inputAnalyzeCallback
This method loops over the passed number of files,
and optionally "delays" in each loop to simulate
some analysis. The delay length is specified by
the '--test <delay>' flag. | f2113:c0:m12 |
def outputSaveCallback(self, at_data, **kwargs): | path = at_data[<NUM_LIT:0>]<EOL>d_outputInfo = at_data[<NUM_LIT:1>]<EOL>other.mkdir(self.str_outputDir)<EOL>filesSaved = <NUM_LIT:0><EOL>other.mkdir(path)<EOL>if not self.testType:<EOL><INDENT>str_outfile = '<STR_LIT>' % path<EOL><DEDENT>else:<EOL><INDENT>str_outfile = '<STR_LIT>' % path<EOL><DEDENT>with open(str_outfile, '<STR_LIT:w>') as f:<EOL><INDENT>self.dp.qprint("<STR_LIT>" % (str_outfile), level = <NUM_LIT:5>)<EOL>if not self.testType:<EOL><INDENT>f.write('<STR_LIT>' % self.pp.pformat(d_outputInfo['<STR_LIT>']))<EOL><DEDENT>else:<EOL><INDENT>f.write('<STR_LIT>' % d_outputInfo['<STR_LIT>'])<EOL><DEDENT><DEDENT>filesSaved += <NUM_LIT:1><EOL>return {<EOL>'<STR_LIT:status>': True,<EOL>'<STR_LIT>': str_outfile,<EOL>'<STR_LIT>': filesSaved<EOL>}<EOL> | Test method for outputSaveCallback
Simply writes a file in the output tree corresponding
to the number of files in the input tree. | f2113:c0:m13 |
def run(self, *args, **kwargs): | b_status = True<EOL>d_probe = {}<EOL>d_tree = {}<EOL>d_stats = {}<EOL>str_error = '<STR_LIT>'<EOL>b_timerStart = False<EOL>d_test = {}<EOL>for k, v in kwargs.items():<EOL><INDENT>if k == '<STR_LIT>': b_timerStart = bool(v)<EOL><DEDENT>if b_timerStart:<EOL><INDENT>other.tic()<EOL><DEDENT>if not os.path.exists(self.str_inputDir):<EOL><INDENT>b_status = False<EOL>self.dp.qprint(<EOL>"<STR_LIT>", <EOL>comms = '<STR_LIT:error>'<EOL>)<EOL>error.warn(self, '<STR_LIT>', exitToOS = True, drawBox = True)<EOL>str_error = '<STR_LIT>'<EOL><DEDENT>if b_status:<EOL><INDENT>str_origDir = os.getcwd()<EOL>if self.b_relativeDir:<EOL><INDENT>os.chdir(self.str_inputDir)<EOL>str_rootDir = '<STR_LIT:.>'<EOL><DEDENT>else:<EOL><INDENT>str_rootDir = self.str_inputDir<EOL><DEDENT>d_probe = self.tree_probe( <EOL>root = str_rootDir<EOL>)<EOL>b_status = b_status and d_probe['<STR_LIT:status>']<EOL>d_tree = self.tree_construct( <EOL>l_files = d_probe['<STR_LIT>'],<EOL>constructCallback = self.dirsize_get<EOL>)<EOL>b_status = b_status and d_tree['<STR_LIT:status>']<EOL>if self.b_test:<EOL><INDENT>d_test = self.test_run(*args, **kwargs)<EOL>b_status = b_status and d_test['<STR_LIT:status>']<EOL><DEDENT>else:<EOL><INDENT>if self.b_stats or self.b_statsReverse:<EOL><INDENT>d_stats = self.stats_compute()<EOL>self.dp.qprint('<STR_LIT>' % d_stats['<STR_LIT>'], level = <NUM_LIT:1>)<EOL>self.dp.qprint('<STR_LIT>' % d_stats['<STR_LIT>'], level = <NUM_LIT:1>)<EOL>self.dp.qprint('<STR_LIT>' % d_stats['<STR_LIT>'], level = <NUM_LIT:1>)<EOL>self.dp.qprint('<STR_LIT>' % d_stats['<STR_LIT>'], level = <NUM_LIT:1>)<EOL>b_status = b_status and d_stats['<STR_LIT:status>']<EOL><DEDENT><DEDENT>if self.b_jsonStats:<EOL><INDENT>print(json.dumps(d_stats, indent = <NUM_LIT:4>, sort_keys = True))<EOL><DEDENT>if self.b_relativeDir:<EOL><INDENT>os.chdir(str_origDir)<EOL><DEDENT><DEDENT>d_ret = {<EOL>'<STR_LIT:status>': b_status,<EOL>'<STR_LIT>': d_probe,<EOL>'<STR_LIT>': d_tree,<EOL>'<STR_LIT>': d_stats,<EOL>'<STR_LIT>': d_test,<EOL>'<STR_LIT>': str_error,<EOL>'<STR_LIT>': other.toc()<EOL>}<EOL>if self.b_json:<EOL><INDENT>print(json.dumps(d_ret, indent = <NUM_LIT:4>, sort_keys = True))<EOL><DEDENT>return d_ret<EOL> | Probe the input tree and print. | f2113:c0:m15 |
def black_box_function(x, y): | time.sleep(random.randint(<NUM_LIT:1>, <NUM_LIT:7>))<EOL>return -x ** <NUM_LIT:2> - (y - <NUM_LIT:1>) ** <NUM_LIT:2> + <NUM_LIT:1><EOL> | Function with unknown internals we wish to maximize.
This is just serving as an example, however, for all intents and
purposes think of the internals of this function, i.e.: the process
which generates its output values, as unknown. | f2122:m0 |
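This example function is typically driven the same way as in the ``optimize_svc``/``optimize_rfc`` rows below; the bounds here are illustrative.

```python
from bayes_opt import BayesianOptimization

optimizer = BayesianOptimization(
    f=black_box_function,
    pbounds={'x': (2, 4), 'y': (-3, 3)},  # illustrative bounds
    random_state=1,
)
optimizer.maximize(init_points=2, n_iter=10)
print(optimizer.max)  # best target value and the parameters that gave it
```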
def post(self): | body = tornado.escape.json_decode(self.request.body)<EOL>try:<EOL><INDENT>self._bo.register(<EOL>params=body["<STR_LIT>"],<EOL>target=body["<STR_LIT:target>"],<EOL>)<EOL>print("<STR_LIT>".format(len(self._bo.space)), end="<STR_LIT>")<EOL><DEDENT>except KeyError:<EOL><INDENT>pass<EOL><DEDENT>finally:<EOL><INDENT>suggested_params = self._bo.suggest(self._uf)<EOL><DEDENT>self.write(json.dumps(suggested_params))<EOL> | Deal with incoming requests. | f2122:c0:m0 |
def get_data(): | data, targets = make_classification(<EOL>n_samples=<NUM_LIT:1000>,<EOL>n_features=<NUM_LIT>,<EOL>n_informative=<NUM_LIT:12>,<EOL>n_redundant=<NUM_LIT:7>,<EOL>random_state=<NUM_LIT>,<EOL>)<EOL>return data, targets<EOL> | Synthetic binary classification dataset. | f2123:m0 |
def svc_cv(C, gamma, data, targets): | estimator = SVC(C=C, gamma=gamma, random_state=<NUM_LIT:2>)<EOL>cval = cross_val_score(estimator, data, targets, scoring='<STR_LIT>', cv=<NUM_LIT:4>)<EOL>return cval.mean()<EOL> | SVC cross validation.
This function will instantiate a SVC classifier with parameters C and
gamma. Combined with data and targets this will in turn be used to perform
cross validation. The result of cross validation is returned.
Our goal is to find combinations of C and gamma that maximize the roc_auc
metric. | f2123:m1 |
def rfc_cv(n_estimators, min_samples_split, max_features, data, targets): | estimator = RFC(<EOL>n_estimators=n_estimators,<EOL>min_samples_split=min_samples_split,<EOL>max_features=max_features,<EOL>random_state=<NUM_LIT:2><EOL>)<EOL>cval = cross_val_score(estimator, data, targets,<EOL>scoring='<STR_LIT>', cv=<NUM_LIT:4>)<EOL>return cval.mean()<EOL> | Random Forest cross validation.
This function will instantiate a random forest classifier with parameters
n_estimators, min_samples_split, and max_features. Combined with data and
targets this will in turn be used to perform cross validation. The result
of cross validation is returned.
Our goal is to find combinations of n_estimators, min_samples_split, and
max_features that minimize the log loss. | f2123:m2 |
def optimize_svc(data, targets): | def svc_crossval(expC, expGamma):<EOL><INDENT>"""<STR_LIT>"""<EOL>C = <NUM_LIT:10> ** expC<EOL>gamma = <NUM_LIT:10> ** expGamma<EOL>return svc_cv(C=C, gamma=gamma, data=data, targets=targets)<EOL><DEDENT>optimizer = BayesianOptimization(<EOL>f=svc_crossval,<EOL>pbounds={"<STR_LIT>": (-<NUM_LIT:3>, <NUM_LIT:2>), "<STR_LIT>": (-<NUM_LIT:4>, -<NUM_LIT:1>)},<EOL>random_state=<NUM_LIT>,<EOL>verbose=<NUM_LIT:2><EOL>)<EOL>optimizer.maximize(n_iter=<NUM_LIT:10>)<EOL>print("<STR_LIT>", optimizer.max)<EOL> | Apply Bayesian Optimization to SVC parameters. | f2123:m3 |
def optimize_rfc(data, targets): | def rfc_crossval(n_estimators, min_samples_split, max_features):<EOL><INDENT>"""<STR_LIT>"""<EOL>return rfc_cv(<EOL>n_estimators=int(n_estimators),<EOL>min_samples_split=int(min_samples_split),<EOL>max_features=max(min(max_features, <NUM_LIT>), <NUM_LIT>),<EOL>data=data,<EOL>targets=targets,<EOL>)<EOL><DEDENT>optimizer = BayesianOptimization(<EOL>f=rfc_crossval,<EOL>pbounds={<EOL>"<STR_LIT>": (<NUM_LIT:10>, <NUM_LIT>),<EOL>"<STR_LIT>": (<NUM_LIT:2>, <NUM_LIT>),<EOL>"<STR_LIT>": (<NUM_LIT:0.1>, <NUM_LIT>),<EOL>},<EOL>random_state=<NUM_LIT>,<EOL>verbose=<NUM_LIT:2><EOL>)<EOL>optimizer.maximize(n_iter=<NUM_LIT:10>)<EOL>print("<STR_LIT>", optimizer.max)<EOL> | Apply Bayesian Optimization to Random Forest parameters. | f2123:m4 |
def acq_max(ac, gp, y_max, bounds, random_state, n_warmup=<NUM_LIT>, n_iter=<NUM_LIT>): | <EOL>x_tries = random_state.uniform(bounds[:, <NUM_LIT:0>], bounds[:, <NUM_LIT:1>],<EOL>size=(n_warmup, bounds.shape[<NUM_LIT:0>]))<EOL>ys = ac(x_tries, gp=gp, y_max=y_max)<EOL>x_max = x_tries[ys.argmax()]<EOL>max_acq = ys.max()<EOL>x_seeds = random_state.uniform(bounds[:, <NUM_LIT:0>], bounds[:, <NUM_LIT:1>],<EOL>size=(n_iter, bounds.shape[<NUM_LIT:0>]))<EOL>for x_try in x_seeds:<EOL><INDENT>res = minimize(lambda x: -ac(x.reshape(<NUM_LIT:1>, -<NUM_LIT:1>), gp=gp, y_max=y_max),<EOL>x_try.reshape(<NUM_LIT:1>, -<NUM_LIT:1>),<EOL>bounds=bounds,<EOL>method="<STR_LIT>")<EOL>if not res.success:<EOL><INDENT>continue<EOL><DEDENT>if max_acq is None or -res.fun[<NUM_LIT:0>] >= max_acq:<EOL><INDENT>x_max = res.x<EOL>max_acq = -res.fun[<NUM_LIT:0>]<EOL><DEDENT><DEDENT>return np.clip(x_max, bounds[:, <NUM_LIT:0>], bounds[:, <NUM_LIT:1>])<EOL> | A function to find the maximum of the acquisition function
It uses a combination of random sampling (cheap) and the 'L-BFGS-B'
optimization method: first it samples `n_warmup` (1e5) points at random,
then runs L-BFGS-B from `n_iter` (250) random starting points.
Parameters
----------
:param ac:
The acquisition function object that return its point-wise value.
:param gp:
A gaussian process fitted to the relevant data.
:param y_max:
The current maximum known value of the target function.
:param bounds:
The variables bounds to limit the search of the acq max.
:param random_state:
instance of np.RandomState random number generator
:param n_warmup:
number of times to randomly sample the acquisition function
:param n_iter:
number of times to run scipy.minimize
Returns
-------
:return: x_max, The arg max of the acquisition function. | f2125:m0 |
def load_logs(optimizer, logs): | import json<EOL>if isinstance(logs, str):<EOL><INDENT>logs = [logs]<EOL><DEDENT>for log in logs:<EOL><INDENT>with open(log, "<STR_LIT:r>") as j:<EOL><INDENT>while True:<EOL><INDENT>try:<EOL><INDENT>iteration = next(j)<EOL><DEDENT>except StopIteration:<EOL><INDENT>break<EOL><DEDENT>iteration = json.loads(iteration)<EOL>try:<EOL><INDENT>optimizer.register(<EOL>params=iteration["<STR_LIT>"],<EOL>target=iteration["<STR_LIT:target>"],<EOL>)<EOL><DEDENT>except KeyError:<EOL><INDENT>pass<EOL><DEDENT><DEDENT><DEDENT><DEDENT>return optimizer<EOL> | Load previous ... | f2125:m1 |
def ensure_rng(random_state=None): | if random_state is None:<EOL><INDENT>random_state = np.random.RandomState()<EOL><DEDENT>elif isinstance(random_state, int):<EOL><INDENT>random_state = np.random.RandomState(random_state)<EOL><DEDENT>else:<EOL><INDENT>assert isinstance(random_state, np.random.RandomState)<EOL><DEDENT>return random_state<EOL> | Creates a random number generator based on an optional seed. This can be
an integer or another random state for a seeded rng, or None for an
unseeded rng. | f2125:m2 |
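The three accepted inputs, demonstrated directly against the body above:

```python
import numpy as np

rng_a = ensure_rng()                          # fresh, unseeded RNG
rng_b = ensure_rng(42)                        # seeded from an int
rng_c = ensure_rng(np.random.RandomState(0))  # passed through unchanged
assert all(isinstance(r, np.random.RandomState)
           for r in (rng_a, rng_b, rng_c))
```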
def __init__(self, kind, kappa, xi): | self.kappa = kappa<EOL>self.xi = xi<EOL>if kind not in ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>']:<EOL><INDENT>err = "<STR_LIT>""<STR_LIT>""<STR_LIT>".format(kind)<EOL>raise NotImplementedError(err)<EOL><DEDENT>else:<EOL><INDENT>self.kind = kind<EOL><DEDENT> | If UCB is to be used, a constant kappa is needed. | f2125:c0:m0 |
@classmethod<EOL><INDENT>def black(cls, s):<DEDENT> | return cls._wrap_colour(s, cls.END)<EOL> | Wrap text in black. | f2125:c1:m1 |
@classmethod<EOL><INDENT>def blue(cls, s):<DEDENT> | return cls._wrap_colour(s, cls.BLUE)<EOL> | Wrap text in blue. | f2125:c1:m2 |
@classmethod<EOL><INDENT>def bold(cls, s):<DEDENT> | return cls._wrap_colour(s, cls.BOLD)<EOL> | Wrap text in bold. | f2125:c1:m3 |
@classmethod<EOL><INDENT>def cyan(cls, s):<DEDENT> | return cls._wrap_colour(s, cls.CYAN)<EOL> | Wrap text in cyan. | f2125:c1:m4 |
@classmethod<EOL><INDENT>def darkcyan(cls, s):<DEDENT> | return cls._wrap_colour(s, cls.DARKCYAN)<EOL> | Wrap text in darkcyan. | f2125:c1:m5 |
@classmethod<EOL><INDENT>def green(cls, s):<DEDENT> | return cls._wrap_colour(s, cls.GREEN)<EOL> | Wrap text in green. | f2125:c1:m6 |
@classmethod<EOL><INDENT>def purple(cls, s):<DEDENT> | return cls._wrap_colour(s, cls.PURPLE)<EOL> | Wrap text in purple. | f2125:c1:m7 |
@classmethod<EOL><INDENT>def red(cls, s):<DEDENT> | return cls._wrap_colour(s, cls.RED)<EOL> | Wrap text in red. | f2125:c1:m8 |
@classmethod<EOL><INDENT>def underline(cls, s):<DEDENT> | return cls._wrap_colour(s, cls.UNDERLINE)<EOL> | Wrap text in underline. | f2125:c1:m9 |
@classmethod<EOL><INDENT>def yellow(cls, s):<DEDENT> | return cls._wrap_colour(s, cls.YELLOW)<EOL> | Wrap text in yellow. | f2125:c1:m10 |
def add(self, obj): | self._queue.append(obj)<EOL> | Add object to end of queue. | f2126:c0:m5 |
def register(self, params, target): | self._space.register(params, target)<EOL>self.dispatch(Events.OPTMIZATION_STEP)<EOL> | Expect observation with known target | f2126:c2:m4 |
def probe(self, params, lazy=True): | if lazy:<EOL><INDENT>self._queue.add(params)<EOL><DEDENT>else:<EOL><INDENT>self._space.probe(params)<EOL>self.dispatch(Events.OPTMIZATION_STEP)<EOL><DEDENT> | Probe target of x | f2126:c2:m5 |
def suggest(self, utility_function): | if len(self._space) == <NUM_LIT:0>:<EOL><INDENT>return self._space.array_to_params(self._space.random_sample())<EOL><DEDENT>with warnings.catch_warnings():<EOL><INDENT>warnings.simplefilter("<STR_LIT:ignore>")<EOL>self._gp.fit(self._space.params, self._space.target)<EOL><DEDENT>suggestion = acq_max(<EOL>ac=utility_function.utility,<EOL>gp=self._gp,<EOL>y_max=self._space.target.max(),<EOL>bounds=self._space.bounds,<EOL>random_state=self._random_state<EOL>)<EOL>return self._space.array_to_params(suggestion)<EOL> | Most promising point to probe next | f2126:c2:m6 |
def _prime_queue(self, init_points): | if self._queue.empty and self._space.empty:<EOL><INDENT>init_points = max(init_points, <NUM_LIT:1>)<EOL><DEDENT>for _ in range(init_points):<EOL><INDENT>self._queue.add(self._space.random_sample())<EOL><DEDENT> | Make sure there's something in the queue at the very beginning. | f2126:c2:m7 |
def maximize(self,<EOL>init_points=<NUM_LIT:5>,<EOL>n_iter=<NUM_LIT>,<EOL>acq='<STR_LIT>',<EOL>kappa=<NUM_LIT>,<EOL>xi=<NUM_LIT:0.0>,<EOL>**gp_params): | self._prime_subscriptions()<EOL>self.dispatch(Events.OPTMIZATION_START)<EOL>self._prime_queue(init_points)<EOL>self.set_gp_params(**gp_params)<EOL>util = UtilityFunction(kind=acq, kappa=kappa, xi=xi)<EOL>iteration = <NUM_LIT:0><EOL>while not self._queue.empty or iteration < n_iter:<EOL><INDENT>try:<EOL><INDENT>x_probe = next(self._queue)<EOL><DEDENT>except StopIteration:<EOL><INDENT>x_probe = self.suggest(util)<EOL>iteration += <NUM_LIT:1><EOL><DEDENT>self.probe(x_probe, lazy=False)<EOL><DEDENT>self.dispatch(Events.OPTMIZATION_END)<EOL> | Maximize your function | f2126:c2:m9 |
def set_bounds(self, new_bounds): | self._space.set_bounds(new_bounds)<EOL> | A method that allows changing the lower and upper searching bounds
Parameters
----------
new_bounds : dict
A dictionary with the parameter name and its new bounds | f2126:c2:m10 |
def _hashable(x): | return tuple(map(float, x))<EOL> | Ensure that a point is hashable by a python dict | f2127:m0 |
def __init__(self, target_func, pbounds, random_state=None): | self.random_state = ensure_rng(random_state)<EOL>self.target_func = target_func<EOL>self._keys = sorted(pbounds)<EOL>self._bounds = np.array(<EOL>[item[<NUM_LIT:1>] for item in sorted(pbounds.items(), key=lambda x: x[<NUM_LIT:0>])],<EOL>dtype=np.float<EOL>)<EOL>self._params = np.empty(shape=(<NUM_LIT:0>, self.dim))<EOL>self._target = np.empty(shape=(<NUM_LIT:0>))<EOL>self._cache = {}<EOL> | Parameters
----------
target_func : function
Function to be maximized.
pbounds : dict
Dictionary with parameters names as keys and a tuple with minimum
and maximum values.
random_state : int, RandomState, or None
optionally specify a seed for a random number generator | f2127:c0:m0 |
def register(self, params, target): | x = self._as_array(params)<EOL>if x in self:<EOL><INDENT>raise KeyError('<STR_LIT>'.format(x))<EOL><DEDENT>self._cache[_hashable(x.ravel())] = target<EOL>self._params = np.concatenate([self._params, x.reshape(<NUM_LIT:1>, -<NUM_LIT:1>)])<EOL>self._target = np.concatenate([self._target, [target]])<EOL> | Append a point and its target value to the known data.
Parameters
----------
x : ndarray
a single point, with len(x) == self.dim
y : float
target function value
Raises
------
KeyError:
if the point is not unique
Notes
-----
runs in ammortized constant time
Example
-------
>>> pbounds = {'p1': (0, 1), 'p2': (1, 100)}
>>> space = TargetSpace(lambda p1, p2: p1 + p2, pbounds)
>>> len(space)
0
>>> x = np.array([0, 0])
>>> y = 1
>>> space.register(x, y)
>>> len(space)
1 | f2127:c0:m12 |
def probe(self, params): | x = self._as_array(params)<EOL>try:<EOL><INDENT>target = self._cache[_hashable(x)]<EOL><DEDENT>except KeyError:<EOL><INDENT>params = dict(zip(self._keys, x))<EOL>target = self.target_func(**params)<EOL>self.register(x, target)<EOL><DEDENT>return target<EOL> | Evaluates a single point x, to obtain the value y and then records them
as observations.
Notes
-----
If x has been previously seen returns a cached value of y.
Parameters
----------
x : ndarray
a single point, with len(x) == self.dim
Returns
-------
y : float
target function value. | f2127:c0:m13 |
def random_sample(self): | <EOL>data = np.empty((<NUM_LIT:1>, self.dim))<EOL>for col, (lower, upper) in enumerate(self._bounds):<EOL><INDENT>data.T[col] = self.random_state.uniform(lower, upper, size=<NUM_LIT:1>)<EOL><DEDENT>return data.ravel()<EOL> | Creates random points within the bounds of the space.
Returns
----------
data: ndarray
[num x dim] array points with dimensions corresponding to `self._keys`
Example
-------
>>> target_func = lambda p1, p2: p1 + p2
>>> pbounds = {'p1': (0, 1), 'p2': (1, 100)}
>>> space = TargetSpace(target_func, pbounds, random_state=0)
>>> space.random_sample()
array([ 55.33253689, 0.54488318]) | f2127:c0:m14 |
def max(self): | try:<EOL><INDENT>res = {<EOL>'<STR_LIT:target>': self.target.max(),<EOL>'<STR_LIT>': dict(<EOL>zip(self.keys, self.params[self.target.argmax()])<EOL>)<EOL>}<EOL><DEDENT>except ValueError:<EOL><INDENT>res = {}<EOL><DEDENT>return res<EOL> | Get maximum target value found and corresponding parameters. | f2127:c0:m15 |
def res(self): | params = [dict(zip(self.keys, p)) for p in self.params]<EOL>return [<EOL>{"<STR_LIT:target>": target, "<STR_LIT>": param}<EOL>for target, param in zip(self.target, params)<EOL>]<EOL> | Get all target values found and corresponding parameters. | f2127:c0:m16 |
def set_bounds(self, new_bounds): | for row, key in enumerate(self.keys):<EOL><INDENT>if key in new_bounds:<EOL><INDENT>self._bounds[row] = new_bounds[key]<EOL><DEDENT><DEDENT> | A method that allows changing the lower and upper searching bounds
Parameters
----------
new_bounds : dict
A dictionary with the parameter name and its new bounds | f2127:c0:m17 |
def configurate_app(config_file='<STR_LIT>'): | <EOL>app.config.from_pyfile('<STR_LIT>')<EOL>app.config.from_pyfile(config_file, silent=True)<EOL>if app.config.get('<STR_LIT>', False):<EOL><INDENT>app.jinja_env.add_extension('<STR_LIT>')<EOL><DEDENT>assets = Environment(app)<EOL>js = Bundle('<STR_LIT>', filters='<STR_LIT>', output='<STR_LIT>')<EOL>css = Bundle('<STR_LIT>', filters='<STR_LIT>', output='<STR_LIT>')<EOL>assets.register('<STR_LIT>', js)<EOL>assets.register('<STR_LIT>', css)<EOL>port = app.config.get('<STR_LIT>', <NUM_LIT>)<EOL>host = app.config.get('<STR_LIT>', '<STR_LIT:127.0.0.1>')<EOL>return app, host, port<EOL> | Configures Flask app
:param config_file: Absolute path to Py config file, optional
:returns: App object, host and port | f2138:m0 |
def create_request(query): | yarr_url = app.config.get('<STR_LIT>', False)<EOL>if not yarr_url:<EOL><INDENT>raise('<STR_LIT>')<EOL><DEDENT>api_token = app.config.get('<STR_LIT>', False)<EOL>headers = {'<STR_LIT>': api_token} if api_token else {}<EOL>payload = {'<STR_LIT:q>': query}<EOL>url = '<STR_LIT>' % yarr_url<EOL>return requests.get(url, params=payload, headers=headers)<EOL> | Creates a GET request to Yarr! server
:param query: Free-text search query
:returns: Requests object | f2138:m1 |
def pipe(p1, p2): | if isinstance(p1, Pipeable) or isinstance(p2, Pipeable):<EOL><INDENT>return p1 | p2<EOL><DEDENT>return Pipe([p1, p2])<EOL> | Joins two pipes | f2160:m10 |
def get(self, data, item): | return data.get(item.src)<EOL> | Get corresponding data for item
:param data: source data
:param item: item to get
Subclasses can override this method to implement map access to more complex
structures than a plain dict | f2160:c1:m0 |
def merge_dicts(*dicts, **kwargs): | result = {}<EOL>for d in dicts:<EOL><INDENT>result.update(d)<EOL><DEDENT>result.update(kwargs)<EOL>return result<EOL> | Merges dicts and kwargs into one dict | f2164:m0 |
def parse_qs(qs): | result = {}<EOL>qs = bstr(qs, '<STR_LIT>')<EOL>pairs = [s2 for s1 in qs.split(b'<STR_LIT:&>') for s2 in s1.split(b'<STR_LIT:;>')]<EOL>uq = urlparse.unquote if PY2 else urlparse.unquote_to_bytes<EOL>for name_value in pairs:<EOL><INDENT>if not name_value:<EOL><INDENT>continue<EOL><DEDENT>nv = name_value.split(b'<STR_LIT:=>', <NUM_LIT:1>)<EOL>if len(nv) != <NUM_LIT:2>:<EOL><INDENT>nv.append(b'<STR_LIT>')<EOL><DEDENT>name = nv[<NUM_LIT:0>].replace(b'<STR_LIT:+>', b'<STR_LIT:U+0020>')<EOL>name = uq(name)<EOL>if not PY2: <EOL><INDENT>name = ustr(name, '<STR_LIT>')<EOL><DEDENT>value = nv[<NUM_LIT:1>].replace(b'<STR_LIT:+>', b'<STR_LIT:U+0020>')<EOL>value = uq(value)<EOL>result.setdefault(name, []).append(value)<EOL><DEDENT>return result<EOL> | Helper func to parse query string with py2/py3 compatibility
Ensures that dict keys are native strings. | f2164:m1 |
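Expected behaviour on Python 3, per the body above: keys become native ``str``, values stay lists of ``bytes``, ``+`` decodes to a space, and both ``&`` and ``;`` separate pairs.

```python
result = parse_qs(b'q=hello+world&tag=a;tag=b')
# -> {'q': [b'hello world'], 'tag': [b'a', b'b']}
```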
def clone(src, **kwargs): | obj = object.__new__(type(src))<EOL>obj.__dict__.update(src.__dict__)<EOL>obj.__dict__.update(kwargs)<EOL>return obj<EOL> | Clones object with optionally overridden fields | f2164:m2 |
def wrap_in(key): | return lambda val: {key: val}<EOL> | Wraps value in dict ``{key: value}`` | f2164:m3 |
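A one-line demonstration of the helper above:

```python
wrap_user = wrap_in('user')
wrap_user({'id': 1})  # -> {'user': {'id': 1}}
```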
def dpass(value): | return value<EOL> | Allows complex inline expressions in decorator
For example::
@dpass(params(arg=int) | (lambda r: {'arg': r['arg'] + 10}))
def boo(request, arg):
pass
Is equivalent of::
d = params(arg=int) | (lambda r: {'arg': r['arg'] + 10})
@d
def boo(request, arg):
pass | f2164:m5 |
def clean_text(self, text): | if text is None:<EOL><INDENT>return '<STR_LIT>'<EOL><DEDENT>text = re.sub(ILLEGAL_CHARACTERS_RE, '<STR_LIT>', text)<EOL>if '<STR_LIT:<>' in text or '<STR_LIT>' in text:<EOL><INDENT>text = clean(text, tags=self.tags, strip=self.strip)<EOL><DEDENT>return unescape(text)<EOL> | Clean text using bleach. | f2176:c0:m1 |
def _make_version(major, minor, micro, releaselevel, serial): | assert releaselevel in ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>']<EOL>version = "<STR_LIT>" % (major, minor)<EOL>if micro:<EOL><INDENT>version += "<STR_LIT>" % (micro,)<EOL><DEDENT>if releaselevel != '<STR_LIT>':<EOL><INDENT>short = {'<STR_LIT>': '<STR_LIT:a>', '<STR_LIT>': '<STR_LIT:b>', '<STR_LIT>': '<STR_LIT>'}[releaselevel]<EOL>version += "<STR_LIT>" % (short, serial)<EOL><DEDENT>return version<EOL> | Create a readable version string from version_info tuple components. | f2178:m0 |
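Worked examples, assuming the masked literals follow the usual ``major.minor[.micro]`` plus ``a``/``b``/``rc`` scheme visible in the body:

    _make_version(4, 0, 0, 'final', 0)      # -> '4.0'
    _make_version(4, 0, 2, 'beta', 3)       # -> '4.0.2b3'
    _make_version(5, 1, 0, 'candidate', 1)  # -> '5.1rc1'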
def _make_url(major, minor, micro, releaselevel, serial): | url = "<STR_LIT>"<EOL>if releaselevel != '<STR_LIT>':<EOL><INDENT>url += "<STR_LIT>" + _make_version(major, minor, micro, releaselevel, serial)<EOL><DEDENT>return url<EOL> | Make the URL people should start at for this version of coverage.py. | f2178:m1 |
@register.tag(name='<STR_LIT>')<EOL>def do_autopaginate(parser, token): | split = token.split_contents()<EOL>as_index = None<EOL>context_var = None<EOL>for i, bit in enumerate(split):<EOL><INDENT>if bit == '<STR_LIT>':<EOL><INDENT>as_index = i<EOL>break<EOL><DEDENT><DEDENT>if as_index is not None:<EOL><INDENT>try:<EOL><INDENT>context_var = split[as_index + <NUM_LIT:1>]<EOL><DEDENT>except IndexError:<EOL><INDENT>raise template.TemplateSyntaxError("<STR_LIT>" +<EOL>"<STR_LIT>" +<EOL>"<STR_LIT>" % split[<NUM_LIT:0>])<EOL><DEDENT>del split[as_index:as_index + <NUM_LIT:2>]<EOL><DEDENT>if len(split) == <NUM_LIT:2>:<EOL><INDENT>return AutoPaginateNode(split[<NUM_LIT:1>])<EOL><DEDENT>elif len(split) == <NUM_LIT:3>:<EOL><INDENT>return AutoPaginateNode(split[<NUM_LIT:1>], paginate_by=split[<NUM_LIT:2>], <EOL>context_var=context_var)<EOL><DEDENT>elif len(split) == <NUM_LIT:4>:<EOL><INDENT>try:<EOL><INDENT>orphans = int(split[<NUM_LIT:3>])<EOL><DEDENT>except ValueError:<EOL><INDENT>raise template.TemplateSyntaxError('<STR_LIT>'<EOL>% split[<NUM_LIT:3>])<EOL><DEDENT>return AutoPaginateNode(split[<NUM_LIT:1>], paginate_by=split[<NUM_LIT:2>], orphans=orphans,<EOL>context_var=context_var)<EOL><DEDENT>else:<EOL><INDENT>raise template.TemplateSyntaxError('<STR_LIT>' +<EOL>'<STR_LIT>' % split[<NUM_LIT:0>])<EOL><DEDENT> | Splits the arguments to the autopaginate tag and formats them correctly. | f2179:m0 |
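Per the token-count branches above, the tag accepts a queryset plus an optional per-page count, orphan count, and an ``as`` variable; hypothetical template usage (names and numbers are illustrative):

    {% autopaginate object_list %}
    {% autopaginate object_list 20 %}
    {% autopaginate object_list 20 2 %}
    {% autopaginate object_list 20 as paginated %}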
def paginate(context, window=DEFAULT_WINDOW, hashtag='<STR_LIT>'): | try:<EOL><INDENT>paginator = context['<STR_LIT>']<EOL>page_obj = context['<STR_LIT>']<EOL>page_range = list(paginator.page_range)<EOL>records = {'<STR_LIT>': <NUM_LIT:1> + (page_obj.number - <NUM_LIT:1>) * paginator.per_page}<EOL>records['<STR_LIT>'] = records['<STR_LIT>'] + paginator.per_page - <NUM_LIT:1><EOL>if records['<STR_LIT>'] + paginator.orphans >= paginator.count:<EOL><INDENT>records['<STR_LIT>'] = paginator.count<EOL><DEDENT>first = set(page_range[:window])<EOL>last = set(page_range[-window:])<EOL>current_start = page_obj.number-<NUM_LIT:1>-window<EOL>if current_start < <NUM_LIT:0>:<EOL><INDENT>current_start = <NUM_LIT:0><EOL><DEDENT>current_end = page_obj.number-<NUM_LIT:1>+window<EOL>if current_end < <NUM_LIT:0>:<EOL><INDENT>current_end = <NUM_LIT:0><EOL><DEDENT>current = set(page_range[current_start:current_end])<EOL>pages = []<EOL>if len(first.intersection(current)) == <NUM_LIT:0>:<EOL><INDENT>first_list = list(first)<EOL>first_list.sort()<EOL>second_list = list(current)<EOL>second_list.sort()<EOL>pages.extend(first_list)<EOL>diff = second_list[<NUM_LIT:0>] - first_list[-<NUM_LIT:1>]<EOL>if diff == <NUM_LIT:2>:<EOL><INDENT>pages.append(second_list[<NUM_LIT:0>] - <NUM_LIT:1>)<EOL><DEDENT>elif diff == <NUM_LIT:1>:<EOL><INDENT>pass<EOL><DEDENT>else:<EOL><INDENT>pages.append(None)<EOL><DEDENT>pages.extend(second_list)<EOL><DEDENT>else:<EOL><INDENT>unioned = list(first.union(current))<EOL>unioned.sort()<EOL>pages.extend(unioned)<EOL><DEDENT>if len(current.intersection(last)) == <NUM_LIT:0>:<EOL><INDENT>second_list = list(last)<EOL>second_list.sort()<EOL>diff = second_list[<NUM_LIT:0>] - pages[-<NUM_LIT:1>]<EOL>if diff == <NUM_LIT:2>:<EOL><INDENT>pages.append(second_list[<NUM_LIT:0>] - <NUM_LIT:1>)<EOL><DEDENT>elif diff == <NUM_LIT:1>:<EOL><INDENT>pass<EOL><DEDENT>else:<EOL><INDENT>pages.append(None)<EOL><DEDENT>pages.extend(second_list)<EOL><DEDENT>else:<EOL><INDENT>differenced = list(last.difference(current))<EOL>differenced.sort()<EOL>pages.extend(differenced)<EOL><DEDENT>to_return = {<EOL>'<STR_LIT>': settings.MEDIA_URL,<EOL>'<STR_LIT>': pages,<EOL>'<STR_LIT>': records,<EOL>'<STR_LIT>': page_obj,<EOL>'<STR_LIT>': paginator,<EOL>'<STR_LIT>': hashtag,<EOL>'<STR_LIT>': paginator.count > paginator.per_page,<EOL>}<EOL>if '<STR_LIT>' in context:<EOL><INDENT>getvars = context['<STR_LIT>'].GET.copy()<EOL>if '<STR_LIT>' in getvars:<EOL><INDENT>del getvars['<STR_LIT>']<EOL><DEDENT>if len(list(getvars.keys())) > <NUM_LIT:0>:<EOL><INDENT>to_return['<STR_LIT>'] = "<STR_LIT>" % getvars.urlencode()<EOL><DEDENT>else:<EOL><INDENT>to_return['<STR_LIT>'] = '<STR_LIT>'<EOL><DEDENT><DEDENT>return to_return<EOL><DEDENT>except (KeyError, AttributeError):<EOL><INDENT>return {}<EOL><DEDENT> | Renders the ``pagination.html`` template, resulting in a
Digg-like display of the available pages, given the current page. If there
are too many pages to be displayed before and after the current page, then
ellipses will be used to indicate the undisplayed gap between page numbers.
Requires one argument, ``context``, which should be a dictionary-like data
structure and must contain the following keys:
``paginator``
A ``Paginator`` or ``QuerySetPaginator`` object.
``page_obj``
This should be the result of calling the page method on the
aforementioned ``Paginator`` or ``QuerySetPaginator`` object, given
the current page.
This same ``context`` dictionary-like data structure may also include:
``getvars``
A dictionary of all of the **GET** parameters in the current request.
This is useful to maintain certain types of state, even when requesting
a different page. | f2179:m1 |
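Hypothetical end-to-end template usage (``pagination_tags`` is the conventional library name in django-pagination, not taken from this row):

    {% load pagination_tags %}
    {% autopaginate object_list 20 %}
    {% for obj in object_list %}{{ obj }}{% endfor %}
    {% paginate %}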
def get_page(self): | try:<EOL><INDENT>return int(self.GET.get('<STR_LIT>'))<EOL><DEDENT>except (KeyError, ValueError, TypeError):<EOL><INDENT>return <NUM_LIT:1><EOL><DEDENT> | A function which will be monkeypatched onto the request to get the current
page number as an integer. | f2180:m0
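A sketch of how such a request-level monkeypatch is typically installed; the middleware class is illustrative, not taken from the source:

    class PaginationMiddleware(object):
        def process_request(self, request):
            # attach get_page as a lazy 'page' property on the request class
            request.__class__.page = property(get_page)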
def validate_number(self, number): | try:<EOL><INDENT>number = int(number)<EOL><DEDENT>except ValueError:<EOL><INDENT>raise PageNotAnInteger('<STR_LIT>')<EOL><DEDENT>if number < <NUM_LIT:1>:<EOL><INDENT>raise EmptyPage('<STR_LIT>')<EOL><DEDENT>return number<EOL> | Validates the given 1-based page number. | f2182:c0:m1 |
def page(self, number): | number = self.validate_number(number)<EOL>bottom = (number - <NUM_LIT:1>) * self.per_page<EOL>top = bottom + self.per_page<EOL>page_items = self.object_list[bottom:top]<EOL>if not page_items:<EOL><INDENT>if number == <NUM_LIT:1> and self.allow_empty_first_page:<EOL><INDENT>pass<EOL><DEDENT>else:<EOL><INDENT>raise EmptyPage('<STR_LIT>')<EOL><DEDENT><DEDENT>return InfinitePage(page_items, number, self)<EOL> | Returns a Page object for the given 1-based page number. | f2182:c0:m2 |
def _get_count(self): | raise NotImplementedError<EOL> | Returns the total number of objects, across all pages. | f2182:c0:m3 |
def _get_num_pages(self): | raise NotImplementedError<EOL> | Returns the total number of pages. | f2182:c0:m4 |
def _get_page_range(self): | raise NotImplementedError<EOL> | Returns a 1-based range of pages for iterating through within
a template for loop. | f2182:c0:m5 |
def has_next(self): | try:<EOL><INDENT>next_item = self.paginator.object_list[<EOL>self.number * self.paginator.per_page]<EOL><DEDENT>except IndexError:<EOL><INDENT>return False<EOL><DEDENT>return True<EOL> | Checks whether at least one more item exists beyond the last item on this page. | f2182:c1:m1
def end_index(self): | return ((self.number - <NUM_LIT:1>) * self.paginator.per_page +<EOL>len(self.object_list))<EOL> | Returns the 1-based index of the last object on this page,
relative to total objects found (hits). | f2182:c1:m2 |
def page(self, number): | number = self.validate_number(number)<EOL>page_items = self.object_list[:self.per_page]<EOL>return FinitePage(page_items, number, self)<EOL> | Returns a Page object for the given 1-based page number. | f2182:c2:m2 |
def has_next(self): | try:<EOL><INDENT>next_item = self.paginator.object_list[self.paginator.per_page]<EOL><DEDENT>except IndexError:<EOL><INDENT>return False<EOL><DEDENT>return True<EOL> | Checks whether at least one more item exists beyond the last item on this page. | f2182:c3:m0
def start_index(self): | <EOL>return self.paginator.offset<EOL> | Returns the 1-based index of the first object on this page,
relative to total objects in the paginator. | f2182:c3:m1 |