/lava_nc-0.8.0-py3-none-any.whl/lava/magma/runtime/runtime_services/runtime_service.py
import logging
import typing as ty
from abc import abstractmethod
import numpy as np
from lava.magma.compiler.channels.pypychannel import (
CspSelector,
CspRecvPort,
CspSendPort
)
from lava.magma.core.sync.protocol import AbstractSyncProtocol
from lava.magma.runtime.mgmt_token_enums import (
enum_to_np,
enum_equal,
MGMT_RESPONSE,
MGMT_COMMAND,
)
from lava.magma.runtime.runtime_services.enums import LoihiPhase
from lava.magma.runtime.runtime_services.interfaces import \
AbstractRuntimeService
class PyRuntimeService(AbstractRuntimeService):
"""Abstract RuntimeService for Python, it provides base methods
for start and run. It is not meant to instantiated directly
but used by inheritance
"""
def __init__(
self, protocol: ty.Type[AbstractSyncProtocol], *args, **kwargs
):
self.log = logging.getLogger(__name__)
self.log.setLevel(kwargs.get("loglevel", logging.WARNING))
super(PyRuntimeService, self).__init__(protocol=protocol)
self.service_to_process: ty.Iterable[CspSendPort] = []
self.process_to_service: ty.Iterable[CspRecvPort] = []
def start(self):
"""Start the necessary channels to coordinate with runtime and group
of processes this RuntimeService is managing"""
self.runtime_to_service.start()
self.service_to_runtime.start()
for i in range(len(self.service_to_process)):
self.service_to_process[i].start()
self.process_to_service[i].start()
self.run()
@abstractmethod
def run(self):
"""Override this method to implement the runtime service. The run
method is invoked upon start which called when the execution is
started by the runtime."""
def join(self):
"""Stop the necessary channels to coordinate with runtime and group
of processes this RuntimeService is managing"""
self.runtime_to_service.join()
self.service_to_runtime.join()
for i in range(len(self.service_to_process)):
self.service_to_process[i].join()
self.process_to_service[i].join()
def _relay_to_runtime_data_given_model_id(self, model_id: int):
"""Relays data received from ProcessModel given by model id to the
runtime"""
process_idx = self.model_ids.index(model_id)
data_recv_port = self.process_to_service[process_idx]
data_relay_port = self.service_to_runtime
num_items = data_recv_port.recv()
data_relay_port.send(num_items)
for _ in range(int(num_items[0])):
value = data_recv_port.recv()
data_relay_port.send(value)
def _relay_to_pm_data_given_model_id(self, model_id: int) -> MGMT_RESPONSE:
"""Relays data received from the runtime to the ProcessModel given by
the model id."""
process_idx = self.model_ids.index(model_id)
data_recv_port = self.runtime_to_service
data_relay_port = self.service_to_process[process_idx]
resp_port = self.process_to_service[process_idx]
# Receive and relay number of items
num_items = data_recv_port.recv()
data_relay_port.send(num_items)
# Receive and relay data1, data2, ...
for _ in range(int(num_items[0].item())):
data_relay_port.send(data_recv_port.recv())
rsp = resp_port.recv()
return rsp
def _send_pm_req_given_model_id(self, model_id: int, *requests):
"""Sends requests to a ProcessModel given by the model id."""
process_idx = self.model_ids.index(model_id)
req_port = self.service_to_process[process_idx]
for request in requests:
req_port.send(request)
def _handle_get_set(self, command):
if enum_equal(command, MGMT_COMMAND.GET_DATA):
requests: ty.List[np.ndarray] = [command]
# recv model_id
model_id: int = int(self.runtime_to_service.recv()[0].item())
# recv var_id
requests.append(self.runtime_to_service.recv())
self._send_pm_req_given_model_id(model_id, *requests)
self._relay_to_runtime_data_given_model_id(model_id)
elif enum_equal(command, MGMT_COMMAND.SET_DATA):
requests: ty.List[np.ndarray] = [command]
# recv model_id
model_id: int = int(self.runtime_to_service.recv()[0].item())
# recv var_id
requests.append(self.runtime_to_service.recv())
self._send_pm_req_given_model_id(model_id, *requests)
rsp = self._relay_to_pm_data_given_model_id(model_id)
self.service_to_runtime.send(rsp)
else:
raise RuntimeError(f"Unknown request {command}")
class LoihiPyRuntimeService(PyRuntimeService):
"""RuntimeService that implements Loihi SyncProtocol in Python."""
def __init__(self, protocol, *args, **kwargs):
super().__init__(protocol, *args, **kwargs)
self.req_pre_lrn_mgmt = False
self.req_post_lrn_mgmt = False
self.req_lrn = False
self.req_stop = False
self.req_pause = False
self.paused = False
self._error = False
self.pausing = False
self.stopping = False
class Phase:
SPK = enum_to_np(1)
PRE_MGMT = enum_to_np(2)
LRN = enum_to_np(3)
POST_MGMT = enum_to_np(4)
HOST = enum_to_np(5)
class PMResponse:
STATUS_DONE = enum_to_np(0)
"""Signfies Ack or Finished with the Command"""
STATUS_TERMINATED = enum_to_np(-1)
"""Signifies Termination"""
STATUS_ERROR = enum_to_np(-2)
"""Signifies Error raised"""
STATUS_PAUSED = enum_to_np(-3)
"""Signifies Execution State to be Paused"""
REQ_PRE_LRN_MGMT = enum_to_np(-4)
"""Signifies Request of PREMPTION"""
REQ_LEARNING = enum_to_np(-5)
"""Signifies Request of LEARNING"""
REQ_POST_LRN_MGMT = enum_to_np(-6)
"""Signifies Request of PREMPTION"""
REQ_PAUSE = enum_to_np(-7)
"""Signifies Request of PAUSE"""
REQ_STOP = enum_to_np(-8)
"""Signifies Request of STOP"""
def _next_phase(self, is_last_time_step: bool):
"""Advances the current phase to the next phase.
On the first time step it starts with HOST phase and advances to SPK.
Afterwards it loops: SPK -> PRE_MGMT -> LRN -> POST_MGMT -> SPK
On the last time step POST_MGMT advances to HOST phase."""
if self.req_pre_lrn_mgmt:
self.req_pre_lrn_mgmt = False
return LoihiPyRuntimeService.Phase.PRE_MGMT
if self.req_lrn:
self.req_lrn = False
return LoihiPyRuntimeService.Phase.LRN
if self.req_post_lrn_mgmt:
self.req_post_lrn_mgmt = False
return LoihiPyRuntimeService.Phase.POST_MGMT
if self.req_pause:
self.req_pause = False
return MGMT_COMMAND.PAUSE
if self.req_stop:
self.req_stop = False
return MGMT_COMMAND.STOP
if is_last_time_step:
return LoihiPyRuntimeService.Phase.HOST
return LoihiPyRuntimeService.Phase.SPK
def _send_pm_cmd(self, phase: MGMT_COMMAND):
"""Sends a command (phase information) to all ProcessModels."""
for send_port in self.service_to_process:
send_port.send(phase)
def _get_pm_resp(self) -> ty.Iterable[MGMT_RESPONSE]:
"""Retrieves responses of all ProcessModels."""
rcv_msgs = []
num_responses_expected = len(self.model_ids)
counter = 0
while counter < num_responses_expected:
ptos_recv_port = self.process_to_service[counter]
rcv_msgs.append(ptos_recv_port.recv())
counter += 1
for idx, recv_msg in enumerate(rcv_msgs):
if enum_equal(
recv_msg, LoihiPyRuntimeService.PMResponse.STATUS_ERROR
):
self._error = True
if enum_equal(
recv_msg, LoihiPyRuntimeService.PMResponse.REQ_PRE_LRN_MGMT
):
self.req_pre_lrn_mgmt = True
if enum_equal(
recv_msg, LoihiPyRuntimeService.PMResponse.REQ_POST_LRN_MGMT
):
self.req_post_lrn_mgmt = True
if enum_equal(
recv_msg, LoihiPyRuntimeService.PMResponse.REQ_LEARNING
):
self.req_lrn = True
if enum_equal(
recv_msg, LoihiPyRuntimeService.PMResponse.REQ_PAUSE
):
self.log.info(f"Process : {idx} has requested Pause")
self.req_pause = True
if enum_equal(recv_msg, LoihiPyRuntimeService.PMResponse.REQ_STOP):
self.log.info(f"Process : {idx} has requested Stop")
self.req_stop = True
return rcv_msgs
def _relay_pm_ack_given_model_id(self, model_id: int):
"""Relays ack received from ProcessModel given by model id to the
runtime."""
process_idx = self.model_ids.index(model_id)
ack_recv_port = self.process_to_service[process_idx]
ack_relay_port = self.service_to_runtime
ack_relay_port.send(ack_recv_port.recv())
def _handle_pause(self):
# Inform all ProcessModels about the PAUSE command
self._send_pm_cmd(MGMT_COMMAND.PAUSE)
rsps = self._get_pm_resp()
for rsp in rsps:
if not enum_equal(
rsp, LoihiPyRuntimeService.PMResponse.STATUS_PAUSED
):
raise ValueError(f"Wrong Response Received : {rsp}")
# Inform the runtime about successful pausing
self.service_to_runtime.send(MGMT_RESPONSE.PAUSED)
def _handle_stop(self):
# Inform all ProcessModels about the STOP command
self._send_pm_cmd(MGMT_COMMAND.STOP)
rsps = self._get_pm_resp()
for rsp in rsps:
if not enum_equal(
rsp, LoihiPyRuntimeService.PMResponse.STATUS_TERMINATED
):
raise ValueError(f"Wrong Response Received : {rsp}")
# Inform the runtime about successful termination
self.service_to_runtime.send(MGMT_RESPONSE.TERMINATED)
self.join()
def run(self):
"""Retrieves commands from the runtime. On STOP or PAUSE commands all
ProcessModels are notified and expected to TERMINATE or PAUSE,
respectively. Otherwise the number of time steps is received as command.
In this case iterate through the phases of the Loihi protocol until the
last time step is reached. The runtime is informed after the last time
step. The loop ends when receiving the STOP command from the runtime."""
selector = CspSelector()
phase = LoihiPhase.HOST
channel_actions = [(self.runtime_to_service, lambda: "cmd")]
while True:
# Probe if there is a new command from the runtime
action = selector.select(*channel_actions)
if action == "cmd":
command = self.runtime_to_service.recv()
if enum_equal(command, MGMT_COMMAND.STOP):
self._handle_stop()
return
elif enum_equal(command, MGMT_COMMAND.PAUSE):
self._handle_pause()
self.paused = True
elif enum_equal(command, MGMT_COMMAND.GET_DATA) or enum_equal(
command, MGMT_COMMAND.SET_DATA
):
if enum_equal(phase, LoihiPhase.HOST):
self._handle_get_set(command)
else:
raise ValueError(f"Wrong Phase: {phase} to call "
f"GET/SET")
else:
self.paused = False
# The number of time steps was received ("command")
# Start iterating through Loihi phases
curr_time_step = 0
phase = LoihiPhase.HOST
is_last_ts = False
while True:
# Check if it is the last time step
is_last_ts = enum_equal(
enum_to_np(curr_time_step), command
)
# Advance to the next phase
phase = self._next_phase(is_last_ts)
if enum_equal(phase, MGMT_COMMAND.STOP):
if not self.stopping:
self.service_to_runtime.send(
MGMT_RESPONSE.REQ_STOP)
phase = LoihiPhase.HOST
break
if enum_equal(phase, MGMT_COMMAND.PAUSE):
if not self.pausing:
self.service_to_runtime.send(
MGMT_RESPONSE.REQ_PAUSE)
# Move to Host phase (get/set Var needs it)
phase = LoihiPhase.HOST
break
# Increase time step if spiking phase
if enum_equal(phase, LoihiPhase.SPK):
curr_time_step += 1
# Inform ProcessModels about current phase
self._send_pm_cmd(phase)
# ProcessModels respond with DONE if not HOST phase
if not enum_equal(
phase, LoihiPyRuntimeService.Phase.HOST
):
self._get_pm_resp()
if self._error:
# Forward error to runtime
self.service_to_runtime.send(
MGMT_RESPONSE.ERROR
)
# stop all other pm
self._send_pm_cmd(MGMT_COMMAND.STOP)
return
# Check if pause or stop received from Runtime
if self.runtime_to_service.probe():
cmd = self.runtime_to_service.peek()
if enum_equal(cmd, MGMT_COMMAND.STOP):
self.stopping = True
self.req_stop = True
if enum_equal(cmd, MGMT_COMMAND.PAUSE):
self.pausing = True
self.req_pause = True
# If HOST phase (last time step ended) break the loop
if enum_equal(phase, LoihiPhase.HOST):
break
if self.pausing or self.stopping or enum_equal(
phase, MGMT_COMMAND.STOP) or enum_equal(
phase, MGMT_COMMAND.PAUSE):
continue
# Inform the runtime that last time step was reached
if is_last_ts:
self.service_to_runtime.send(MGMT_RESPONSE.DONE)
else:
self.service_to_runtime.send(MGMT_RESPONSE.ERROR)
class AsyncPyRuntimeService(PyRuntimeService):
"""RuntimeService that implements Async SyncProtocol in Py."""
def __init__(self, protocol, *args, **kwargs):
        super().__init__(protocol, *args, **kwargs)
self.req_stop = False
self.req_pause = False
self._error = False
self.running = False
class PMResponse:
STATUS_DONE = enum_to_np(0)
"""Signfies Ack or Finished with the Command"""
STATUS_TERMINATED = enum_to_np(-1)
"""Signifies Termination"""
STATUS_ERROR = enum_to_np(-2)
"""Signifies Error raised"""
STATUS_PAUSED = enum_to_np(-3)
"""Signifies Execution State to be Paused"""
REQ_PAUSE = enum_to_np(-4)
"""Signifies Request of PAUSE"""
REQ_STOP = enum_to_np(-5)
"""Signifies Request of STOP"""
def _send_pm_cmd(self, cmd: MGMT_COMMAND):
for stop_send_port in self.service_to_process:
stop_send_port.send(cmd)
def _get_pm_resp(self, stop=False, pause=False) -> ty.Iterable[
MGMT_RESPONSE]:
rcv_msgs = []
for ptos_recv_port in self.process_to_service:
rcv_msg = ptos_recv_port.recv()
if stop or pause:
if enum_equal(
rcv_msg, LoihiPyRuntimeService.PMResponse.STATUS_DONE
):
rcv_msg = ptos_recv_port.recv()
rcv_msgs.append(rcv_msg)
return rcv_msgs
def _handle_pause(self):
# Inform the runtime about successful pausing
self._send_pm_cmd(MGMT_COMMAND.PAUSE)
rsps = self._get_pm_resp(pause=True)
for rsp in rsps:
if not enum_equal(
rsp, LoihiPyRuntimeService.PMResponse.STATUS_PAUSED
):
self.service_to_runtime.send(MGMT_RESPONSE.ERROR)
raise ValueError(f"Wrong Response Received : {rsp}")
self.service_to_runtime.send(MGMT_RESPONSE.PAUSED)
def _handle_stop(self):
self._send_pm_cmd(MGMT_COMMAND.STOP)
rsps = self._get_pm_resp(stop=True)
for rsp in rsps:
if not enum_equal(
rsp, LoihiPyRuntimeService.PMResponse.STATUS_TERMINATED
):
self.service_to_runtime.send(MGMT_RESPONSE.ERROR)
raise ValueError(f"Wrong Response Received : {rsp}")
# Inform the runtime about successful termination
self.service_to_runtime.send(MGMT_RESPONSE.TERMINATED)
self.join()
def run(self):
"""Retrieves commands from the runtime and relays them to the process
models. Also send the acknowledgement back to runtime."""
selector = CspSelector()
channel_actions = [(self.runtime_to_service, lambda: "cmd")]
while True:
# Probe if there is a new command from the runtime
action = selector.select(*channel_actions)
channel_actions = []
if action == "cmd":
command = self.runtime_to_service.recv()
if enum_equal(command, MGMT_COMMAND.STOP):
self._handle_stop()
self.running = False
return
elif enum_equal(command, MGMT_COMMAND.PAUSE):
self._handle_pause()
self.running = False
elif enum_equal(command, MGMT_COMMAND.GET_DATA) or enum_equal(
command, MGMT_COMMAND.SET_DATA
):
if not self.running:
self._handle_get_set(command)
else:
raise ValueError(f"Wrong Phase: {self.running} to call "
f"GET/SET. AsyncProcess should not "
f"be running when it gets GET/SET "
f"call.")
else:
self._send_pm_cmd(MGMT_COMMAND.RUN)
self._send_pm_cmd(command)
self.running = True
for ptos_recv_port in self.process_to_service:
channel_actions.append(
(ptos_recv_port, lambda: "resp")
)
elif action == "resp":
resps = self._get_pm_resp()
done: bool = True
for resp in resps:
if enum_equal(
resp, AsyncPyRuntimeService.PMResponse.REQ_PAUSE
):
self.req_pause = True
if enum_equal(
resp, AsyncPyRuntimeService.PMResponse.REQ_STOP
):
self.req_stop = True
if enum_equal(
resp, AsyncPyRuntimeService.PMResponse.STATUS_ERROR
):
self._error = True
if not enum_equal(resp,
AsyncPyRuntimeService.PMResponse.STATUS_DONE): # noqa: E501
done = False
if done:
self.service_to_runtime.send(MGMT_RESPONSE.DONE)
if self.req_stop:
self.service_to_runtime.send(MGMT_RESPONSE.REQ_STOP)
if self.req_pause:
self.service_to_runtime.send(MGMT_RESPONSE.REQ_PAUSE)
if self._error:
self.service_to_runtime.send(MGMT_RESPONSE.ERROR)
self.running = False
else:
self.service_to_runtime.send(MGMT_RESPONSE.ERROR)
self.running = False
raise ValueError(f"Wrong type of channel action : {action}")
channel_actions.append((self.runtime_to_service, lambda: "cmd"))
/ipdb-0.13.13.tar.gz/ipdb-0.13.13/README.rst
IPython `pdb`
=============
.. image:: https://travis-ci.org/gotcha/ipdb.png?branch=master
:target: https://travis-ci.org/gotcha/ipdb
.. image:: https://codecov.io/gh/gotcha/ipdb/branch/master/graphs/badge.svg?style=flat
:target: https://codecov.io/gh/gotcha/ipdb?branch=master
Use
---
ipdb exports functions to access the IPython_ debugger, which features
tab completion, syntax highlighting, better tracebacks, and better
introspection, all with the same interface as the `pdb` module.
Example usage:
.. code-block:: python
import ipdb
ipdb.set_trace()
ipdb.set_trace(context=5) # will show five lines of code
# instead of the default three lines
# or you can set it via IPDB_CONTEXT_SIZE env variable
# or setup.cfg file
ipdb.pm()
ipdb.run('x[0] = 3')
result = ipdb.runcall(function, arg0, arg1, kwarg='foo')
result = ipdb.runeval('f(1,2) - 3')
Arguments for `set_trace`
+++++++++++++++++++++++++
The `set_trace` function accepts `context`, which sets how many lines of code are
shown, and `cond`, which accepts a boolean expression (such as `abc == 17`) and will
start ipdb's interface whenever `cond` is `True`.
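For instance, a minimal sketch combining both arguments (the loop and variable
names below are only illustrative):

.. code-block:: python

    import ipdb

    for i in range(100):
        # Break into the debugger only when the condition holds, and show
        # ten lines of surrounding code instead of the default three.
        ipdb.set_trace(context=10, cond=i == 42)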
Using configuration file
++++++++++++++++++++++++
It's possible to set up context using a `.ipdb` file in your home folder, or a
`setup.cfg` or `pyproject.toml` file in your project folder. You can also set the
location of the configuration file via the env var `$IPDB_CONFIG`. The environment
variable has priority over the home configuration file, which in turn has priority
over the setup config file.
Currently, only the context setting is available.
A valid setup.cfg is as follows
::
[ipdb]
context=5
A valid .ipdb is as follows
::
context=5
A valid pyproject.toml is as follows
::
[tool.ipdb]
context=5
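To point ipdb at a configuration file stored elsewhere, the file location can be
exported before running your script (the path and script name below are only
illustrative):

::

    $ export IPDB_CONFIG=/path/to/my_ipdb_config
    $ python myscript.py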
The post-mortem function, ``ipdb.pm()``, is equivalent to the magic function
``%debug``.
.. _IPython: http://ipython.org
If you install ``ipdb`` with a tool which supports ``setuptools`` entry points,
an ``ipdb`` script is made for you. You can use it to debug your Python 2 scripts like
::
$ bin/ipdb mymodule.py
And for Python 3
::
$ bin/ipdb3 mymodule.py
Alternatively with Python 2.7 only, you can also use
::
$ python -m ipdb mymodule.py
You can also enclose code with the ``with`` statement to launch ipdb if an exception is raised:
.. code-block:: python
from ipdb import launch_ipdb_on_exception
with launch_ipdb_on_exception():
[...]
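A more complete sketch of the same pattern (the ``divide`` helper below is only
illustrative):

.. code-block:: python

    from ipdb import launch_ipdb_on_exception

    def divide(a, b):
        return a / b

    with launch_ipdb_on_exception():
        # The ZeroDivisionError raised here opens ipdb at the faulting frame
        # instead of letting the traceback propagate.
        divide(1, 0)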
.. warning::
Context managers were introduced in Python 2.5.
Adding a context manager implies dropping Python 2.4 support.
Use ``ipdb==0.6`` with 2.4.
Or you can use ``iex`` as a function decorator to launch ipdb if an exception is raised:
.. code-block:: python
from ipdb import iex
@iex
def main():
[...]
.. warning::
Using ``from __future__ import print_function`` for Python 3 compat implies dropping Python 2.5 support.
Use ``ipdb<=0.8`` with 2.5.
Issues with ``stdout``
----------------------
Some tools, like ``nose``, fiddle with ``stdout``.
Until ``ipdb==0.9.4``, we tried to guess when we should also
fiddle with ``stdout`` to support those tools.
However, all strategies tried until 0.9.4 have proven brittle.
If you use ``nose`` or another tool that fiddles with ``stdout``, you should
explicitly ask for ``stdout`` fiddling by using ``ipdb`` like this
.. code-block:: python
import ipdb
ipdb.sset_trace()
ipdb.spm()
from ipdb import slaunch_ipdb_on_exception
with slaunch_ipdb_on_exception():
[...]
Development
-----------
``ipdb`` source code and tracker are at https://github.com/gotcha/ipdb.
Pull requests should take care of updating the changelog ``HISTORY.txt``.
Under the unreleased section, add your changes and your username.
Manual testing
++++++++++++++
To test your changes, make use of ``manual_test.py``. Create a virtual environment,
install IPython, run ``python manual_test.py``, and check whether your changes take
effect. If possible, create automated tests for better behaviour control.
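A minimal sketch of that workflow, assuming a POSIX shell:

::

    $ python -m venv .venv
    $ .venv/bin/pip install ipython
    $ .venv/bin/python manual_test.py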
Automated testing
+++++++++++++++++
To run automated tests locally, create a virtual environment, install `coverage`
and run `coverage run setup.py test`.
Third-party support
-------------------
Products.PDBDebugMode
+++++++++++++++++++++
Zope2 Products.PDBDebugMode_ uses ``ipdb``, if available, in place of ``pdb``.
.. _Products.PDBDebugMode: http://pypi.python.org/pypi/Products.PDBDebugMode
iw.debug
++++++++
iw.debug_ allows you to trigger an ``ipdb`` debugger on any published object
of a Zope2 application.
.. _iw.debug: http://pypi.python.org/pypi/iw.debug
ipdbplugin
++++++++++
ipdbplugin_ is a nose_ test runner plugin that also uses the IPython debugger
instead of ``pdb``. (It does not depend on ``ipdb`` anymore).
.. _ipdbplugin: http://pypi.python.org/pypi/ipdbplugin
.. _nose: http://readthedocs.org/docs/nose
/jupyterlab_remote_contents-0.1.1.tar.gz/jupyterlab_remote_contents-0.1.1/node_modules/internal-slot/README.md
# internal-slot <sup>[![Version Badge][npm-version-svg]][package-url]</sup>
[![dependency status][deps-svg]][deps-url]
[![dev dependency status][dev-deps-svg]][dev-deps-url]
[![License][license-image]][license-url]
[![Downloads][downloads-image]][downloads-url]
[![npm badge][npm-badge-png]][package-url]
Truly private storage, akin to the JS spec’s concept of internal slots.
Uses a WeakMap when available; a Map when not; and a regular object in even older engines. Performance and garbage collection behavior will reflect the environment’s capabilities accordingly.
## Example
```js
var SLOT = require('internal-slot');
var assert = require('assert');
var o = {};
assert.throws(function () { SLOT.assert(o, 'foo'); });
assert.equal(SLOT.has(o, 'foo'), false);
assert.equal(SLOT.get(o, 'foo'), undefined);
SLOT.set(o, 'foo', 42);
assert.equal(SLOT.has(o, 'foo'), true);
assert.equal(SLOT.get(o, 'foo'), 42);
assert.doesNotThrow(function () { SLOT.assert(o, 'foo'); });
```
## Tests
Simply clone the repo, `npm install`, and run `npm test`
## Security
Please email [@ljharb](https://github.com/ljharb) or see https://tidelift.com/security if you have a potential security vulnerability to report.
[package-url]: https://npmjs.org/package/internal-slot
[npm-version-svg]: https://versionbadg.es/ljharb/internal-slot.svg
[deps-svg]: https://david-dm.org/ljharb/internal-slot.svg
[deps-url]: https://david-dm.org/ljharb/internal-slot
[dev-deps-svg]: https://david-dm.org/ljharb/internal-slot/dev-status.svg
[dev-deps-url]: https://david-dm.org/ljharb/internal-slot#info=devDependencies
[npm-badge-png]: https://nodei.co/npm/internal-slot.png?downloads=true&stars=true
[license-image]: https://img.shields.io/npm/l/internal-slot.svg
[license-url]: LICENSE
[downloads-image]: https://img.shields.io/npm/dm/internal-slot.svg
[downloads-url]: https://npm-stat.com/charts.html?package=internal-slot
/msgraph-sdk-1.0.0a3.tar.gz/msgraph-sdk-1.0.0a3/msgraph/generated/users/item/calendars/item/events/item/instances/item/snooze_reminder/snooze_reminder_post_request_body.py
from __future__ import annotations
from kiota_abstractions.serialization import AdditionalDataHolder, Parsable, ParseNode, SerializationWriter
from typing import Any, Callable, Dict, List, Optional, Union
from ..........models import date_time_time_zone
class SnoozeReminderPostRequestBody(AdditionalDataHolder, Parsable):
"""
Provides operations to call the snoozeReminder method.
"""
@property
def additional_data(self,) -> Dict[str, Any]:
"""
Gets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
Returns: Dict[str, Any]
"""
return self._additional_data
@additional_data.setter
def additional_data(self,value: Dict[str, Any]) -> None:
"""
Sets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
Args:
value: Value to set for the AdditionalData property.
"""
self._additional_data = value
def __init__(self,) -> None:
"""
Instantiates a new snoozeReminderPostRequestBody and sets the default values.
"""
# Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
self._additional_data: Dict[str, Any] = {}
# The NewReminderTime property
self._new_reminder_time: Optional[date_time_time_zone.DateTimeTimeZone] = None
@staticmethod
def create_from_discriminator_value(parse_node: Optional[ParseNode] = None) -> SnoozeReminderPostRequestBody:
"""
Creates a new instance of the appropriate class based on discriminator value
Args:
parseNode: The parse node to use to read the discriminator value and create the object
Returns: SnoozeReminderPostRequestBody
"""
if parse_node is None:
raise Exception("parse_node cannot be undefined")
return SnoozeReminderPostRequestBody()
def get_field_deserializers(self,) -> Dict[str, Callable[[ParseNode], None]]:
"""
The deserialization information for the current model
Returns: Dict[str, Callable[[ParseNode], None]]
"""
fields = {
"new_reminder_time": lambda n : setattr(self, 'new_reminder_time', n.get_object_value(date_time_time_zone.DateTimeTimeZone)),
}
return fields
@property
def new_reminder_time(self,) -> Optional[date_time_time_zone.DateTimeTimeZone]:
"""
Gets the newReminderTime property value. The NewReminderTime property
Returns: Optional[date_time_time_zone.DateTimeTimeZone]
"""
return self._new_reminder_time
@new_reminder_time.setter
def new_reminder_time(self,value: Optional[date_time_time_zone.DateTimeTimeZone] = None) -> None:
"""
Sets the newReminderTime property value. The NewReminderTime property
Args:
value: Value to set for the NewReminderTime property.
"""
self._new_reminder_time = value
def serialize(self,writer: SerializationWriter) -> None:
"""
        Serializes information about the current object
Args:
writer: Serialization writer to use to serialize this model
"""
if writer is None:
raise Exception("writer cannot be undefined")
writer.write_object_value("NewReminderTime", self.new_reminder_time)
writer.write_additional_data_value(self.additional_data)
/infoblox-netmri-3.8.0.0.tar.gz/infoblox-netmri-3.8.0.0/infoblox_netmri/api/broker/v2_8_0/device_flow_filter_cfg_broker.py
from ..broker import Broker
class DeviceFlowFilterCfgBroker(Broker):
controller = "device_flow_filter_cfgs"
def show(self, **kwargs):
"""Shows the details for the specified device flow filter cfg.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param DeviceFlowFilterCfgID: The internal NetMRI identifier for this ip packet flow definition.
:type DeviceFlowFilterCfgID: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param methods: A list of device flow filter cfg methods. The listed methods will be called on each device flow filter cfg returned and included in the output. Available methods are: src_device_zone, dest_device_zone, data_source, device.
:type methods: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param include: A list of associated object types to include in the output. The listed associations will be returned as outputs named according to the association name (see outputs below). Available includes are: src_device_zone, dest_device_zone, data_source, device.
:type include: Array of String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return device_flow_filter_cfg: The device flow filter cfg identified by the specified DeviceFlowFilterCfgID.
:rtype device_flow_filter_cfg: DeviceFlowFilterCfg
"""
return self.api_request(self._get_method_fullname("show"), kwargs)
def index(self, **kwargs):
"""Lists the available device flow filter cfgs. Any of the inputs listed may be be used to narrow the list; other inputs will be ignored. Of the various ways to query lists, using this method is most efficient.
**Inputs**
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceFlowFilterCfgID: The internal NetMRI identifier for this ip packet flow definition.
:type DeviceFlowFilterCfgID: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceGroupID: The internal NetMRI identifier of the device groups to which to limit the results.
:type DeviceGroupID: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param timestamp: The data returned will represent the device flow filter cfgs as of this date and time. If omitted, the result will indicate the most recently collected data.
:type timestamp: DateTime
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param methods: A list of device flow filter cfg methods. The listed methods will be called on each device flow filter cfg returned and included in the output. Available methods are: src_device_zone, dest_device_zone, data_source, device.
:type methods: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param include: A list of associated object types to include in the output. The listed associations will be returned as outputs named according to the association name (see outputs below). Available includes are: src_device_zone, dest_device_zone, data_source, device.
:type include: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 0
:param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit for more information.
:type start: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 1000
:param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000.
:type limit: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` DeviceFlowFilterCfgID
:param sort: The data field(s) to use for sorting the output. Default is DeviceFlowFilterCfgID. Valid values are DeviceFlowFilterCfgID, DeviceID, DataSourceID, FfcName, FfcData, FfcFirstSeenTime, FfcStartTime, FfcEndTime, FfcTimestamp, FfcChangedCols, SrcDeviceZoneID, DestDeviceZoneID, FfcType, FfcDisplayText, FfcConfigText, FfcProvisionData.
:type sort: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` asc
:param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'.
:type dir: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param select: The list of attributes to return for each DeviceFlowFilterCfg. Valid values are DeviceFlowFilterCfgID, DeviceID, DataSourceID, FfcName, FfcData, FfcFirstSeenTime, FfcStartTime, FfcEndTime, FfcTimestamp, FfcChangedCols, SrcDeviceZoneID, DestDeviceZoneID, FfcType, FfcDisplayText, FfcConfigText, FfcProvisionData. If empty or omitted, all attributes will be returned.
:type select: Array
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_field: The field name for NIOS GOTO that is used for locating a row position of records.
:type goto_field: String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records.
:type goto_value: String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return device_flow_filter_cfgs: An array of the DeviceFlowFilterCfg objects that match the specified input criteria.
:rtype device_flow_filter_cfgs: Array of DeviceFlowFilterCfg
"""
return self.api_list_request(self._get_method_fullname("index"), kwargs)
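    # Illustrative sketch (not part of the original module): the start/limit
    # pagination documented above could be exercised roughly as follows. The
    # client construction and get_broker() lookup are assumptions about the
    # surrounding library; only the index() call and its keyword arguments
    # come from the inputs documented in this broker.
    #
    #     client = InfobloxNetMRI(host="netmri.example.com",
    #                             username="admin", password="secret")
    #     broker = client.get_broker("DeviceFlowFilterCfg")
    #     cfgs = broker.index(DeviceGroupID=[1], start=10, limit=10,
    #                         sort=["DeviceFlowFilterCfgID"], dir=["asc"])
    #     # With 100 matching records, this page holds records 10-19.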
def search(self, **kwargs):
"""Lists the available device flow filter cfgs matching the input criteria. This method provides a more flexible search interface than the index method, but searching using this method is more demanding on the system and will not perform to the same level as the index method. The input fields listed below will be used as in the index method, to filter the result, along with the optional query string and XML filter described below.
**Inputs**
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DataSourceID: The internal NetMRI identifier for the collector NetMRI that collected this data record.
:type DataSourceID: Array of Integer
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DestDeviceZoneID: The internal NetMRI identifier of the Zone that is destination point for this ip packet flow definition.
:type DestDeviceZoneID: Array of Integer
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceFlowFilterCfgID: The internal NetMRI identifier for this ip packet flow definition.
:type DeviceFlowFilterCfgID: Array of Integer
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceID: The internal NetMRI identifier for the device to which belongs this ip packet flow definition
:type DeviceID: Array of Integer
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param FfcChangedCols: The fields that changed between this revision of the record and the previous revision.
:type FfcChangedCols: Array of String
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param FfcConfigText: The text that was defined in the configuration for this ip packet flow definition.
:type FfcConfigText: Array of String
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param FfcData: Extra data for this usage of the rulelist. May depend on the vendor implementation.
:type FfcData: Array of String
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param FfcDisplayText: The associated text for display.
:type FfcDisplayText: Array of String
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param FfcEndTime: The ending effective time of this record, or empty if still in effect.
:type FfcEndTime: Array of DateTime
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param FfcFirstSeenTime: The timestamp of when NetMRI saw for the first time this ip packet flow definition
:type FfcFirstSeenTime: Array of DateTime
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param FfcName: The name associated with this usage of the rulelist.
:type FfcName: Array of String
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param FfcProvisionData: Internal data - do not modify, may change without warning.
:type FfcProvisionData: Array of String
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param FfcStartTime: The starting effective time of this record.
:type FfcStartTime: Array of DateTime
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param FfcTimestamp: The date and time this record was collected or calculated.
:type FfcTimestamp: Array of DateTime
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param FfcType: The type of operation applied on this ip packet flow definition. One of : 'filter', 'nat', 'vpn',
:type FfcType: Array of String
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param SrcDeviceZoneID: The internal NetMRI identifier of the Zone that is source point for this ip packet flow definition
:type SrcDeviceZoneID: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceGroupID: The internal NetMRI identifier of the device groups to which to limit the results.
:type DeviceGroupID: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param timestamp: The data returned will represent the device flow filter cfgs as of this date and time. If omitted, the result will indicate the most recently collected data.
:type timestamp: DateTime
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param methods: A list of device flow filter cfg methods. The listed methods will be called on each device flow filter cfg returned and included in the output. Available methods are: src_device_zone, dest_device_zone, data_source, device.
:type methods: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param include: A list of associated object types to include in the output. The listed associations will be returned as outputs named according to the association name (see outputs below). Available includes are: src_device_zone, dest_device_zone, data_source, device.
:type include: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 0
:param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit for more information.
:type start: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 1000
:param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000.
:type limit: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` DeviceFlowFilterCfgID
:param sort: The data field(s) to use for sorting the output. Default is DeviceFlowFilterCfgID. Valid values are DeviceFlowFilterCfgID, DeviceID, DataSourceID, FfcName, FfcData, FfcFirstSeenTime, FfcStartTime, FfcEndTime, FfcTimestamp, FfcChangedCols, SrcDeviceZoneID, DestDeviceZoneID, FfcType, FfcDisplayText, FfcConfigText, FfcProvisionData.
:type sort: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` asc
:param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'.
:type dir: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param select: The list of attributes to return for each DeviceFlowFilterCfg. Valid values are DeviceFlowFilterCfgID, DeviceID, DataSourceID, FfcName, FfcData, FfcFirstSeenTime, FfcStartTime, FfcEndTime, FfcTimestamp, FfcChangedCols, SrcDeviceZoneID, DestDeviceZoneID, FfcType, FfcDisplayText, FfcConfigText, FfcProvisionData. If empty or omitted, all attributes will be returned.
:type select: Array
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_field: The field name for NIOS GOTO that is used for locating a row position of records.
:type goto_field: String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records.
:type goto_value: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param query: This value will be matched against device flow filter cfgs, looking to see if one or more of the listed attributes contain the passed value. You may also surround the value with '/' and '/' to perform a regular expression search rather than a containment operation. Any record that matches will be returned. The attributes searched are: DataSourceID, DestDeviceZoneID, DeviceFlowFilterCfgID, DeviceID, FfcChangedCols, FfcConfigText, FfcData, FfcDisplayText, FfcEndTime, FfcFirstSeenTime, FfcName, FfcProvisionData, FfcStartTime, FfcTimestamp, FfcType, SrcDeviceZoneID.
:type query: String
| ``api version min:`` 2.3
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param xml_filter: A SetFilter XML structure to further refine the search. The SetFilter will be applied AFTER any search query or field values, but before any limit options. The limit and pagination will be enforced after the filter. Remind that this kind of filter may be costly and inefficient if not associated with a database filtering.
:type xml_filter: String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return device_flow_filter_cfgs: An array of the DeviceFlowFilterCfg objects that match the specified input criteria.
:rtype device_flow_filter_cfgs: Array of DeviceFlowFilterCfg
"""
return self.api_list_request(self._get_method_fullname("search"), kwargs)
def find(self, **kwargs):
"""Lists the available device flow filter cfgs matching the input specification. This provides the most flexible search specification of all the query mechanisms, enabling searching using comparison operations other than equality. However, it is more complex to use and will not perform as efficiently as the index or search methods. In the input descriptions below, 'field names' refers to the following fields: DataSourceID, DestDeviceZoneID, DeviceFlowFilterCfgID, DeviceID, FfcChangedCols, FfcConfigText, FfcData, FfcDisplayText, FfcEndTime, FfcFirstSeenTime, FfcName, FfcProvisionData, FfcStartTime, FfcTimestamp, FfcType, SrcDeviceZoneID.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_DataSourceID: The operator to apply to the field DataSourceID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. DataSourceID: The internal NetMRI identifier for the collector NetMRI that collected this data record. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_DataSourceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_DataSourceID: If op_DataSourceID is specified, the field named in this input will be compared to the value in DataSourceID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_DataSourceID must be specified if op_DataSourceID is specified.
:type val_f_DataSourceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_DataSourceID: If op_DataSourceID is specified, this value will be compared to the value in DataSourceID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_DataSourceID must be specified if op_DataSourceID is specified.
:type val_c_DataSourceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_DestDeviceZoneID: The operator to apply to the field DestDeviceZoneID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. DestDeviceZoneID: The internal NetMRI identifier of the Zone that is destination point for this ip packet flow definition. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_DestDeviceZoneID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_DestDeviceZoneID: If op_DestDeviceZoneID is specified, the field named in this input will be compared to the value in DestDeviceZoneID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_DestDeviceZoneID must be specified if op_DestDeviceZoneID is specified.
:type val_f_DestDeviceZoneID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_DestDeviceZoneID: If op_DestDeviceZoneID is specified, this value will be compared to the value in DestDeviceZoneID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_DestDeviceZoneID must be specified if op_DestDeviceZoneID is specified.
:type val_c_DestDeviceZoneID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_DeviceFlowFilterCfgID: The operator to apply to the field DeviceFlowFilterCfgID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. DeviceFlowFilterCfgID: The internal NetMRI identifier for this ip packet flow definition. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_DeviceFlowFilterCfgID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_DeviceFlowFilterCfgID: If op_DeviceFlowFilterCfgID is specified, the field named in this input will be compared to the value in DeviceFlowFilterCfgID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_DeviceFlowFilterCfgID must be specified if op_DeviceFlowFilterCfgID is specified.
:type val_f_DeviceFlowFilterCfgID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_DeviceFlowFilterCfgID: If op_DeviceFlowFilterCfgID is specified, this value will be compared to the value in DeviceFlowFilterCfgID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_DeviceFlowFilterCfgID must be specified if op_DeviceFlowFilterCfgID is specified.
:type val_c_DeviceFlowFilterCfgID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_DeviceID: The operator to apply to the field DeviceID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. DeviceID: The internal NetMRI identifier for the device to which belongs this ip packet flow definition For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_DeviceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_DeviceID: If op_DeviceID is specified, the field named in this input will be compared to the value in DeviceID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_DeviceID must be specified if op_DeviceID is specified.
:type val_f_DeviceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_DeviceID: If op_DeviceID is specified, this value will be compared to the value in DeviceID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_DeviceID must be specified if op_DeviceID is specified.
:type val_c_DeviceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_FfcChangedCols: The operator to apply to the field FfcChangedCols. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. FfcChangedCols: The fields that changed between this revision of the record and the previous revision. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_FfcChangedCols: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_FfcChangedCols: If op_FfcChangedCols is specified, the field named in this input will be compared to the value in FfcChangedCols using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_FfcChangedCols must be specified if op_FfcChangedCols is specified.
:type val_f_FfcChangedCols: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_FfcChangedCols: If op_FfcChangedCols is specified, this value will be compared to the value in FfcChangedCols using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_FfcChangedCols must be specified if op_FfcChangedCols is specified.
:type val_c_FfcChangedCols: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_FfcConfigText: The operator to apply to the field FfcConfigText. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. FfcConfigText: The text that was defined in the configuration for this ip packet flow definition. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_FfcConfigText: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_FfcConfigText: If op_FfcConfigText is specified, the field named in this input will be compared to the value in FfcConfigText using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_FfcConfigText must be specified if op_FfcConfigText is specified.
:type val_f_FfcConfigText: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_FfcConfigText: If op_FfcConfigText is specified, this value will be compared to the value in FfcConfigText using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_FfcConfigText must be specified if op_FfcConfigText is specified.
:type val_c_FfcConfigText: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_FfcData: The operator to apply to the field FfcData. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. FfcData: Extra data for this usage of the rulelist. May depend on the vendor implementation. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_FfcData: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_FfcData: If op_FfcData is specified, the field named in this input will be compared to the value in FfcData using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_FfcData must be specified if op_FfcData is specified.
:type val_f_FfcData: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_FfcData: If op_FfcData is specified, this value will be compared to the value in FfcData using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_FfcData must be specified if op_FfcData is specified.
:type val_c_FfcData: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_FfcDisplayText: The operator to apply to the field FfcDisplayText. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. FfcDisplayText: The associated text for display. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_FfcDisplayText: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_FfcDisplayText: If op_FfcDisplayText is specified, the field named in this input will be compared to the value in FfcDisplayText using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_FfcDisplayText must be specified if op_FfcDisplayText is specified.
:type val_f_FfcDisplayText: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_FfcDisplayText: If op_FfcDisplayText is specified, this value will be compared to the value in FfcDisplayText using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_FfcDisplayText must be specified if op_FfcDisplayText is specified.
:type val_c_FfcDisplayText: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_FfcEndTime: The operator to apply to the field FfcEndTime. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. FfcEndTime: The ending effective time of this record, or empty if still in effect. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_FfcEndTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_FfcEndTime: If op_FfcEndTime is specified, the field named in this input will be compared to the value in FfcEndTime using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_FfcEndTime must be specified if op_FfcEndTime is specified.
:type val_f_FfcEndTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_FfcEndTime: If op_FfcEndTime is specified, this value will be compared to the value in FfcEndTime using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_FfcEndTime must be specified if op_FfcEndTime is specified.
:type val_c_FfcEndTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_FfcFirstSeenTime: The operator to apply to the field FfcFirstSeenTime. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. FfcFirstSeenTime: The timestamp of when NetMRI saw for the first time this ip packet flow definition For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_FfcFirstSeenTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_FfcFirstSeenTime: If op_FfcFirstSeenTime is specified, the field named in this input will be compared to the value in FfcFirstSeenTime using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_FfcFirstSeenTime must be specified if op_FfcFirstSeenTime is specified.
:type val_f_FfcFirstSeenTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_FfcFirstSeenTime: If op_FfcFirstSeenTime is specified, this value will be compared to the value in FfcFirstSeenTime using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_FfcFirstSeenTime must be specified if op_FfcFirstSeenTime is specified.
:type val_c_FfcFirstSeenTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_FfcName: The operator to apply to the field FfcName. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. FfcName: The name associated with this usage of the rulelist. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_FfcName: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_FfcName: If op_FfcName is specified, the field named in this input will be compared to the value in FfcName using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_FfcName must be specified if op_FfcName is specified.
:type val_f_FfcName: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_FfcName: If op_FfcName is specified, this value will be compared to the value in FfcName using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_FfcName must be specified if op_FfcName is specified.
:type val_c_FfcName: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_FfcProvisionData: The operator to apply to the field FfcProvisionData. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. FfcProvisionData: Internal data - do not modify, may change without warning. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_FfcProvisionData: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_FfcProvisionData: If op_FfcProvisionData is specified, the field named in this input will be compared to the value in FfcProvisionData using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_FfcProvisionData must be specified if op_FfcProvisionData is specified.
:type val_f_FfcProvisionData: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_FfcProvisionData: If op_FfcProvisionData is specified, this value will be compared to the value in FfcProvisionData using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_FfcProvisionData must be specified if op_FfcProvisionData is specified.
:type val_c_FfcProvisionData: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_FfcStartTime: The operator to apply to the field FfcStartTime. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. FfcStartTime: The starting effective time of this record. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_FfcStartTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_FfcStartTime: If op_FfcStartTime is specified, the field named in this input will be compared to the value in FfcStartTime using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_FfcStartTime must be specified if op_FfcStartTime is specified.
:type val_f_FfcStartTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_FfcStartTime: If op_FfcStartTime is specified, this value will be compared to the value in FfcStartTime using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_FfcStartTime must be specified if op_FfcStartTime is specified.
:type val_c_FfcStartTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_FfcTimestamp: The operator to apply to the field FfcTimestamp. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. FfcTimestamp: The date and time this record was collected or calculated. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_FfcTimestamp: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_FfcTimestamp: If op_FfcTimestamp is specified, the field named in this input will be compared to the value in FfcTimestamp using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_FfcTimestamp must be specified if op_FfcTimestamp is specified.
:type val_f_FfcTimestamp: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_FfcTimestamp: If op_FfcTimestamp is specified, this value will be compared to the value in FfcTimestamp using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_FfcTimestamp must be specified if op_FfcTimestamp is specified.
:type val_c_FfcTimestamp: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_FfcType: The operator to apply to the field FfcType. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. FfcType: The type of operation applied on this ip packet flow definition. One of: 'filter', 'nat', or 'vpn'. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_FfcType: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_FfcType: If op_FfcType is specified, the field named in this input will be compared to the value in FfcType using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_FfcType must be specified if op_FfcType is specified.
:type val_f_FfcType: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_FfcType: If op_FfcType is specified, this value will be compared to the value in FfcType using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_FfcType must be specified if op_FfcType is specified.
:type val_c_FfcType: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_SrcDeviceZoneID: The operator to apply to the field SrcDeviceZoneID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. SrcDeviceZoneID: The internal NetMRI identifier of the Zone that is the source point for this ip packet flow definition. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_SrcDeviceZoneID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_SrcDeviceZoneID: If op_SrcDeviceZoneID is specified, the field named in this input will be compared to the value in SrcDeviceZoneID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_SrcDeviceZoneID must be specified if op_SrcDeviceZoneID is specified.
:type val_f_SrcDeviceZoneID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_SrcDeviceZoneID: If op_SrcDeviceZoneID is specified, this value will be compared to the value in SrcDeviceZoneID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_SrcDeviceZoneID must be specified if op_SrcDeviceZoneID is specified.
:type val_c_SrcDeviceZoneID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceGroupID: The internal NetMRI identifier of the device groups to which to limit the results.
:type DeviceGroupID: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param timestamp: The data returned will represent the device flow filter cfgs as of this date and time. If omitted, the result will indicate the most recently collected data.
:type timestamp: DateTime
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param methods: A list of device flow filter cfg methods. The listed methods will be called on each device flow filter cfg returned and included in the output. Available methods are: src_device_zone, dest_device_zone, data_source, device.
:type methods: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param include: A list of associated object types to include in the output. The listed associations will be returned as outputs named according to the association name (see outputs below). Available includes are: src_device_zone, dest_device_zone, data_source, device.
:type include: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 0
:param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit parameter for more information.
:type start: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 1000
:param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000.
:type limit: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` DeviceFlowFilterCfgID
:param sort: The data field(s) to use for sorting the output. Default is DeviceFlowFilterCfgID. Valid values are DeviceFlowFilterCfgID, DeviceID, DataSourceID, FfcName, FfcData, FfcFirstSeenTime, FfcStartTime, FfcEndTime, FfcTimestamp, FfcChangedCols, SrcDeviceZoneID, DestDeviceZoneID, FfcType, FfcDisplayText, FfcConfigText, FfcProvisionData.
:type sort: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` asc
:param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'.
:type dir: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param select: The list of attributes to return for each DeviceFlowFilterCfg. Valid values are DeviceFlowFilterCfgID, DeviceID, DataSourceID, FfcName, FfcData, FfcFirstSeenTime, FfcStartTime, FfcEndTime, FfcTimestamp, FfcChangedCols, SrcDeviceZoneID, DestDeviceZoneID, FfcType, FfcDisplayText, FfcConfigText, FfcProvisionData. If empty or omitted, all attributes will be returned.
:type select: Array
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_field: The field name for NIOS GOTO that is used for locating a row position of records.
:type goto_field: String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records.
:type goto_value: String
| ``api version min:`` 2.3
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param xml_filter: A SetFilter XML structure to further refine the search. The SetFilter will be applied AFTER any search query or field values, but before any limit options. The limit and pagination will be enforced after the filter. Note that this kind of filter may be costly and inefficient if it is not combined with database-level filtering.
:type xml_filter: String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return device_flow_filter_cfgs: An array of the DeviceFlowFilterCfg objects that match the specified input criteria.
:rtype device_flow_filter_cfgs: Array of DeviceFlowFilterCfg
"""
return self.api_list_request(self._get_method_fullname("find"), kwargs)
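# A minimal usage sketch for the find() call above (an illustration, not part of
# this module): it assumes an authenticated infoblox-netmri client object named
# "client" that exposes this broker via get_broker(); the field values are
# illustrative. It filters on FfcType with an explicit constant value and pages
# the results.
#
# broker = client.get_broker("DeviceFlowFilterCfg")
# cfgs = broker.find(
#     op_FfcType="=",           # operator applied to the FfcType field
#     val_c_FfcType="filter",   # explicit constant value compared to FfcType
#     limit=100,                # page size
#     start=0,                  # first record of the page
#     sort=["FfcTimestamp"],
#     dir=["desc"],
# )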
def src_device_zone(self, **kwargs):
"""The DeviceZone that is source point for this ip packet flow definition.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param DeviceFlowFilterCfgID: The internal NetMRI identifier for this ip packet flow definition.
:type DeviceFlowFilterCfgID: Integer
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return : The DeviceZone that is the source point for this ip packet flow definition.
:rtype : DeviceZone
"""
return self.api_request(self._get_method_fullname("src_device_zone"), kwargs)
def dest_device_zone(self, **kwargs):
"""The DeviceZone that is destination point for this ip packet flow definition.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param DeviceFlowFilterCfgID: The internal NetMRI identifier for this ip packet flow definition.
:type DeviceFlowFilterCfgID: Integer
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return : The DeviceZone that is the destination point for this ip packet flow definition.
:rtype : DeviceZone
"""
return self.api_request(self._get_method_fullname("dest_device_zone"), kwargs)
def device(self, **kwargs):
"""The device from which this data was collected.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param DeviceFlowFilterCfgID: The internal NetMRI identifier for this ip packet flow definition.
:type DeviceFlowFilterCfgID: Integer
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return : The device from which this data was collected.
:rtype : Device
"""
return self.api_request(self._get_method_fullname("device"), kwargs)
def data_source(self, **kwargs):
"""The collector NetMRI that collected this data record.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param DeviceFlowFilterCfgID: The internal NetMRI identifier for this ip packet flow definition.
:type DeviceFlowFilterCfgID: Integer
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return : The collector NetMRI that collected this data record.
:rtype : DataSource
"""
return self.api_request(self._get_method_fullname("data_source"), kwargs)
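# A minimal usage sketch for the related-object accessors above (illustrative;
# "client" is assumed to be an authenticated infoblox-netmri client and the
# record id is made up):
#
# broker = client.get_broker("DeviceFlowFilterCfg")
# zone = broker.src_device_zone(DeviceFlowFilterCfgID=123)
# device = broker.device(DeviceFlowFilterCfgID=123)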
|
PypiClean
|
/cassowary-0.5.2.tar.gz/cassowary-0.5.2/docs/topics/theory.rst
|
Solving constraint systems
==========================
Constraint solving systems are an algorithmic approach to solving Linear
Programming problems. A linear programming problem is a mathematical problem
where you have a set of non-negative, real-valued variables (``x[1], x[2],
... x[n]``), and a series of linear constraints (i.e., no exponential terms) on
those variables. These constraints are expressed as a set of equations of the
form:
``a[1]x[1] + ... + a[n]x[n] = b``,
``a[1]x[1] + ... + a[n]x[n] <= b``, or
``a[1]x[1] + ... + a[n]x[n] >= b``.
Given these constraints, the problem is to find the values of ``x[i]`` that
minimize or maximize the value of an **objective function**:
``c + d[1]x[1] + ... + d[n]x[n]``
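For example, with the constraints ``x[1] + x[2] <= 10`` and ``x[1] >= 2``,
maximizing the objective ``x[1] + x[2]`` drives it to its bound of 10, while the
individual values of ``x[1]`` and ``x[2]`` are left for the solver to choose.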
Cassowary is an algorithm designed to solve linear programming problems of
this type. Published in 1997, it now forms the basis for the UI layout tools
in OS X Lion, and iOS 6+ (the approach known as `Auto Layout`_). The Cassowary
algorithm (and this implementation of it) provides the tools to describe a set
of constraints, and then find an optimal solution for that set of constraints.
.. _Auto Layout: https://developer.apple.com/library/ios/documentation/userexperience/conceptual/AutolayoutPG/Introduction/Introduction.html
Variables
---------
At the core of the constraint problem are the variables in the system.
In the formal mathematical system, these are the ``x[n]`` terms; in Python,
these are rendered as instances of the :class:`~cassowary.Variable` class.
Each variable is named, and can accept a default value. To create a variable,
instantiate an instance of :class:`~cassowary.Variable`::
from cassowary import Variable
# Create a variable with a default value.
x1 = Variable('x1')
# Create a variable with a specific value
x2 = Variable('x2', 42.0)
Any value provided for the variable is just a starting point. When constraints
are imposed, this value can and will change, subject to the requirements of
the constraints. However, providing an initial value may affect the search process;
if there's an ambiguity in the constraints (i.e., there's more than one
possible solution), the initial value provided to variables will affect the solution
on which the system converges.
Constraints
-----------
A constraint is a mathematical equality or inequality that defines the linear
programming system.
A constraint is declared by providing the Python expression that encompasses the
logic described by the constraint. The syntax looks essentially the same as the
raw mathematical expression::
from cassowary import Variable
# Create a variable with a default value.
x1 = Variable('x1')
x2 = Variable('x2')
x3 = Variable('x3')
# Define the constraint
constraint = x1 + 3 * x2 <= 4 * x3 + 2
In this example, `constraint` holds the definition for the constraint system.
Although the statement uses the Python comparison operator `<=`, the result is
*not* a boolean. The comparison operators `<=`, `<`, `>=`, `>`, and `==` have
been overridden for instances of :class:`~cassowary.Variable` to enable you to
easily define constraints.
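You can check this in the interpreter (a minimal sketch, reusing the variables
defined above)::
>>> constraint = x1 + 3 * x2 <= 4 * x3 + 2
>>> isinstance(constraint, bool)
False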
Solvers
-------
The solver is the engine that resolves the linear constraints into a solution.
There are many approaches to this problem, and the development of algorithmic
approaches has been the subject of math and computer science research for over
70 years. Cassowary provides one implementation -- a
:class:`~cassowary.SimplexSolver`, implementing the Simplex algorithm defined
by Dantzig in the 1940s.
The solver takes no arguments during construction; once constructed, you simply
add constraints to the system.
As a simple example, let's solve the problem posed in Section 2 of `Badros
& Borning's paper on Cassowary`_. In this problem, we have a one-dimensional
number line spanning from 0 to 100. There are three points on it (left, middle
and right), with the following constraints:
* The middle point must be halfway between the left and right point;
* The left point must be at least 10 to the left of the right point;
* All points must fall in the range 0-100.
This system can be defined in Python as follows::
from cassowary import SimplexSolver, Variable
solver = SimplexSolver()
left = Variable('left')
middle = Variable('middle')
right = Variable('right')
solver.add_constraint(middle == (left + right) / 2)
solver.add_constraint(right == left + 10)
solver.add_constraint(right <= 100)
solver.add_constraint(left >= 0)
There are infinitely many possible solutions to this system; if we
interrogate the variables, we'll see that the solver has provided one
possible solution::
>>> left.value
90.0
>>> middle.value
95.0
>>> right.value
100.0
.. _Badros & Borning's paper on Cassowary: http://www.cs.washington.edu/research/constraints/cassowary/cassowary-tr.pdf
Stay constraints
----------------
If we want a particular solution to our left/right/middle problem, we need to
fix a value somewhere. To do this, we add a `Stay` - a special constraint that
says that the value should not be altered.
For example, we might want to enforce the fact that the middle value should
stay at a value of 45. We construct the system as before, but add::
middle.value = 45.0
solver.add_stay(middle)
Now when we interrogate the solver, we'll get values that reflect this fixed
point::
>>> left.value
40.0
>>> middle.value
45.0
>>> right.value
50.0
Constraint strength
-------------------
Not all constraints are equal. Some are absolute requirements - for example, a
requirement that all values remain in a specific range. However, other
constraints may be suggestions, rather than hard requirements.
To accommodate this, Cassowary allows all constraints to have a **strength**.
Strength can be one of:
* ``REQUIRED``
* ``STRONG``
* ``MEDIUM``
* ``WEAK``
``REQUIRED`` constraints **must** be satisfied; the remaining strengths will
be satisfied with declining priority.
To define a strength, provide the strength value as an argument when adding
the constraint (or stay)::
from cassowary import SimplexSolver, Variable, STRONG, WEAK
solver = SimplexSolver()
x = Variable('x')
# Define some non-required constraints
solver.add_constraint(x <= 100, strength=STRONG)
solver.add_stay(x, strength=WEAK)
Unless otherwise specified, all constraints are ``REQUIRED``.
Constraint weight
-----------------
If you have multiple constraints of the same strength, you may want to have a
tie-breaker between them. To do this, you can set a **weight**, in addition to
a strength::
from cassowary import SimplexSolver, Variable, STRONG
solver = SimplexSolver()
x = Variable('x')
# Define some non-required constraints
solver.add_constraint(x <= 100, strength=STRONG, weight=10)
solver.add_constraint(x >= 50, strength=STRONG, weight=20)
Editing constraints
-------------------
Any constraint can be removed from a system; just retain the reference provided
when you add the constraint::
from cassowary import SimplexSolver, Variable
solver = SimplexSolver()
x = Variable('x')
# Define a constraint
constraint = solver.add_constraint(x <= 100)
# Remove it again
solver.remove_constraint(constraint)
Once a constraint is removed, the system will be automatically re-evaluated,
with the possible side effect that the values in the system will change.
But what if you want to change a variable's value without introducing a
new constraint? In this case, you can use an edit context.
Here's an example of an edit context in practice::
from cassowary import SimplexSolver, Variable
solver = SimplexSolver()
x = Variable('x')
# Add a stay to x - that is, don't change the value.
solver.add_stay(x)
# Now, mark x as being editable...
solver.add_edit_variable(x)
# ... start an edit context...
with solver.edit():
# ... and suggest a new value for the variable.
solver.suggest_value(x, 42.0)
When the edit context exits, the system will re-evaluate itself, and the
variable will have the new value. However, the variable isn't guaranteed
to have the value you suggested - in this case it will, but if your
constraint system has other constraints, they may affect the value of
the variable after the suggestion has been applied.
All variables in the system will be re-evaluated when you leave the edit
context; however, if you need to force a re-evaluation in the middle of an
edit context, you can do so by calling :meth:`~cassowary.Solver.resolve()`.
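For instance (a minimal sketch, assuming the same solver, stay, and edit
variable setup as above)::
with solver.edit():
    solver.suggest_value(x, 10.0)
    solver.resolve()   # force a re-evaluation mid-edit
    print(x.value)     # reflects the first suggestion
    solver.suggest_value(x, 42.0)
# leaving the edit context re-evaluates the system one more time
print(x.value)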
|
PypiClean
|
/iqpp.plone.rotating-0.1b1.tar.gz/iqpp.plone.rotating-0.1b1/iqpp/plone/rotating/portlets/rotating.py
|
from zope.formlib import form
from zope.interface import implements
from zope import schema
# plone imports
from plone.app.portlets.portlets import base
from plone.portlets.interfaces import IPortletDataProvider
# Five imports
from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile
# CMFCore imports
from Products.CMFCore.utils import getToolByName
# iqpp.plone.rotating imports
from iqpp.plone.rotating.config import _
from iqpp.plone.rotating.interfaces import IRotating
class IRotatingPortlet(IPortletDataProvider):
"""
"""
name = schema.TextLine(
title=_(u'Title'),
description=_(u'The title of the portlet'),
required=True,
default=u"Title")
path = schema.TextLine(
title=_(u'Path To Folder'),
description=_(u'The source folder.'),
required=True,
default=u"")
limit = schema.Int(
title=_(u'Number of objects to display'),
description=_(u'How many objects to list.'),
required=True,
default=1)
class Assignment(base.Assignment):
"""
"""
implements(IRotatingPortlet)
def __init__(self, name=u"Rotating Objects", path=u"", limit=1):
"""
"""
self.name = name
self.path = path
self.limit = limit
@property
def title(self):
return _(u"Rotating")
class Renderer(base.Renderer):
"""
"""
render = ViewPageTemplateFile('rotating.pt')
def update(self):
"""
"""
mtool = getToolByName(self.context, "portal_membership")
if mtool.checkPermission("Manage portal", self.context):
self.isNoManager = False
else:
self.isNoManager = True
def getRotatingObjects(self):
"""
"""
path = self.data.path.encode("utf-8")
obj = self.context.restrictedTraverse(path)
return IRotating(obj).getItems(self.data.limit)
def title(self):
"""
"""
return self.data.name
class AddForm(base.AddForm):
"""
"""
form_fields = form.Fields(IRotatingPortlet)
label = _(u"Rotating Portlet")
description = _(u"This portlet displays rotating objects.")
def create(self, data):
"""
"""
return Assignment(
name = data.get("name", u"Title"),
path = data.get('path', u''),
limit = data.get('limit', 5),
)
class EditForm(base.EditForm):
"""
"""
form_fields = form.Fields(IRotatingPortlet)
label = _(u"Edit Rotating Portlet")
description = _(u"This portlet displays rotating objects.")
|
PypiClean
|
/brainbox-ibl-1.0.0.tar.gz/brainbox-ibl-1.0.0/ibllib/dsp/fourier.py
|
import numpy as np
def fscale(ns, si=1, one_sided=False):
"""
numpy.fft.fftfreq returns Nyquist as a negative frequency so we propose this instead
:param ns: number of samples
:param si: sampling interval in seconds
:param one_sided: if True, returns only positive frequencies
:return: fscale: numpy vector containing frequencies in Hertz
"""
fsc = np.arange(0, np.floor(ns / 2) + 1) / ns / si # sample the frequency scale
if one_sided:
return fsc
else:
return np.concatenate((fsc, -fsc[slice(-2 + (ns % 2), 0, -1)]), axis=0)
def freduce(x, axis=None):
"""
Reduces a spectrum to positive frequencies only
Works on the last dimension (contiguous in c-stored array)
:param x: numpy.ndarray
:param axis: axis along which to perform reduction (last axis by default)
:return: numpy.ndarray
"""
if axis is None:
axis = x.ndim - 1
siz = list(x.shape)
siz[axis] = int(np.floor(siz[axis] / 2 + 1))
return np.take(x, np.arange(0, siz[axis]), axis=axis)
def fexpand(x, ns=1, axis=None):
"""
Reconstructs full spectrum from positive frequencies
Works on the last dimension (contiguous in c-stored array)
:param x: numpy.ndarray
:param ns: number of samples of the full (expanded) spectrum
:param axis: axis along which to perform the expansion (last axis by default)
:return: numpy.ndarray
"""
if axis is None:
axis = x.ndim - 1
# dec = int(ns % 2) * 2 - 1
# xcomp = np.conj(np.flip(x[..., 1:x.shape[-1] + dec], axis=axis))
ilast = int((ns + (ns % 2)) / 2)
xcomp = np.conj(np.flip(np.take(x, np.arange(1, ilast), axis=axis), axis=axis))
return np.concatenate((x, xcomp), axis=axis)
def bp(ts, si, b, axis=None):
"""
Band-pass filter in frequency domain
:param ts: time series
:param si: sampling interval in seconds
:param b: cutoff frequencies: 4-element vector or list
:param axis: axis along which to perform the filtering (last axis by default)
:return: filtered time series
"""
return _freq_filter(ts, si, b, axis=axis, typ='bp')
def lp(ts, si, b, axis=None):
"""
Low-pass filter in frequency domain
:param ts: time series
:param si: sampling interval in seconds
:param b: cutoff frequencies: 2-element vector or list
:param axis: axis along which to perform the filtering (last axis by default)
:return: filtered time series
"""
return _freq_filter(ts, si, b, axis=axis, typ='lp')
def hp(ts, si, b, axis=None):
"""
High-pass filter in frequency domain
:param ts: time series
:param si: sampling interval in seconds
:param b: cutoff frequencies: 2-element vector or list
:param axis: axis along which to perform the filtering (last axis by default)
:return: filtered time series
"""
return _freq_filter(ts, si, b, axis=axis, typ='hp')
def _freq_filter(ts, si, b, axis=None, typ='lp'):
"""
Wrapper for hp/lp/bp filters
"""
if axis is None:
axis = ts.ndim - 1
ns = ts.shape[axis]
f = fscale(ns, si=si, one_sided=True)
if typ == 'bp':
filc = _freq_vector(f, b[0:2], typ='hp') * _freq_vector(f, b[2:4], typ='lp')
else:
filc = _freq_vector(f, b, typ=typ)
if axis < (ts.ndim - 1):
filc = filc[:, np.newaxis]
return np.real(np.fft.ifft(np.fft.fft(ts, axis=axis) * fexpand(filc, ns, axis=0), axis=axis))
def _freq_vector(f, b, typ='lp'):
"""
Returns a frequency modulated vector for filtering
:param f: frequency vector, uniform and monotonic
:param b: array of 2 frequency bounds
:return: amplitude modulated frequency vector
"""
filc = ((f <= b[0]).astype(float) +
np.bitwise_and(f > b[0], f < b[1]).astype(float) *
(0.5 * (1 + np.sin(np.pi * (f - ((b[0] + b[1]) / 2)) /
(b[0] - b[1])))))
if typ == 'hp':
return 1 - filc
elif typ == 'lp':
return filc
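# A minimal usage sketch (illustrative values; not part of the library): low-pass
# filter a noisy signal sampled at 1 kHz, passing frequencies below 40 Hz and
# rolling off to zero by 60 Hz.
if __name__ == "__main__":
    si = 0.001  # sampling interval in seconds (1 kHz)
    t = np.arange(0, 1, si)
    ts = np.sin(2 * np.pi * 5 * t) + 0.5 * np.random.randn(t.size)
    filtered = lp(ts, si, [40, 60])  # b = [pass-band edge, stop-band edge] in Hz
    print(ts.shape, filtered.shape)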
|
PypiClean
|
/OpenSRANE-0.0.3.tar.gz/OpenSRANE-0.0.3/opensrane/Plot/Matplot.py
|
import matplotlib.pyplot as _plt
from scipy.stats import norm as _norm
from scipy.stats import lognorm as _lognorm
import opensrane as _opr
import pandas as _pd
import numpy as _np
# from plotly.offline import download_plotlyjs, init_notebook_mode, plot, iplot
from plotly.offline import iplot as _iplot
import random as _rnd
def PlotUnits2D():
#Initial Figure Settings
#Initial Plot Settings---------------------------------------
fig, ax = _plt.subplots(figsize=(12, 10), dpi=80)
font1 = {'family':'serif','color':'blue','size':18}
font2 = {'family':'serif','color':'darkred','size':14}
_plt.title("PlantUnits",fontdict = font1,loc='left')
_plt.xlabel("X",fontdict = font2)
_plt.ylabel("Y",fontdict = font2)
ColBound=[0, 18/255, 255/255] # Boundary Colors
ColNodamage=[0, 18/255, 255/255] #Undamaged Colors
ColDamaged=[255/255, 0, 5/255] #Damaged Colors
#Get All Defined Plant Units
UnitObj=_opr.PlantUnits.ObjManager.Objlst
minx=None
maxx=None
miny=None
maxy=None
for Unit in UnitObj:
# Onground Tanks
if Unit.__class__==_opr.PlantUnits.ONGStorage:
# Get Geometry and Location Data
xc=Unit.Hlocalcoord
yc=Unit.Vlocalcoord
D=Unit.d_Storage
tag=Unit.tag
name=Unit.__class__.__name__
# Set The boundary
if minx==None:
minx=xc-D/2
maxx=xc+D/2
miny=yc-D/2
maxy=yc+D/2
else:
if xc-D/2<minx: minx=xc-D/2
if xc+D/2>maxx: maxx=xc+D/2
if yc-D/2<miny: miny=yc-D/2
if yc+D/2>maxy: maxy=yc+D/2
# Add Tank Shape
if Unit.isdamaged==False:
col2=ColNodamage
else:
col2=ColDamaged
ax.add_patch(_plt.Circle((xc, yc), D/2, alpha=0.3, linestyle ='-',linewidth =1,edgecolor =ColBound,facecolor =col2))
ax.scatter(xc, yc, color = ColBound, marker = 'o', s = 10)
#Plot limits and aspect ratio settings --------------------------------------------------------------------------------------------------------
Ratio=10/12
L1, L2 = maxx-minx, maxy-miny
xc, yc= (maxx+minx)/2 , (maxy+miny)/2
if L2/L1>Ratio:
L1=L2/Ratio
minx=xc-L1/2
maxx=xc+L1/2
else:
L2=L1*Ratio
miny=yc-L2/2
maxy=yc+L2/2
_plt.xlim(minx-0.05*L1,maxx+0.05*L1)
_plt.ylim(miny-0.05*L2,maxy+0.05*L2)
_plt.show()
return
def PlotFragilities(StdNumber=3,NPoints=100):
stdN=StdNumber
N=NPoints
#Initial Plot Settings---------------------------------------
_plt.figure(figsize=(12, 10), dpi=80)
font1 = {'family':'serif','color':'blue','size':18}
font2 = {'family':'serif','color':'darkred','size':14}
_plt.title("Fragility Curves",fontdict = font1,loc='left')
_plt.xlabel("Random Variables",fontdict = font2)
_plt.ylabel("Probability",fontdict = font2)
#Get All defined Fragilities
FragTagObjs=_opr.Fragilities.ObjManager.TagObjDict
#Calculate Range of random variables---------------------------------
data={} #Dictionary for storing data
x=[] #Random Variables range
for tag,FragObj in FragTagObjs.items():
if FragObj.DistType=='normal' or FragObj.DistType=='lognormal':
minv=FragObj.mean-stdN*FragObj.StdDev
maxv=FragObj.mean+stdN*FragObj.StdDev
x=x+[minv+(maxv-minv)/N*x for x in range(N-1)]
x=filter(lambda x:x>=0,x) #Only Positive Random Variables Have meaning
x=set(x)
x=list(x)
x.sort()
#Calculate Probability for each distribution and plot it------------------------------------
for tag,FragObj in FragTagObjs.items():
if FragObj.DistType=='normal' or FragObj.DistType=='lognormal':
y=[FragObj.GetProbability(x) for x in x]
lbl=f'tag{tag}, {FragObj.modename}'
_plt.fill_between(x,y,color=(_rnd.random(), _rnd.random(), _rnd.random(), 0.3),label=lbl)
_plt.grid(color = 'green', linestyle = '--', linewidth = 0.4)
_plt.legend(loc=2)
_plt.draw()
_plt.xlim(0, max(x))
_plt.ylim(0, 1.05)
return
def PlotHazard():
'''
The first Hazard object defined by the user is drawn by this function.
'''
#Initial Plot Settings---------------------------------------
_plt.figure(figsize=(12, 10), dpi=80)
font1 = {'family':'serif','color':'blue','size':18}
font2 = {'family':'serif','color':'darkred','size':14}
_plt.title("Hazard Curve",fontdict = font1,loc='left')
_plt.xlabel("Magnitude",fontdict = font2)
_plt.ylabel("Probability",fontdict = font2)
#Get The first Hazard Object
HazarbObj=_opr.Hazard.ObjManager.Objlst[0]
#Get Hazard Curve Values----------------------------------------------------
x=HazarbObj.Magnitude
y=HazarbObj.Probabilities
lbl='Defined Hazard'
_plt.xlim(min(x), max(x))
_plt.ylim(min(y), max(y))
_plt.fill_between(x,y,color=(_rnd.random(), _rnd.random(), _rnd.random(), 0.3),label=lbl)
_plt.grid(color = 'green', linestyle = '--', linewidth = 0.4)
_plt.legend(loc=1)
_plt.draw()
return
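# A minimal usage sketch (hedged: it assumes plant units, fragilities and a hazard
# curve have already been defined through the opensrane object managers, and that
# this module is reachable as opensrane.Plot.Matplot):
#
# import opensrane as opr
# opr.Plot.Matplot.PlotUnits2D()
# opr.Plot.Matplot.PlotFragilities(StdNumber=3, NPoints=100)
# opr.Plot.Matplot.PlotHazard()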
|
PypiClean
|
/MDSuite-0.2.0-py3-none-any.whl/mdsuite/calculators/green_kubo_thermal_conductivity.py
|
from abc import ABC
from dataclasses import dataclass
import numpy as np
import tensorflow as tf
import tensorflow_probability as tfp
from bokeh.models import Span
from tqdm import tqdm
from mdsuite.calculators.calculator import call
from mdsuite.calculators.trajectory_calculator import TrajectoryCalculator
from mdsuite.database.mdsuite_properties import mdsuite_properties
@dataclass
class Args:
"""
Data class for the saved properties.
"""
data_range: int
correlation_time: int
tau_values: np.s_
atom_selection: np.s_
integration_range: int
class GreenKuboThermalConductivity(TrajectoryCalculator, ABC):
"""
Class for the Green-Kubo Thermal conductivity implementation
Attributes
----------
experiment : object
Experiment class to call from
x_label : str
X label of the tensor_values when plotted
y_label : str
Y label of the tensor_values when plotted
analysis_name : str
Name of the analysis
See Also
--------
mdsuite.calculators.calculator.Calculator class
Examples
--------
experiment.run_computation.GreenKuboThermalConductivity(data_range=500,
plot=True, correlation_time=10)
"""
def __init__(self, **kwargs):
"""
Class for the Green-Kubo Thermal conductivity implementation
Attributes
----------
experiment : object
Experiment class to call from
"""
super().__init__(**kwargs)
self.scale_function = {"linear": {"scale_factor": 5}}
self.loaded_property = mdsuite_properties.thermal_flux
self.system_property = True
self.x_label = r"$$\text{Time} / s$$"
self.y_label = r"$$\text{JACF} / (C^{2}\cdot m^{2}/s^{2})$$"
self.analysis_name = "Green_Kubo_Thermal_Conductivity"
self._dtype = tf.float64
@call
def __call__(
self,
plot=False,
data_range=500,
tau_values: np.s_ = np.s_[:],
correlation_time: int = 1,
integration_range: int = None,
):
"""
Class for the Green-Kubo Thermal conductivity implementation
Parameters
----------
plot : bool
if true, plot the output.
data_range : int
Data range to use in the analysis.
correlation_time : int
Correlation time to use in the window sampling.
integration_range : int
Range over which the integration should be performed.
"""
self.plot = plot
self.jacf: np.ndarray
self.prefactor: float
self.sigma = []
if integration_range is None:
integration_range = data_range
# set args that will affect the computation result
self.args = Args(
data_range=data_range,
correlation_time=correlation_time,
tau_values=tau_values,
atom_selection=np.s_[:],
integration_range=integration_range,
)
self.time = self._handle_tau_values()
self.jacf = np.zeros(self.data_resolution)
def check_input(self):
"""
Check the user input to ensure no conflicts are present.
Returns
-------
"""
self._run_dependency_check()
def _calculate_prefactor(self):
"""
Compute the thermal conductivity pre-factor.
Returns
-------
"""
# Calculate the prefactor
# prepare the prefactor for the integral
numerator = 1
denominator = (
3
* (self.args.data_range - 1)
* self.experiment.temperature**2
* self.experiment.units.boltzmann
* self.experiment.volume
)
prefactor_units = (
self.experiment.units.energy
/ self.experiment.units.length
/ self.experiment.units.time
)
self.prefactor = (numerator / denominator) * prefactor_units
def _apply_averaging_factor(self):
"""
Apply the averaging factor to the JACF array.
Returns
-------
"""
pass
def ensemble_operation(self, ensemble: tf.Tensor):
"""
Calculate the JACF of an ensemble and update the running JACF and integral.
Parameters
----------
ensemble : tf.Tensor
Ensemble to analyze.
Returns
-------
None; results are accumulated in self.jacf and self.sigma.
"""
jacf = self.args.data_range * tf.reduce_sum(
tfp.stats.auto_correlation(ensemble, normalize=False, axis=0, center=False),
axis=-1,
)
self.jacf += jacf
self.sigma.append(
np.trapz(
jacf[: self.args.integration_range],
x=self.time[: self.args.integration_range],
)
)
def _post_operation_processes(self):
"""
call the post-op processes
Returns
-------
"""
result = self.prefactor * np.array(self.sigma)
data = {
"computation_results": result[0],
"uncertainty": result[1],
"time": self.time.tolist(),
"acf": self.jacf.numpy().tolist(),
}
self.queue_data(data=data, subjects=["System"])
# Update the plot if required
if self.plot:
span = Span(
location=(np.array(self.time) * self.experiment.units.time)[
self.args.integration_range - 1
],
dimension="height",
line_dash="dashed",
)
self.run_visualization(
x_data=np.array(self.time) * self.experiment.units.time,
y_data=self.jacf.numpy(),
title=f"{result[0]} +- {result[1]}",
layouts=[span],
)
def run_calculator(self):
"""
Run analysis.
Returns
-------
"""
self.check_input()
# Compute the pre-factor early.
self._calculate_prefactor()
dict_ref = str.encode(
"/".join([self.loaded_property.name, self.loaded_property.name])
)
batch_ds = self.get_batch_dataset([self.loaded_property.name])
for batch in tqdm(
batch_ds,
ncols=70,
total=self.n_batches,
disable=self.memory_manager.minibatch,
):
ensemble_ds = self.get_ensemble_dataset(batch, self.loaded_property.name)
for ensemble in ensemble_ds:
self.ensemble_operation(ensemble[dict_ref])
# Scale, save, and plot the data.
self._apply_averaging_factor()
self._post_operation_processes()
|
PypiClean
|
/ensmallen_graph-0.6.0-cp37-cp37m-manylinux2010_x86_64.whl/ensmallen_graph/datasets/string/deinococcuspimensis.py
|
from typing import Dict
from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph
from ...ensmallen_graph import EnsmallenGraph # pylint: disable=import-error
def DeinococcusPimensis(
directed: bool = False,
verbose: int = 2,
cache_path: str = "graphs/string",
**additional_graph_kwargs: Dict
) -> EnsmallenGraph:
"""Return new instance of the Deinococcus pimensis graph.
The graph is automatically retrieved from the STRING repository.
Parameters
-------------------
directed: bool = False,
Whether to load the graph as directed or undirected.
By default false.
verbose: int = 2,
Whether to show loading bars during the retrieval and building
of the graph.
cache_path: str = "graphs",
Where to store the downloaded graphs.
additional_graph_kwargs: Dict,
Additional graph kwargs.
Returns
-----------------------
Instance of the Deinococcus pimensis graph.
Report
---------------------
At the time of rendering these methods (please see datetime below), the graph
had the following characteristics:
Datetime: 2021-02-02 21:56:15.665833
The undirected graph Deinococcus pimensis has 5072 nodes and 337553 weighted
edges, of which none are self-loops. The graph is dense as it has a density
of 0.02625 and has 79 connected components, where the component with most
nodes has 4846 nodes and the component with the least nodes has 2 nodes.
The graph median node degree is 99, the mean node degree is 133.10, and
the node degree mode is 2. The top 5 most central nodes are 926554.KI912660_gene2424
(degree 1448), 926554.KI912619_gene945 (degree 1355), 926554.KI912656_gene4233
(degree 1313), 926554.KI912636_gene3108 (degree 1222) and 926554.KI912656_gene4267
(degree 1132).
References
---------------------
Please cite the following if you use the data:
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
Usage example
----------------------
The usage of this graph is relatively straightforward:
.. code:: python
# First import the function to retrieve the graph from the datasets
from ensmallen_graph.datasets.string import DeinococcusPimensis
# Then load the graph
graph = DeinococcusPimensis()
# Finally, you can do anything with it, for instance, compute its report:
print(graph)
# If you need to run a link prediction task with validation,
# you can split the graph using a connected holdout as follows:
train_graph, validation_graph = graph.connected_holdout(
# You can use an 80/20 split the holdout, for example.
train_size=0.8,
# The random state is used to reproduce the holdout.
random_state=42,
# Whether to show a loading bar.
verbose=True
)
# Remember that, if you need, you can enable the memory-time trade-offs:
train_graph.enable(
vector_sources=True,
vector_destinations=True,
vector_outbounds=True
)
# Consider using the methods made available in the Embiggen package
# to run graph embedding or link prediction tasks.
"""
return AutomaticallyRetrievedGraph(
graph_name="DeinococcusPimensis",
dataset="string",
directed=directed,
verbose=verbose,
cache_path=cache_path,
additional_graph_kwargs=additional_graph_kwargs
)()
|
PypiClean
|
/prefect_snowflake-0.26.1-py3-none-any.whl/prefect_snowflake/database.py
|
import asyncio
from typing import Any, Dict, List, Optional, Tuple, Union
from prefect import task
from prefect.blocks.abstract import DatabaseBlock
from prefect.utilities.asyncutils import run_sync_in_worker_thread, sync_compatible
from prefect.utilities.hashing import hash_objects
from pydantic import Field
from snowflake.connector.connection import SnowflakeConnection
from snowflake.connector.cursor import SnowflakeCursor
from prefect_snowflake import SnowflakeCredentials
BEGIN_TRANSACTION_STATEMENT = "BEGIN TRANSACTION"
END_TRANSACTION_STATEMENT = "COMMIT"
class SnowflakeConnector(DatabaseBlock):
"""
Block used to manage connections with Snowflake.
Upon instantiating, a connection is created and maintained for the life of
the object until the close method is called.
It is recommended to use this block as a context manager, which will automatically
close the engine and its connections when the context is exited.
It is also recommended that this block is loaded and consumed within a single task
or flow because if the block is passed across separate tasks and flows,
the state of the block's connection and cursor will be lost.
Args:
credentials: The credentials to authenticate with Snowflake.
database: The name of the default database to use.
warehouse: The name of the default warehouse to use.
schema: The name of the default schema to use;
this attribute is accessible through `SnowflakeConnector(...).schema_`.
fetch_size: The number of rows to fetch at a time.
poll_frequency_s: The number of seconds before checking query.
Examples:
Load stored Snowflake connector as a context manager:
```python
from prefect_snowflake.database import SnowflakeConnector
with SnowflakeConnector.load("BLOCK_NAME") as snowflake_connector:
    ...
```
Insert data into database and fetch results.
```python
from prefect_snowflake.database import SnowflakeConnector
with SnowflakeConnector.load("BLOCK_NAME") as conn:
conn.execute(
"CREATE TABLE IF NOT EXISTS customers (name varchar, address varchar);"
)
conn.execute_many(
"INSERT INTO customers (name, address) VALUES (%(name)s, %(address)s);",
seq_of_parameters=[
{"name": "Ford", "address": "Highway 42"},
{"name": "Unknown", "address": "Space"},
{"name": "Me", "address": "Myway 88"},
],
)
results = conn.fetch_all(
"SELECT * FROM customers WHERE address = %(address)s",
parameters={"address": "Space"}
)
print(results)
```
""" # noqa
_block_type_name = "Snowflake Connector"
_logo_url = "https://images.ctfassets.net/gm98wzqotmnx/2DxzAeTM9eHLDcRQx1FR34/f858a501cdff918d398b39365ec2150f/snowflake.png?h=250" # noqa
_documentation_url = "https://prefecthq.github.io/prefect-snowflake/database/#prefect_snowflake.database.SnowflakeConnector" # noqa
_description = "Perform data operations against a Snowflake database."
credentials: SnowflakeCredentials = Field(
default=..., description="The credentials to authenticate with Snowflake."
)
database: str = Field(
default=..., description="The name of the default database to use."
)
warehouse: str = Field(
default=..., description="The name of the default warehouse to use."
)
schema_: str = Field(
default=...,
alias="schema",
description="The name of the default schema to use.",
)
fetch_size: int = Field(
default=1, description="The default number of rows to fetch at a time."
)
poll_frequency_s: int = Field(
default=1,
title="Poll Frequency [seconds]",
description=(
"The number of seconds between checking query "
"status for long running queries."
),
)
_connection: Optional[SnowflakeConnection] = None
_unique_cursors: Dict[str, SnowflakeCursor] = None
def get_connection(self, **connect_kwargs: Dict[str, Any]) -> SnowflakeConnection:
"""
Returns an authenticated connection that can be
used to query from Snowflake databases.
Args:
**connect_kwargs: Additional arguments to pass to
`snowflake.connector.connect`.
Returns:
The authenticated SnowflakeConnection.
Examples:
```python
from prefect_snowflake.credentials import SnowflakeCredentials
from prefect_snowflake.database import SnowflakeConnector
snowflake_credentials = SnowflakeCredentials(
account="account",
user="user",
password="password",
)
snowflake_connector = SnowflakeConnector(
database="database",
warehouse="warehouse",
schema="schema",
credentials=snowflake_credentials
)
with snowflake_connector.get_connection() as connection:
...
```
"""
if self._connection is not None:
return self._connection
connect_params = {
"database": self.database,
"warehouse": self.warehouse,
"schema": self.schema_,
}
connection = self.credentials.get_client(**connect_kwargs, **connect_params)
self._connection = connection
self.logger.info("Started a new connection to Snowflake.")
return connection
def _start_connection(self):
"""
Starts Snowflake database connection.
"""
self.get_connection()
if self._unique_cursors is None:
self._unique_cursors = {}
def _get_cursor(self, inputs: Dict[str, Any]) -> Tuple[bool, SnowflakeCursor]:
"""
Get a Snowflake cursor.
Args:
inputs: The inputs to generate a unique hash, used to decide
whether a new cursor should be used.
Returns:
Whether a cursor is new and a Snowflake cursor.
"""
self._start_connection()
input_hash = hash_objects(inputs)
if input_hash is None:
raise RuntimeError(
"We were not able to hash your inputs, "
"which resulted in an unexpected data return; "
"please open an issue with a reproducible example."
)
if input_hash not in self._unique_cursors.keys():
new_cursor = self._connection.cursor()
self._unique_cursors[input_hash] = new_cursor
return True, new_cursor
else:
existing_cursor = self._unique_cursors[input_hash]
return False, existing_cursor
async def _execute_async(self, cursor: SnowflakeCursor, inputs: Dict[str, Any]):
"""Helper method to execute operations asynchronously."""
response = await run_sync_in_worker_thread(cursor.execute_async, **inputs)
self.logger.info(
f"Executing the operation, {inputs['command']!r}, asynchronously; "
f"polling for the result every {self.poll_frequency_s} seconds."
)
query_id = response["queryId"]
while self._connection.is_still_running(
await run_sync_in_worker_thread(
self._connection.get_query_status_throw_if_error, query_id
)
):
await asyncio.sleep(self.poll_frequency_s)
await run_sync_in_worker_thread(cursor.get_results_from_sfqid, query_id)
def reset_cursors(self) -> None:
"""
Tries to close all opened cursors.
Examples:
Reset the cursors to refresh cursor position.
```python
from prefect_snowflake.database import SnowflakeConnector
with SnowflakeConnector.load("BLOCK_NAME") as conn:
conn.execute(
"CREATE TABLE IF NOT EXISTS customers (name varchar, address varchar);"
)
conn.execute_many(
"INSERT INTO customers (name, address) VALUES (%(name)s, %(address)s);",
seq_of_parameters=[
{"name": "Ford", "address": "Highway 42"},
{"name": "Unknown", "address": "Space"},
{"name": "Me", "address": "Myway 88"},
],
)
print(conn.fetch_one("SELECT * FROM customers")) # Ford
conn.reset_cursors()
print(conn.fetch_one("SELECT * FROM customers")) # should be Ford again
```
""" # noqa
if not self._unique_cursors:
self.logger.info("There were no cursors to reset.")
return
input_hashes = tuple(self._unique_cursors.keys())
for input_hash in input_hashes:
cursor = self._unique_cursors.pop(input_hash)
try:
cursor.close()
except Exception as exc:
self.logger.warning(
f"Failed to close cursor for input hash {input_hash!r}: {exc}"
)
self.logger.info("Successfully reset the cursors.")
@sync_compatible
async def fetch_one(
self,
operation: str,
parameters: Optional[Dict[str, Any]] = None,
**execute_kwargs: Dict[str, Any],
) -> Tuple[Any]:
"""
Fetch a single result from the database.
Repeated calls using the same inputs to *any* of the fetch methods of this
block will skip executing the operation again, and instead,
return the next set of results from the previous execution,
until the reset_cursors method is called.
Args:
operation: The SQL query or other operation to be executed.
parameters: The parameters for the operation.
**execute_kwargs: Additional options to pass to `cursor.execute_async`.
Returns:
A tuple containing the data returned by the database,
where each row is a tuple and each column is a value in the tuple.
Examples:
Fetch one row from the database where address is Space.
```python
from prefect_snowflake.database import SnowflakeConnector
with SnowflakeConnector.load("BLOCK_NAME") as conn:
conn.execute(
"CREATE TABLE IF NOT EXISTS customers (name varchar, address varchar);"
)
conn.execute_many(
"INSERT INTO customers (name, address) VALUES (%(name)s, %(address)s);",
seq_of_parameters=[
{"name": "Ford", "address": "Highway 42"},
{"name": "Unknown", "address": "Space"},
{"name": "Me", "address": "Myway 88"},
],
)
result = conn.fetch_one(
"SELECT * FROM customers WHERE address = %(address)s",
parameters={"address": "Space"}
)
print(result)
```
""" # noqa
inputs = dict(
command=operation,
params=parameters,
**execute_kwargs,
)
new, cursor = self._get_cursor(inputs)
if new:
await self._execute_async(cursor, inputs)
self.logger.debug("Preparing to fetch a row.")
result = await run_sync_in_worker_thread(cursor.fetchone)
return result
@sync_compatible
async def fetch_many(
self,
operation: str,
parameters: Optional[Dict[str, Any]] = None,
size: Optional[int] = None,
**execute_kwargs: Dict[str, Any],
) -> List[Tuple[Any]]:
"""
Fetch a limited number of results from the database.
Repeated calls using the same inputs to *any* of the fetch methods of this
block will skip executing the operation again, and instead,
return the next set of results from the previous execution,
until the reset_cursors method is called.
Args:
operation: The SQL query or other operation to be executed.
parameters: The parameters for the operation.
size: The number of results to return; if None or 0, uses the value of
`fetch_size` configured on the block.
**execute_kwargs: Additional options to pass to `cursor.execute_async`.
Returns:
A list of tuples containing the data returned by the database,
where each row is a tuple and each column is a value in the tuple.
Examples:
Repeatedly fetch two rows from the database where address is Highway 42.
```python
from prefect_snowflake.database import SnowflakeConnector
with SnowflakeConnector.load("BLOCK_NAME") as conn:
conn.execute(
"CREATE TABLE IF NOT EXISTS customers (name varchar, address varchar);"
)
conn.execute_many(
"INSERT INTO customers (name, address) VALUES (%(name)s, %(address)s);",
seq_of_parameters=[
{"name": "Marvin", "address": "Highway 42"},
{"name": "Ford", "address": "Highway 42"},
{"name": "Unknown", "address": "Highway 42"},
{"name": "Me", "address": "Highway 42"},
],
)
result = conn.fetch_many(
"SELECT * FROM customers WHERE address = %(address)s",
parameters={"address": "Highway 42"},
size=2
)
print(result) # Marvin, Ford
result = conn.fetch_many(
"SELECT * FROM customers WHERE address = %(address)s",
parameters={"address": "Highway 42"},
size=2
)
print(result) # Unknown, Me
```
""" # noqa
inputs = dict(
command=operation,
params=parameters,
**execute_kwargs,
)
new, cursor = self._get_cursor(inputs)
if new:
await self._execute_async(cursor, inputs)
size = size or self.fetch_size
self.logger.debug(f"Preparing to fetch {size} rows.")
result = await run_sync_in_worker_thread(cursor.fetchmany, size=size)
return result
@sync_compatible
async def fetch_all(
self,
operation: str,
parameters: Optional[Dict[str, Any]] = None,
**execute_kwargs: Dict[str, Any],
) -> List[Tuple[Any]]:
"""
Fetch all results from the database.
Repeated calls using the same inputs to *any* of the fetch methods of this
block will skip executing the operation again, and instead,
return the next set of results from the previous execution,
until the reset_cursors method is called.
Args:
operation: The SQL query or other operation to be executed.
parameters: The parameters for the operation.
**execute_kwargs: Additional options to pass to `cursor.execute_async`.
Returns:
A list of tuples containing the data returned by the database,
where each row is a tuple and each column is a value in the tuple.
Examples:
Fetch all rows from the database where address is Highway 42.
```python
from prefect_snowflake.database import SnowflakeConnector
with SnowflakeConnector.load("BLOCK_NAME") as conn:
conn.execute(
"CREATE TABLE IF NOT EXISTS customers (name varchar, address varchar);"
)
conn.execute_many(
"INSERT INTO customers (name, address) VALUES (%(name)s, %(address)s);",
seq_of_parameters=[
{"name": "Marvin", "address": "Highway 42"},
{"name": "Ford", "address": "Highway 42"},
{"name": "Unknown", "address": "Highway 42"},
{"name": "Me", "address": "Myway 88"},
],
)
result = conn.fetch_all(
"SELECT * FROM customers WHERE address = %(address)s",
parameters={"address": "Highway 42"},
)
print(result) # Marvin, Ford, Unknown
```
""" # noqa
inputs = dict(
command=operation,
params=parameters,
**execute_kwargs,
)
new, cursor = self._get_cursor(inputs)
if new:
await self._execute_async(cursor, inputs)
self.logger.debug("Preparing to fetch all rows.")
result = await run_sync_in_worker_thread(cursor.fetchall)
return result
@sync_compatible
async def execute(
self,
operation: str,
parameters: Optional[Dict[str, Any]] = None,
**execute_kwargs: Dict[str, Any],
) -> None:
"""
Executes an operation on the database. This method is intended to be used
for operations that do not return data, such as INSERT, UPDATE, or DELETE.
Unlike the fetch methods, this method will always execute the operation
upon calling.
Args:
operation: The SQL query or other operation to be executed.
parameters: The parameters for the operation.
**execute_kwargs: Additional options to pass to `cursor.execute_async`.
Examples:
Create table named customers with two columns, name and address.
```python
from prefect_snowflake.database import SnowflakeConnector
with SnowflakeConnector.load("BLOCK_NAME") as conn:
conn.execute(
"CREATE TABLE IF NOT EXISTS customers (name varchar, address varchar);"
)
```
""" # noqa
self._start_connection()
inputs = dict(
command=operation,
params=parameters,
**execute_kwargs,
)
with self._connection.cursor() as cursor:
await run_sync_in_worker_thread(cursor.execute, **inputs)
self.logger.info(f"Executed the operation, {operation!r}.")
@sync_compatible
async def execute_many(
self,
operation: str,
seq_of_parameters: List[Dict[str, Any]],
) -> None:
"""
Executes many operations on the database. This method is intended to be used
for operations that do not return data, such as INSERT, UPDATE, or DELETE.
Unlike the fetch methods, this method will always execute the operations
upon calling.
Args:
operation: The SQL query or other operation to be executed.
seq_of_parameters: The sequence of parameters for the operation.
Examples:
Create table and insert three rows into it.
```python
from prefect_snowflake.database import SnowflakeConnector
with SnowflakeConnector.load("BLOCK_NAME") as conn:
conn.execute(
"CREATE TABLE IF NOT EXISTS customers (name varchar, address varchar);"
)
conn.execute_many(
"INSERT INTO customers (name, address) VALUES (%(name)s, %(address)s);",
seq_of_parameters=[
{"name": "Marvin", "address": "Highway 42"},
{"name": "Ford", "address": "Highway 42"},
{"name": "Unknown", "address": "Space"},
],
)
```
""" # noqa
self._start_connection()
inputs = dict(
command=operation,
seqparams=seq_of_parameters,
)
with self._connection.cursor() as cursor:
await run_sync_in_worker_thread(cursor.executemany, **inputs)
self.logger.info(
f"Executed {len(seq_of_parameters)} operations off {operation!r}."
)
def close(self):
"""
Closes connection and its cursors.
"""
try:
self.reset_cursors()
finally:
if self._connection is None:
self.logger.info("There was no connection open to be closed.")
return
self._connection.close()
self._connection = None
self.logger.info("Successfully closed the Snowflake connection.")
def __enter__(self):
"""
Start a connection upon entry.
"""
return self
def __exit__(self, *args):
"""
Closes connection and its cursors upon exit.
"""
self.close()
def __getstate__(self):
"""Allows block to be pickled and dumped."""
data = self.__dict__.copy()
data.update({k: None for k in {"_connection", "_unique_cursors"}})
return data
def __setstate__(self, data: dict):
"""Reset connection and cursors upon loading."""
self.__dict__.update(data)
self._start_connection()
@task
async def snowflake_query(
query: str,
snowflake_connector: SnowflakeConnector,
params: Union[Tuple[Any], Dict[str, Any]] = None,
cursor_type: SnowflakeCursor = SnowflakeCursor,
poll_frequency_seconds: int = 1,
) -> List[Tuple[Any]]:
"""
Executes a query against a Snowflake database.
Args:
query: The query to execute against the database.
params: The params to replace the placeholders in the query.
snowflake_connector: The credentials to use to authenticate.
cursor_type: The type of database cursor to use for the query.
poll_frequency_seconds: Number of seconds to wait in between checks for
run completion.
Returns:
        The output of `cursor.fetchall()`.
Examples:
Query Snowflake table with the ID value parameterized.
```python
from prefect import flow
from prefect_snowflake.credentials import SnowflakeCredentials
from prefect_snowflake.database import SnowflakeConnector, snowflake_query
@flow
def snowflake_query_flow():
snowflake_credentials = SnowflakeCredentials(
account="account",
user="user",
password="password",
)
snowflake_connector = SnowflakeConnector(
database="database",
warehouse="warehouse",
schema="schema",
credentials=snowflake_credentials
)
result = snowflake_query(
"SELECT * FROM table WHERE id=%{id_param}s LIMIT 8;",
snowflake_connector,
params={"id_param": 1}
)
return result
snowflake_query_flow()
```
"""
# context manager automatically rolls back failed transactions and closes
with snowflake_connector.get_connection() as connection:
with connection.cursor(cursor_type) as cursor:
response = cursor.execute_async(query, params=params)
query_id = response["queryId"]
while connection.is_still_running(
connection.get_query_status_throw_if_error(query_id)
):
await asyncio.sleep(poll_frequency_seconds)
cursor.get_results_from_sfqid(query_id)
result = cursor.fetchall()
return result
@task
async def snowflake_multiquery(
queries: List[str],
snowflake_connector: SnowflakeConnector,
params: Union[Tuple[Any], Dict[str, Any]] = None,
cursor_type: SnowflakeCursor = SnowflakeCursor,
as_transaction: bool = False,
return_transaction_control_results: bool = False,
poll_frequency_seconds: int = 1,
) -> List[List[Tuple[Any]]]:
"""
Executes multiple queries against a Snowflake database in a shared session.
Allows execution in a transaction.
Args:
queries: The list of queries to execute against the database.
params: The params to replace the placeholders in the query.
snowflake_connector: The credentials to use to authenticate.
cursor_type: The type of database cursor to use for the query.
as_transaction: If True, queries are executed in a transaction.
return_transaction_control_results: Determines if the results of queries
controlling the transaction (BEGIN/COMMIT) should be returned.
poll_frequency_seconds: Number of seconds to wait in between checks for
run completion.
Returns:
        List of the outputs of `cursor.fetchall()` for each query.
Examples:
Query Snowflake table with the ID value parameterized.
```python
from prefect import flow
from prefect_snowflake.credentials import SnowflakeCredentials
from prefect_snowflake.database import SnowflakeConnector, snowflake_multiquery
@flow
def snowflake_multiquery_flow():
snowflake_credentials = SnowflakeCredentials(
account="account",
user="user",
password="password",
)
snowflake_connector = SnowflakeConnector(
database="database",
warehouse="warehouse",
schema="schema",
credentials=snowflake_credentials
)
result = snowflake_multiquery(
["SELECT * FROM table WHERE id=%{id_param}s LIMIT 8;", "SELECT 1,2"],
snowflake_connector,
params={"id_param": 1},
as_transaction=True
)
return result
snowflake_multiquery_flow()
```
"""
with snowflake_connector.get_connection() as connection:
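        # When run as a transaction, wrap the caller's queries in BEGIN/COMMIT so
        # they execute atomically within the shared session.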
if as_transaction:
queries.insert(0, BEGIN_TRANSACTION_STATEMENT)
queries.append(END_TRANSACTION_STATEMENT)
with connection.cursor(cursor_type) as cursor:
results = []
for query in queries:
response = cursor.execute_async(query, params=params)
query_id = response["queryId"]
while connection.is_still_running(
connection.get_query_status_throw_if_error(query_id)
):
await asyncio.sleep(poll_frequency_seconds)
cursor.get_results_from_sfqid(query_id)
result = cursor.fetchall()
results.append(result)
# cut off results from BEGIN/COMMIT queries
if as_transaction and not return_transaction_control_results:
return results[1:-1]
else:
return results
@task
async def snowflake_query_sync(
query: str,
snowflake_connector: SnowflakeConnector,
params: Union[Tuple[Any], Dict[str, Any]] = None,
cursor_type: SnowflakeCursor = SnowflakeCursor,
) -> List[Tuple[Any]]:
"""
Executes a query in sync mode against a Snowflake database.
Args:
query: The query to execute against the database.
params: The params to replace the placeholders in the query.
snowflake_connector: The credentials to use to authenticate.
cursor_type: The type of database cursor to use for the query.
Returns:
        The output of `cursor.fetchall()`.
Examples:
Execute a put statement.
```python
from prefect import flow
from prefect_snowflake.credentials import SnowflakeCredentials
        from prefect_snowflake.database import SnowflakeConnector, snowflake_query_sync
@flow
def snowflake_query_sync_flow():
snowflake_credentials = SnowflakeCredentials(
account="account",
user="user",
password="password",
)
snowflake_connector = SnowflakeConnector(
database="database",
warehouse="warehouse",
schema="schema",
credentials=snowflake_credentials
)
result = snowflake_query_sync(
"put file://afile.csv @mystage;",
snowflake_connector,
)
return result
snowflake_query_sync_flow()
```
"""
# context manager automatically rolls back failed transactions and closes
with snowflake_connector.get_connection() as connection:
with connection.cursor(cursor_type) as cursor:
cursor.execute(query, params=params)
result = cursor.fetchall()
return result
/tb-rest-client-3.5.tar.gz/tb-rest-client-3.5/tb_rest_client/models/models_pe/alarm_info.py
# Copyright 2023. ThingsBoard
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pprint
import re # noqa: F401
import six
class AlarmInfo(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'id': 'AlarmId',
'created_time': 'int',
'tenant_id': 'TenantId',
'customer_id': 'CustomerId',
'name': 'str',
'type': 'str',
'originator': 'EntityId',
'severity': 'str',
'acknowledged': 'bool',
'cleared': 'bool',
'assignee_id': 'UserId',
'start_ts': 'int',
'end_ts': 'int',
'ack_ts': 'int',
'clear_ts': 'int',
'assign_ts': 'int',
'details': 'JsonNode',
'propagate_to_owner_hierarchy': 'bool',
'propagate': 'bool',
'propagate_to_tenant': 'bool',
'propagate_relation_types': 'list[str]',
'propagate_to_owner': 'bool',
'originator_name': 'str',
'originator_label': 'str',
'assignee': 'AlarmAssignee',
'status': 'str'
}
attribute_map = {
'id': 'id',
'created_time': 'createdTime',
'tenant_id': 'tenantId',
'customer_id': 'customerId',
'name': 'name',
'type': 'type',
'originator': 'originator',
'severity': 'severity',
'acknowledged': 'acknowledged',
'cleared': 'cleared',
'assignee_id': 'assigneeId',
'start_ts': 'startTs',
'end_ts': 'endTs',
'ack_ts': 'ackTs',
'clear_ts': 'clearTs',
'assign_ts': 'assignTs',
'details': 'details',
'propagate_to_owner_hierarchy': 'propagateToOwnerHierarchy',
'propagate': 'propagate',
'propagate_to_tenant': 'propagateToTenant',
'propagate_relation_types': 'propagateRelationTypes',
'propagate_to_owner': 'propagateToOwner',
'originator_name': 'originatorName',
'originator_label': 'originatorLabel',
'assignee': 'assignee',
'status': 'status'
}
def __init__(self, id=None, created_time=None, tenant_id=None, customer_id=None, name=None, type=None, originator=None, severity=None, acknowledged=None, cleared=None, assignee_id=None, start_ts=None, end_ts=None, ack_ts=None, clear_ts=None, assign_ts=None, details=None, propagate_to_owner_hierarchy=None, propagate=None, propagate_to_tenant=None, propagate_relation_types=None, propagate_to_owner=None, originator_name=None, originator_label=None, assignee=None, status=None): # noqa: E501
"""AlarmInfo - a model defined in Swagger""" # noqa: E501
self._id = None
self._created_time = None
self._tenant_id = None
self._customer_id = None
self._name = None
self._type = None
self._originator = None
self._severity = None
self._acknowledged = None
self._cleared = None
self._assignee_id = None
self._start_ts = None
self._end_ts = None
self._ack_ts = None
self._clear_ts = None
self._assign_ts = None
self._details = None
self._propagate_to_owner_hierarchy = None
self._propagate = None
self._propagate_to_tenant = None
self._propagate_relation_types = None
self._propagate_to_owner = None
self._originator_name = None
self._originator_label = None
self._assignee = None
self._status = None
self.discriminator = None
if id is not None:
self.id = id
if created_time is not None:
self.created_time = created_time
if tenant_id is not None:
self.tenant_id = tenant_id
if customer_id is not None:
self.customer_id = customer_id
self.name = name
self.type = type
self.originator = originator
self.severity = severity
self.acknowledged = acknowledged
self.cleared = cleared
if assignee_id is not None:
self.assignee_id = assignee_id
if start_ts is not None:
self.start_ts = start_ts
if end_ts is not None:
self.end_ts = end_ts
if ack_ts is not None:
self.ack_ts = ack_ts
if clear_ts is not None:
self.clear_ts = clear_ts
if assign_ts is not None:
self.assign_ts = assign_ts
if details is not None:
self.details = details
if propagate_to_owner_hierarchy is not None:
self.propagate_to_owner_hierarchy = propagate_to_owner_hierarchy
if propagate is not None:
self.propagate = propagate
if propagate_to_tenant is not None:
self.propagate_to_tenant = propagate_to_tenant
if propagate_relation_types is not None:
self.propagate_relation_types = propagate_relation_types
if propagate_to_owner is not None:
self.propagate_to_owner = propagate_to_owner
if originator_name is not None:
self.originator_name = originator_name
if originator_label is not None:
self.originator_label = originator_label
if assignee is not None:
self.assignee = assignee
self.status = status
@property
def id(self):
"""Gets the id of this AlarmInfo. # noqa: E501
:return: The id of this AlarmInfo. # noqa: E501
:rtype: AlarmId
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this AlarmInfo.
:param id: The id of this AlarmInfo. # noqa: E501
:type: AlarmId
"""
self._id = id
@property
def created_time(self):
"""Gets the created_time of this AlarmInfo. # noqa: E501
Timestamp of the alarm creation, in milliseconds # noqa: E501
:return: The created_time of this AlarmInfo. # noqa: E501
:rtype: int
"""
return self._created_time
@created_time.setter
def created_time(self, created_time):
"""Sets the created_time of this AlarmInfo.
Timestamp of the alarm creation, in milliseconds # noqa: E501
:param created_time: The created_time of this AlarmInfo. # noqa: E501
:type: int
"""
self._created_time = created_time
@property
def tenant_id(self):
"""Gets the tenant_id of this AlarmInfo. # noqa: E501
:return: The tenant_id of this AlarmInfo. # noqa: E501
:rtype: TenantId
"""
return self._tenant_id
@tenant_id.setter
def tenant_id(self, tenant_id):
"""Sets the tenant_id of this AlarmInfo.
:param tenant_id: The tenant_id of this AlarmInfo. # noqa: E501
:type: TenantId
"""
self._tenant_id = tenant_id
@property
def customer_id(self):
"""Gets the customer_id of this AlarmInfo. # noqa: E501
:return: The customer_id of this AlarmInfo. # noqa: E501
:rtype: CustomerId
"""
return self._customer_id
@customer_id.setter
def customer_id(self, customer_id):
"""Sets the customer_id of this AlarmInfo.
:param customer_id: The customer_id of this AlarmInfo. # noqa: E501
:type: CustomerId
"""
self._customer_id = customer_id
@property
def name(self):
"""Gets the name of this AlarmInfo. # noqa: E501
representing type of the Alarm # noqa: E501
:return: The name of this AlarmInfo. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this AlarmInfo.
representing type of the Alarm # noqa: E501
:param name: The name of this AlarmInfo. # noqa: E501
:type: str
"""
if name is None:
raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501
self._name = name
@property
def type(self):
"""Gets the type of this AlarmInfo. # noqa: E501
representing type of the Alarm # noqa: E501
:return: The type of this AlarmInfo. # noqa: E501
:rtype: str
"""
return self._type
@type.setter
def type(self, type):
"""Sets the type of this AlarmInfo.
representing type of the Alarm # noqa: E501
:param type: The type of this AlarmInfo. # noqa: E501
:type: str
"""
if type is None:
raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501
self._type = type
@property
def originator(self):
"""Gets the originator of this AlarmInfo. # noqa: E501
:return: The originator of this AlarmInfo. # noqa: E501
:rtype: EntityId
"""
return self._originator
@originator.setter
def originator(self, originator):
"""Sets the originator of this AlarmInfo.
:param originator: The originator of this AlarmInfo. # noqa: E501
:type: EntityId
"""
if originator is None:
raise ValueError("Invalid value for `originator`, must not be `None`") # noqa: E501
self._originator = originator
@property
def severity(self):
"""Gets the severity of this AlarmInfo. # noqa: E501
Alarm severity # noqa: E501
:return: The severity of this AlarmInfo. # noqa: E501
:rtype: str
"""
return self._severity
@severity.setter
def severity(self, severity):
"""Sets the severity of this AlarmInfo.
Alarm severity # noqa: E501
:param severity: The severity of this AlarmInfo. # noqa: E501
:type: str
"""
if severity is None:
raise ValueError("Invalid value for `severity`, must not be `None`") # noqa: E501
allowed_values = ["CRITICAL", "INDETERMINATE", "MAJOR", "MINOR", "WARNING"] # noqa: E501
if severity not in allowed_values:
raise ValueError(
"Invalid value for `severity` ({0}), must be one of {1}" # noqa: E501
.format(severity, allowed_values)
)
self._severity = severity
@property
def acknowledged(self):
"""Gets the acknowledged of this AlarmInfo. # noqa: E501
Acknowledged # noqa: E501
:return: The acknowledged of this AlarmInfo. # noqa: E501
:rtype: bool
"""
return self._acknowledged
@acknowledged.setter
def acknowledged(self, acknowledged):
"""Sets the acknowledged of this AlarmInfo.
Acknowledged # noqa: E501
:param acknowledged: The acknowledged of this AlarmInfo. # noqa: E501
:type: bool
"""
if acknowledged is None:
raise ValueError("Invalid value for `acknowledged`, must not be `None`") # noqa: E501
self._acknowledged = acknowledged
@property
def cleared(self):
"""Gets the cleared of this AlarmInfo. # noqa: E501
Cleared # noqa: E501
:return: The cleared of this AlarmInfo. # noqa: E501
:rtype: bool
"""
return self._cleared
@cleared.setter
def cleared(self, cleared):
"""Sets the cleared of this AlarmInfo.
Cleared # noqa: E501
:param cleared: The cleared of this AlarmInfo. # noqa: E501
:type: bool
"""
if cleared is None:
raise ValueError("Invalid value for `cleared`, must not be `None`") # noqa: E501
self._cleared = cleared
@property
def assignee_id(self):
"""Gets the assignee_id of this AlarmInfo. # noqa: E501
:return: The assignee_id of this AlarmInfo. # noqa: E501
:rtype: UserId
"""
return self._assignee_id
@assignee_id.setter
def assignee_id(self, assignee_id):
"""Sets the assignee_id of this AlarmInfo.
:param assignee_id: The assignee_id of this AlarmInfo. # noqa: E501
:type: UserId
"""
self._assignee_id = assignee_id
@property
def start_ts(self):
"""Gets the start_ts of this AlarmInfo. # noqa: E501
Timestamp of the alarm start time, in milliseconds # noqa: E501
:return: The start_ts of this AlarmInfo. # noqa: E501
:rtype: int
"""
return self._start_ts
@start_ts.setter
def start_ts(self, start_ts):
"""Sets the start_ts of this AlarmInfo.
Timestamp of the alarm start time, in milliseconds # noqa: E501
:param start_ts: The start_ts of this AlarmInfo. # noqa: E501
:type: int
"""
self._start_ts = start_ts
@property
def end_ts(self):
"""Gets the end_ts of this AlarmInfo. # noqa: E501
        Timestamp of the alarm end time (last time update), in milliseconds  # noqa: E501
:return: The end_ts of this AlarmInfo. # noqa: E501
:rtype: int
"""
return self._end_ts
@end_ts.setter
def end_ts(self, end_ts):
"""Sets the end_ts of this AlarmInfo.
        Timestamp of the alarm end time (last time update), in milliseconds  # noqa: E501
:param end_ts: The end_ts of this AlarmInfo. # noqa: E501
:type: int
"""
self._end_ts = end_ts
@property
def ack_ts(self):
"""Gets the ack_ts of this AlarmInfo. # noqa: E501
Timestamp of the alarm acknowledgement, in milliseconds # noqa: E501
:return: The ack_ts of this AlarmInfo. # noqa: E501
:rtype: int
"""
return self._ack_ts
@ack_ts.setter
def ack_ts(self, ack_ts):
"""Sets the ack_ts of this AlarmInfo.
Timestamp of the alarm acknowledgement, in milliseconds # noqa: E501
:param ack_ts: The ack_ts of this AlarmInfo. # noqa: E501
:type: int
"""
self._ack_ts = ack_ts
@property
def clear_ts(self):
"""Gets the clear_ts of this AlarmInfo. # noqa: E501
Timestamp of the alarm clearing, in milliseconds # noqa: E501
:return: The clear_ts of this AlarmInfo. # noqa: E501
:rtype: int
"""
return self._clear_ts
@clear_ts.setter
def clear_ts(self, clear_ts):
"""Sets the clear_ts of this AlarmInfo.
Timestamp of the alarm clearing, in milliseconds # noqa: E501
:param clear_ts: The clear_ts of this AlarmInfo. # noqa: E501
:type: int
"""
self._clear_ts = clear_ts
@property
def assign_ts(self):
"""Gets the assign_ts of this AlarmInfo. # noqa: E501
Timestamp of the alarm assignment, in milliseconds # noqa: E501
:return: The assign_ts of this AlarmInfo. # noqa: E501
:rtype: int
"""
return self._assign_ts
@assign_ts.setter
def assign_ts(self, assign_ts):
"""Sets the assign_ts of this AlarmInfo.
Timestamp of the alarm assignment, in milliseconds # noqa: E501
:param assign_ts: The assign_ts of this AlarmInfo. # noqa: E501
:type: int
"""
self._assign_ts = assign_ts
@property
def details(self):
"""Gets the details of this AlarmInfo. # noqa: E501
:return: The details of this AlarmInfo. # noqa: E501
:rtype: JsonNode
"""
return self._details
@details.setter
def details(self, details):
"""Sets the details of this AlarmInfo.
:param details: The details of this AlarmInfo. # noqa: E501
:type: JsonNode
"""
self._details = details
@property
def propagate_to_owner_hierarchy(self):
"""Gets the propagate_to_owner_hierarchy of this AlarmInfo. # noqa: E501
Propagation flag to specify if alarm should be propagated to the owner (tenant or customer) and all parent owners in the customer hierarchy # noqa: E501
:return: The propagate_to_owner_hierarchy of this AlarmInfo. # noqa: E501
:rtype: bool
"""
return self._propagate_to_owner_hierarchy
@propagate_to_owner_hierarchy.setter
def propagate_to_owner_hierarchy(self, propagate_to_owner_hierarchy):
"""Sets the propagate_to_owner_hierarchy of this AlarmInfo.
Propagation flag to specify if alarm should be propagated to the owner (tenant or customer) and all parent owners in the customer hierarchy # noqa: E501
:param propagate_to_owner_hierarchy: The propagate_to_owner_hierarchy of this AlarmInfo. # noqa: E501
:type: bool
"""
self._propagate_to_owner_hierarchy = propagate_to_owner_hierarchy
@property
def propagate(self):
"""Gets the propagate of this AlarmInfo. # noqa: E501
Propagation flag to specify if alarm should be propagated to parent entities of alarm originator # noqa: E501
:return: The propagate of this AlarmInfo. # noqa: E501
:rtype: bool
"""
return self._propagate
@propagate.setter
def propagate(self, propagate):
"""Sets the propagate of this AlarmInfo.
Propagation flag to specify if alarm should be propagated to parent entities of alarm originator # noqa: E501
:param propagate: The propagate of this AlarmInfo. # noqa: E501
:type: bool
"""
self._propagate = propagate
@property
def propagate_to_tenant(self):
"""Gets the propagate_to_tenant of this AlarmInfo. # noqa: E501
Propagation flag to specify if alarm should be propagated to the tenant entity # noqa: E501
:return: The propagate_to_tenant of this AlarmInfo. # noqa: E501
:rtype: bool
"""
return self._propagate_to_tenant
@propagate_to_tenant.setter
def propagate_to_tenant(self, propagate_to_tenant):
"""Sets the propagate_to_tenant of this AlarmInfo.
Propagation flag to specify if alarm should be propagated to the tenant entity # noqa: E501
:param propagate_to_tenant: The propagate_to_tenant of this AlarmInfo. # noqa: E501
:type: bool
"""
self._propagate_to_tenant = propagate_to_tenant
@property
def propagate_relation_types(self):
"""Gets the propagate_relation_types of this AlarmInfo. # noqa: E501
JSON array of relation types that should be used for propagation. By default, 'propagateRelationTypes' array is empty which means that the alarm will be propagated based on any relation type to parent entities. This parameter should be used only in case when 'propagate' parameter is set to true, otherwise, 'propagateRelationTypes' array will be ignored. # noqa: E501
:return: The propagate_relation_types of this AlarmInfo. # noqa: E501
:rtype: list[str]
"""
return self._propagate_relation_types
@propagate_relation_types.setter
def propagate_relation_types(self, propagate_relation_types):
"""Sets the propagate_relation_types of this AlarmInfo.
JSON array of relation types that should be used for propagation. By default, 'propagateRelationTypes' array is empty which means that the alarm will be propagated based on any relation type to parent entities. This parameter should be used only in case when 'propagate' parameter is set to true, otherwise, 'propagateRelationTypes' array will be ignored. # noqa: E501
:param propagate_relation_types: The propagate_relation_types of this AlarmInfo. # noqa: E501
:type: list[str]
"""
self._propagate_relation_types = propagate_relation_types
@property
def propagate_to_owner(self):
"""Gets the propagate_to_owner of this AlarmInfo. # noqa: E501
Propagation flag to specify if alarm should be propagated to the owner (tenant or customer) of alarm originator # noqa: E501
:return: The propagate_to_owner of this AlarmInfo. # noqa: E501
:rtype: bool
"""
return self._propagate_to_owner
@propagate_to_owner.setter
def propagate_to_owner(self, propagate_to_owner):
"""Sets the propagate_to_owner of this AlarmInfo.
Propagation flag to specify if alarm should be propagated to the owner (tenant or customer) of alarm originator # noqa: E501
:param propagate_to_owner: The propagate_to_owner of this AlarmInfo. # noqa: E501
:type: bool
"""
self._propagate_to_owner = propagate_to_owner
@property
def originator_name(self):
"""Gets the originator_name of this AlarmInfo. # noqa: E501
Alarm originator name # noqa: E501
:return: The originator_name of this AlarmInfo. # noqa: E501
:rtype: str
"""
return self._originator_name
@originator_name.setter
def originator_name(self, originator_name):
"""Sets the originator_name of this AlarmInfo.
Alarm originator name # noqa: E501
:param originator_name: The originator_name of this AlarmInfo. # noqa: E501
:type: str
"""
self._originator_name = originator_name
@property
def originator_label(self):
"""Gets the originator_label of this AlarmInfo. # noqa: E501
Alarm originator label # noqa: E501
:return: The originator_label of this AlarmInfo. # noqa: E501
:rtype: str
"""
return self._originator_label
@originator_label.setter
def originator_label(self, originator_label):
"""Sets the originator_label of this AlarmInfo.
Alarm originator label # noqa: E501
:param originator_label: The originator_label of this AlarmInfo. # noqa: E501
:type: str
"""
self._originator_label = originator_label
@property
def assignee(self):
"""Gets the assignee of this AlarmInfo. # noqa: E501
:return: The assignee of this AlarmInfo. # noqa: E501
:rtype: AlarmAssignee
"""
return self._assignee
@assignee.setter
def assignee(self, assignee):
"""Sets the assignee of this AlarmInfo.
:param assignee: The assignee of this AlarmInfo. # noqa: E501
:type: AlarmAssignee
"""
self._assignee = assignee
@property
def status(self):
"""Gets the status of this AlarmInfo. # noqa: E501
status of the Alarm # noqa: E501
:return: The status of this AlarmInfo. # noqa: E501
:rtype: str
"""
return self._status
@status.setter
def status(self, status):
"""Sets the status of this AlarmInfo.
status of the Alarm # noqa: E501
:param status: The status of this AlarmInfo. # noqa: E501
:type: str
"""
if status is None:
raise ValueError("Invalid value for `status`, must not be `None`") # noqa: E501
allowed_values = ["ACTIVE_ACK", "ACTIVE_UNACK", "CLEARED_ACK", "CLEARED_UNACK"] # noqa: E501
if status not in allowed_values:
raise ValueError(
"Invalid value for `status` ({0}), must be one of {1}" # noqa: E501
.format(status, allowed_values)
)
self._status = status
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(AlarmInfo, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, AlarmInfo):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
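# A minimal, self-contained usage sketch of the model defined above. It is not
# part of the generated client; the plain dict passed as `originator` merely
# stands in for an EntityId instance and all field values are illustrative.
if __name__ == "__main__":
    alarm = AlarmInfo(
        name="High Temperature",
        type="High Temperature",
        originator={"entityType": "DEVICE", "id": "example-device-id"},
        severity="CRITICAL",      # validated against the allowed severities
        acknowledged=False,
        cleared=False,
        status="ACTIVE_UNACK",    # validated against the allowed statuses
    )
    # to_dict() serializes nested models recursively; __repr__ pretty-prints it.
    print(alarm.to_dict())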
/py-pure-client-1.38.0.tar.gz/py-pure-client-1.38.0/pypureclient/flasharray/FA_2_17/models/host_space.py
import pprint
import re
import six
import typing
from ....properties import Property
if typing.TYPE_CHECKING:
from pypureclient.flasharray.FA_2_17 import models
class HostSpace(object):
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'name': 'str',
'space': 'Space',
'time': 'int'
}
attribute_map = {
'name': 'name',
'space': 'space',
'time': 'time'
}
required_args = {
}
def __init__(
self,
name=None, # type: str
space=None, # type: models.Space
time=None, # type: int
):
"""
Keyword args:
name (str): A locally unique, system-generated name. The name cannot be modified.
space (Space): Displays size and space consumption information.
time (int)
"""
if name is not None:
self.name = name
if space is not None:
self.space = space
if time is not None:
self.time = time
def __setattr__(self, key, value):
if key not in self.attribute_map:
raise KeyError("Invalid key `{}` for `HostSpace`".format(key))
self.__dict__[key] = value
def __getattribute__(self, item):
value = object.__getattribute__(self, item)
if isinstance(value, Property):
raise AttributeError
else:
return value
def __getitem__(self, key):
if key not in self.attribute_map:
raise KeyError("Invalid key `{}` for `HostSpace`".format(key))
return object.__getattribute__(self, key)
def __setitem__(self, key, value):
if key not in self.attribute_map:
raise KeyError("Invalid key `{}` for `HostSpace`".format(key))
object.__setattr__(self, key, value)
def __delitem__(self, key):
if key not in self.attribute_map:
raise KeyError("Invalid key `{}` for `HostSpace`".format(key))
object.__delattr__(self, key)
def keys(self):
return self.attribute_map.keys()
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
if hasattr(self, attr):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(HostSpace, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, HostSpace):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
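# A minimal, self-contained usage sketch of the model defined above. It is not
# part of the generated client; the plain dict passed as `space` merely stands
# in for a Space instance and the numeric values are illustrative.
if __name__ == "__main__":
    host_space = HostSpace(
        name="host01",
        space={"total_physical": 1099511627776},
        time=1577836800000,  # epoch milliseconds
    )
    print(host_space.to_dict())
    print(list(host_space.keys()))  # ['name', 'space', 'time']
    # Attribute access is restricted to the keys in attribute_map:
    try:
        host_space.unknown = 1
    except KeyError as exc:
        print(exc)  # Invalid key `unknown` for `HostSpace`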
/openpeerpower_frontend-20210523.2-py3-none-any.whl/opp_frontend/frontend_latest/chunk.1de86c48b013a218cc13.js
(self.webpackChunkopenpeerpower_frontend=self.webpackChunkopenpeerpower_frontend||[]).push([[8851],{22098:(e,t,r)=>{"use strict";var i=r(15652);function n(){n=function(){return e};var e={elementsDefinitionOrder:[["method"],["field"]],initializeInstanceElements:function(e,t){["method","field"].forEach((function(r){t.forEach((function(t){t.kind===r&&"own"===t.placement&&this.defineClassElement(e,t)}),this)}),this)},initializeClassElements:function(e,t){var r=e.prototype;["method","field"].forEach((function(i){t.forEach((function(t){var n=t.placement;if(t.kind===i&&("static"===n||"prototype"===n)){var o="static"===n?e:r;this.defineClassElement(o,t)}}),this)}),this)},defineClassElement:function(e,t){var r=t.descriptor;if("field"===t.kind){var i=t.initializer;r={enumerable:r.enumerable,writable:r.writable,configurable:r.configurable,value:void 0===i?void 0:i.call(e)}}Object.defineProperty(e,t.key,r)},decorateClass:function(e,t){var r=[],i=[],n={static:[],prototype:[],own:[]};if(e.forEach((function(e){this.addElementPlacement(e,n)}),this),e.forEach((function(e){if(!s(e))return r.push(e);var t=this.decorateElement(e,n);r.push(t.element),r.push.apply(r,t.extras),i.push.apply(i,t.finishers)}),this),!t)return{elements:r,finishers:i};var o=this.decorateConstructor(r,t);return i.push.apply(i,o.finishers),o.finishers=i,o},addElementPlacement:function(e,t,r){var i=t[e.placement];if(!r&&-1!==i.indexOf(e.key))throw new TypeError("Duplicated element ("+e.key+")");i.push(e.key)},decorateElement:function(e,t){for(var r=[],i=[],n=e.decorators,o=n.length-1;o>=0;o--){var a=t[e.placement];a.splice(a.indexOf(e.key),1);var s=this.fromElementDescriptor(e),l=this.toElementFinisherExtras((0,n[o])(s)||s);e=l.element,this.addElementPlacement(e,t),l.finisher&&i.push(l.finisher);var c=l.extras;if(c){for(var d=0;d<c.length;d++)this.addElementPlacement(c[d],t);r.push.apply(r,c)}}return{element:e,finishers:i,extras:r}},decorateConstructor:function(e,t){for(var r=[],i=t.length-1;i>=0;i--){var n=this.fromClassDescriptor(e),o=this.toClassDescriptor((0,t[i])(n)||n);if(void 0!==o.finisher&&r.push(o.finisher),void 0!==o.elements){e=o.elements;for(var a=0;a<e.length-1;a++)for(var s=a+1;s<e.length;s++)if(e[a].key===e[s].key&&e[a].placement===e[s].placement)throw new TypeError("Duplicated element ("+e[a].key+")")}}return{elements:e,finishers:r}},fromElementDescriptor:function(e){var t={kind:e.kind,key:e.key,placement:e.placement,descriptor:e.descriptor};return Object.defineProperty(t,Symbol.toStringTag,{value:"Descriptor",configurable:!0}),"field"===e.kind&&(t.initializer=e.initializer),t},toElementDescriptors:function(e){var t;if(void 0!==e)return(t=e,function(e){if(Array.isArray(e))return e}(t)||function(e){if("undefined"!=typeof Symbol&&Symbol.iterator in Object(e))return Array.from(e)}(t)||function(e,t){if(e){if("string"==typeof e)return p(e,t);var r=Object.prototype.toString.call(e).slice(8,-1);return"Object"===r&&e.constructor&&(r=e.constructor.name),"Map"===r||"Set"===r?Array.from(e):"Arguments"===r||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(r)?p(e,t):void 0}}(t)||function(){throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()).map((function(e){var t=this.toElementDescriptor(e);return this.disallowProperty(e,"finisher","An element descriptor"),this.disallowProperty(e,"extras","An element descriptor"),t}),this)},toElementDescriptor:function(e){var t=String(e.kind);if("method"!==t&&"field"!==t)throw new 
TypeError('An element descriptor\'s .kind property must be either "method" or "field", but a decorator created an element descriptor with .kind "'+t+'"');var r=d(e.key),i=String(e.placement);if("static"!==i&&"prototype"!==i&&"own"!==i)throw new TypeError('An element descriptor\'s .placement property must be one of "static", "prototype" or "own", but a decorator created an element descriptor with .placement "'+i+'"');var n=e.descriptor;this.disallowProperty(e,"elements","An element descriptor");var o={kind:t,key:r,placement:i,descriptor:Object.assign({},n)};return"field"!==t?this.disallowProperty(e,"initializer","A method descriptor"):(this.disallowProperty(n,"get","The property descriptor of a field descriptor"),this.disallowProperty(n,"set","The property descriptor of a field descriptor"),this.disallowProperty(n,"value","The property descriptor of a field descriptor"),o.initializer=e.initializer),o},toElementFinisherExtras:function(e){return{element:this.toElementDescriptor(e),finisher:c(e,"finisher"),extras:this.toElementDescriptors(e.extras)}},fromClassDescriptor:function(e){var t={kind:"class",elements:e.map(this.fromElementDescriptor,this)};return Object.defineProperty(t,Symbol.toStringTag,{value:"Descriptor",configurable:!0}),t},toClassDescriptor:function(e){var t=String(e.kind);if("class"!==t)throw new TypeError('A class descriptor\'s .kind property must be "class", but a decorator created a class descriptor with .kind "'+t+'"');this.disallowProperty(e,"key","A class descriptor"),this.disallowProperty(e,"placement","A class descriptor"),this.disallowProperty(e,"descriptor","A class descriptor"),this.disallowProperty(e,"initializer","A class descriptor"),this.disallowProperty(e,"extras","A class descriptor");var r=c(e,"finisher");return{elements:this.toElementDescriptors(e.elements),finisher:r}},runClassFinishers:function(e,t){for(var r=0;r<t.length;r++){var i=(0,t[r])(e);if(void 0!==i){if("function"!=typeof i)throw new TypeError("Finishers must return a constructor.");e=i}}return e},disallowProperty:function(e,t,r){if(void 0!==e[t])throw new TypeError(r+" can't have a ."+t+" property.")}};return e}function o(e){var t,r=d(e.key);"method"===e.kind?t={value:e.value,writable:!0,configurable:!0,enumerable:!1}:"get"===e.kind?t={get:e.value,configurable:!0,enumerable:!1}:"set"===e.kind?t={set:e.value,configurable:!0,enumerable:!1}:"field"===e.kind&&(t={configurable:!0,writable:!0,enumerable:!0});var i={kind:"field"===e.kind?"field":"method",key:r,placement:e.static?"static":"field"===e.kind?"own":"prototype",descriptor:t};return e.decorators&&(i.decorators=e.decorators),"field"===e.kind&&(i.initializer=e.value),i}function a(e,t){void 0!==e.descriptor.get?t.descriptor.get=e.descriptor.get:t.descriptor.set=e.descriptor.set}function s(e){return e.decorators&&e.decorators.length}function l(e){return void 0!==e&&!(void 0===e.value&&void 0===e.writable)}function c(e,t){var r=e[t];if(void 0!==r&&"function"!=typeof r)throw new TypeError("Expected '"+t+"' to be a function");return r}function d(e){var t=function(e,t){if("object"!=typeof e||null===e)return e;var r=e[Symbol.toPrimitive];if(void 0!==r){var i=r.call(e,t||"default");if("object"!=typeof i)return i;throw new TypeError("@@toPrimitive must return a primitive value.")}return("string"===t?String:Number)(e)}(e,"string");return"symbol"==typeof t?t:String(t)}function p(e,t){(null==t||t>e.length)&&(t=e.length);for(var r=0,i=new Array(t);r<t;r++)i[r]=e[r];return i}!function(e,t,r,i){var c=n();if(i)for(var d=0;d<i.length;d++)c=i[d](c);var 
p=t((function(e){c.initializeInstanceElements(e,f.elements)}),r),f=c.decorateClass(function(e){for(var t=[],r=function(e){return"method"===e.kind&&e.key===o.key&&e.placement===o.placement},i=0;i<e.length;i++){var n,o=e[i];if("method"===o.kind&&(n=t.find(r)))if(l(o.descriptor)||l(n.descriptor)){if(s(o)||s(n))throw new ReferenceError("Duplicated methods ("+o.key+") can't be decorated.");n.descriptor=o.descriptor}else{if(s(o)){if(s(n))throw new ReferenceError("Decorators can't be placed on different accessors with for the same property ("+o.key+").");n.decorators=o.decorators}a(o,n)}else t.push(o)}return t}(p.d.map(o)),e);c.initializeClassElements(p.F,f.elements),c.runClassFinishers(p.F,f.finishers)}([(0,i.Mo)("ha-card")],(function(e,t){return{F:class extends t{constructor(...t){super(...t),e(this)}},d:[{kind:"field",decorators:[(0,i.Cb)()],key:"header",value:void 0},{kind:"field",decorators:[(0,i.Cb)({type:Boolean,reflect:!0})],key:"outlined",value:()=>!1},{kind:"get",static:!0,key:"styles",value:function(){return i.iv`
:host {
background: var(
--op-card-background,
var(--card-background-color, white)
);
border-radius: var(--op-card-border-radius, 4px);
box-shadow: var(
--op-card-box-shadow,
0px 2px 1px -1px rgba(0, 0, 0, 0.2),
0px 1px 1px 0px rgba(0, 0, 0, 0.14),
0px 1px 3px 0px rgba(0, 0, 0, 0.12)
);
color: var(--primary-text-color);
display: block;
transition: all 0.3s ease-out;
position: relative;
}
:host([outlined]) {
box-shadow: none;
border-width: var(--op-card-border-width, 1px);
border-style: solid;
border-color: var(
--op-card-border-color,
var(--divider-color, #e0e0e0)
);
}
.card-header,
:host ::slotted(.card-header) {
color: var(--op-card-header-color, --primary-text-color);
font-family: var(--op-card-header-font-family, inherit);
font-size: var(--op-card-header-font-size, 24px);
letter-spacing: -0.012em;
line-height: 48px;
padding: 12px 16px 16px;
display: block;
margin-block-start: 0px;
margin-block-end: 0px;
font-weight: normal;
}
:host ::slotted(.card-content:not(:first-child)),
slot:not(:first-child)::slotted(.card-content) {
padding-top: 0px;
margin-top: -8px;
}
:host ::slotted(.card-content) {
padding: 16px;
}
:host ::slotted(.card-actions) {
border-top: 1px solid var(--divider-color, #e8e8e8);
padding: 5px 16px;
}
`}},{kind:"method",key:"render",value:function(){return i.dy`
${this.header?i.dy`<h1 class="card-header">${this.header}</h1>`:i.dy``}
<slot></slot>
`}}]}}),i.oi)},36125:(e,t,r)=>{"use strict";r(29119);var i=r(15652);function n(){n=function(){return e};var e={elementsDefinitionOrder:[["method"],["field"]],initializeInstanceElements:function(e,t){["method","field"].forEach((function(r){t.forEach((function(t){t.kind===r&&"own"===t.placement&&this.defineClassElement(e,t)}),this)}),this)},initializeClassElements:function(e,t){var r=e.prototype;["method","field"].forEach((function(i){t.forEach((function(t){var n=t.placement;if(t.kind===i&&("static"===n||"prototype"===n)){var o="static"===n?e:r;this.defineClassElement(o,t)}}),this)}),this)},defineClassElement:function(e,t){var r=t.descriptor;if("field"===t.kind){var i=t.initializer;r={enumerable:r.enumerable,writable:r.writable,configurable:r.configurable,value:void 0===i?void 0:i.call(e)}}Object.defineProperty(e,t.key,r)},decorateClass:function(e,t){var r=[],i=[],n={static:[],prototype:[],own:[]};if(e.forEach((function(e){this.addElementPlacement(e,n)}),this),e.forEach((function(e){if(!s(e))return r.push(e);var t=this.decorateElement(e,n);r.push(t.element),r.push.apply(r,t.extras),i.push.apply(i,t.finishers)}),this),!t)return{elements:r,finishers:i};var o=this.decorateConstructor(r,t);return i.push.apply(i,o.finishers),o.finishers=i,o},addElementPlacement:function(e,t,r){var i=t[e.placement];if(!r&&-1!==i.indexOf(e.key))throw new TypeError("Duplicated element ("+e.key+")");i.push(e.key)},decorateElement:function(e,t){for(var r=[],i=[],n=e.decorators,o=n.length-1;o>=0;o--){var a=t[e.placement];a.splice(a.indexOf(e.key),1);var s=this.fromElementDescriptor(e),l=this.toElementFinisherExtras((0,n[o])(s)||s);e=l.element,this.addElementPlacement(e,t),l.finisher&&i.push(l.finisher);var c=l.extras;if(c){for(var d=0;d<c.length;d++)this.addElementPlacement(c[d],t);r.push.apply(r,c)}}return{element:e,finishers:i,extras:r}},decorateConstructor:function(e,t){for(var r=[],i=t.length-1;i>=0;i--){var n=this.fromClassDescriptor(e),o=this.toClassDescriptor((0,t[i])(n)||n);if(void 0!==o.finisher&&r.push(o.finisher),void 0!==o.elements){e=o.elements;for(var a=0;a<e.length-1;a++)for(var s=a+1;s<e.length;s++)if(e[a].key===e[s].key&&e[a].placement===e[s].placement)throw new TypeError("Duplicated element ("+e[a].key+")")}}return{elements:e,finishers:r}},fromElementDescriptor:function(e){var t={kind:e.kind,key:e.key,placement:e.placement,descriptor:e.descriptor};return Object.defineProperty(t,Symbol.toStringTag,{value:"Descriptor",configurable:!0}),"field"===e.kind&&(t.initializer=e.initializer),t},toElementDescriptors:function(e){var t;if(void 0!==e)return(t=e,function(e){if(Array.isArray(e))return e}(t)||function(e){if("undefined"!=typeof Symbol&&Symbol.iterator in Object(e))return Array.from(e)}(t)||function(e,t){if(e){if("string"==typeof e)return p(e,t);var r=Object.prototype.toString.call(e).slice(8,-1);return"Object"===r&&e.constructor&&(r=e.constructor.name),"Map"===r||"Set"===r?Array.from(e):"Arguments"===r||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(r)?p(e,t):void 0}}(t)||function(){throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()).map((function(e){var t=this.toElementDescriptor(e);return this.disallowProperty(e,"finisher","An element descriptor"),this.disallowProperty(e,"extras","An element descriptor"),t}),this)},toElementDescriptor:function(e){var t=String(e.kind);if("method"!==t&&"field"!==t)throw new TypeError('An element descriptor\'s .kind property must be either "method" or 
"field", but a decorator created an element descriptor with .kind "'+t+'"');var r=d(e.key),i=String(e.placement);if("static"!==i&&"prototype"!==i&&"own"!==i)throw new TypeError('An element descriptor\'s .placement property must be one of "static", "prototype" or "own", but a decorator created an element descriptor with .placement "'+i+'"');var n=e.descriptor;this.disallowProperty(e,"elements","An element descriptor");var o={kind:t,key:r,placement:i,descriptor:Object.assign({},n)};return"field"!==t?this.disallowProperty(e,"initializer","A method descriptor"):(this.disallowProperty(n,"get","The property descriptor of a field descriptor"),this.disallowProperty(n,"set","The property descriptor of a field descriptor"),this.disallowProperty(n,"value","The property descriptor of a field descriptor"),o.initializer=e.initializer),o},toElementFinisherExtras:function(e){return{element:this.toElementDescriptor(e),finisher:c(e,"finisher"),extras:this.toElementDescriptors(e.extras)}},fromClassDescriptor:function(e){var t={kind:"class",elements:e.map(this.fromElementDescriptor,this)};return Object.defineProperty(t,Symbol.toStringTag,{value:"Descriptor",configurable:!0}),t},toClassDescriptor:function(e){var t=String(e.kind);if("class"!==t)throw new TypeError('A class descriptor\'s .kind property must be "class", but a decorator created a class descriptor with .kind "'+t+'"');this.disallowProperty(e,"key","A class descriptor"),this.disallowProperty(e,"placement","A class descriptor"),this.disallowProperty(e,"descriptor","A class descriptor"),this.disallowProperty(e,"initializer","A class descriptor"),this.disallowProperty(e,"extras","A class descriptor");var r=c(e,"finisher");return{elements:this.toElementDescriptors(e.elements),finisher:r}},runClassFinishers:function(e,t){for(var r=0;r<t.length;r++){var i=(0,t[r])(e);if(void 0!==i){if("function"!=typeof i)throw new TypeError("Finishers must return a constructor.");e=i}}return e},disallowProperty:function(e,t,r){if(void 0!==e[t])throw new TypeError(r+" can't have a ."+t+" property.")}};return e}function o(e){var t,r=d(e.key);"method"===e.kind?t={value:e.value,writable:!0,configurable:!0,enumerable:!1}:"get"===e.kind?t={get:e.value,configurable:!0,enumerable:!1}:"set"===e.kind?t={set:e.value,configurable:!0,enumerable:!1}:"field"===e.kind&&(t={configurable:!0,writable:!0,enumerable:!0});var i={kind:"field"===e.kind?"field":"method",key:r,placement:e.static?"static":"field"===e.kind?"own":"prototype",descriptor:t};return e.decorators&&(i.decorators=e.decorators),"field"===e.kind&&(i.initializer=e.value),i}function a(e,t){void 0!==e.descriptor.get?t.descriptor.get=e.descriptor.get:t.descriptor.set=e.descriptor.set}function s(e){return e.decorators&&e.decorators.length}function l(e){return void 0!==e&&!(void 0===e.value&&void 0===e.writable)}function c(e,t){var r=e[t];if(void 0!==r&&"function"!=typeof r)throw new TypeError("Expected '"+t+"' to be a function");return r}function d(e){var t=function(e,t){if("object"!=typeof e||null===e)return e;var r=e[Symbol.toPrimitive];if(void 0!==r){var i=r.call(e,t||"default");if("object"!=typeof i)return i;throw new TypeError("@@toPrimitive must return a primitive value.")}return("string"===t?String:Number)(e)}(e,"string");return"symbol"==typeof t?t:String(t)}function p(e,t){(null==t||t>e.length)&&(t=e.length);for(var r=0,i=new Array(t);r<t;r++)i[r]=e[r];return i}function f(e,t,r){return(f="undefined"!=typeof Reflect&&Reflect.get?Reflect.get:function(e,t,r){var 
i=function(e,t){for(;!Object.prototype.hasOwnProperty.call(e,t)&&null!==(e=u(e)););return e}(e,t);if(i){var n=Object.getOwnPropertyDescriptor(i,t);return n.get?n.get.call(r):n.value}})(e,t,r||e)}function u(e){return(u=Object.setPrototypeOf?Object.getPrototypeOf:function(e){return e.__proto__||Object.getPrototypeOf(e)})(e)}const h=customElements.get("mwc-fab");!function(e,t,r,i){var c=n();if(i)for(var d=0;d<i.length;d++)c=i[d](c);var p=t((function(e){c.initializeInstanceElements(e,f.elements)}),r),f=c.decorateClass(function(e){for(var t=[],r=function(e){return"method"===e.kind&&e.key===o.key&&e.placement===o.placement},i=0;i<e.length;i++){var n,o=e[i];if("method"===o.kind&&(n=t.find(r)))if(l(o.descriptor)||l(n.descriptor)){if(s(o)||s(n))throw new ReferenceError("Duplicated methods ("+o.key+") can't be decorated.");n.descriptor=o.descriptor}else{if(s(o)){if(s(n))throw new ReferenceError("Decorators can't be placed on different accessors with for the same property ("+o.key+").");n.decorators=o.decorators}a(o,n)}else t.push(o)}return t}(p.d.map(o)),e);c.initializeClassElements(p.F,f.elements),c.runClassFinishers(p.F,f.finishers)}([(0,i.Mo)("ha-fab")],(function(e,t){class r extends t{constructor(...t){super(...t),e(this)}}return{F:r,d:[{kind:"method",key:"firstUpdated",value:function(e){f(u(r.prototype),"firstUpdated",this).call(this,e),this.style.setProperty("--mdc-theme-secondary","var(--primary-color)")}}]}}),h)},99282:(e,t,r)=>{"use strict";var i=r(55317),n=r(52039);class o extends n.C{connectedCallback(){super.connectedCallback(),setTimeout((()=>{this.path="ltr"===window.getComputedStyle(this).direction?i.zrb:i.gAv}),100)}}customElements.define("ha-icon-next",o)},88165:(e,t,r)=>{"use strict";var i=r(15652),n=r(81471);function o(){o=function(){return e};var e={elementsDefinitionOrder:[["method"],["field"]],initializeInstanceElements:function(e,t){["method","field"].forEach((function(r){t.forEach((function(t){t.kind===r&&"own"===t.placement&&this.defineClassElement(e,t)}),this)}),this)},initializeClassElements:function(e,t){var r=e.prototype;["method","field"].forEach((function(i){t.forEach((function(t){var n=t.placement;if(t.kind===i&&("static"===n||"prototype"===n)){var o="static"===n?e:r;this.defineClassElement(o,t)}}),this)}),this)},defineClassElement:function(e,t){var r=t.descriptor;if("field"===t.kind){var i=t.initializer;r={enumerable:r.enumerable,writable:r.writable,configurable:r.configurable,value:void 0===i?void 0:i.call(e)}}Object.defineProperty(e,t.key,r)},decorateClass:function(e,t){var r=[],i=[],n={static:[],prototype:[],own:[]};if(e.forEach((function(e){this.addElementPlacement(e,n)}),this),e.forEach((function(e){if(!l(e))return r.push(e);var t=this.decorateElement(e,n);r.push(t.element),r.push.apply(r,t.extras),i.push.apply(i,t.finishers)}),this),!t)return{elements:r,finishers:i};var o=this.decorateConstructor(r,t);return i.push.apply(i,o.finishers),o.finishers=i,o},addElementPlacement:function(e,t,r){var i=t[e.placement];if(!r&&-1!==i.indexOf(e.key))throw new TypeError("Duplicated element ("+e.key+")");i.push(e.key)},decorateElement:function(e,t){for(var r=[],i=[],n=e.decorators,o=n.length-1;o>=0;o--){var a=t[e.placement];a.splice(a.indexOf(e.key),1);var s=this.fromElementDescriptor(e),l=this.toElementFinisherExtras((0,n[o])(s)||s);e=l.element,this.addElementPlacement(e,t),l.finisher&&i.push(l.finisher);var c=l.extras;if(c){for(var 
d=0;d<c.length;d++)this.addElementPlacement(c[d],t);r.push.apply(r,c)}}return{element:e,finishers:i,extras:r}},decorateConstructor:function(e,t){for(var r=[],i=t.length-1;i>=0;i--){var n=this.fromClassDescriptor(e),o=this.toClassDescriptor((0,t[i])(n)||n);if(void 0!==o.finisher&&r.push(o.finisher),void 0!==o.elements){e=o.elements;for(var a=0;a<e.length-1;a++)for(var s=a+1;s<e.length;s++)if(e[a].key===e[s].key&&e[a].placement===e[s].placement)throw new TypeError("Duplicated element ("+e[a].key+")")}}return{elements:e,finishers:r}},fromElementDescriptor:function(e){var t={kind:e.kind,key:e.key,placement:e.placement,descriptor:e.descriptor};return Object.defineProperty(t,Symbol.toStringTag,{value:"Descriptor",configurable:!0}),"field"===e.kind&&(t.initializer=e.initializer),t},toElementDescriptors:function(e){var t;if(void 0!==e)return(t=e,function(e){if(Array.isArray(e))return e}(t)||function(e){if("undefined"!=typeof Symbol&&Symbol.iterator in Object(e))return Array.from(e)}(t)||function(e,t){if(e){if("string"==typeof e)return f(e,t);var r=Object.prototype.toString.call(e).slice(8,-1);return"Object"===r&&e.constructor&&(r=e.constructor.name),"Map"===r||"Set"===r?Array.from(e):"Arguments"===r||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(r)?f(e,t):void 0}}(t)||function(){throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()).map((function(e){var t=this.toElementDescriptor(e);return this.disallowProperty(e,"finisher","An element descriptor"),this.disallowProperty(e,"extras","An element descriptor"),t}),this)},toElementDescriptor:function(e){var t=String(e.kind);if("method"!==t&&"field"!==t)throw new TypeError('An element descriptor\'s .kind property must be either "method" or "field", but a decorator created an element descriptor with .kind "'+t+'"');var r=p(e.key),i=String(e.placement);if("static"!==i&&"prototype"!==i&&"own"!==i)throw new TypeError('An element descriptor\'s .placement property must be one of "static", "prototype" or "own", but a decorator created an element descriptor with .placement "'+i+'"');var n=e.descriptor;this.disallowProperty(e,"elements","An element descriptor");var o={kind:t,key:r,placement:i,descriptor:Object.assign({},n)};return"field"!==t?this.disallowProperty(e,"initializer","A method descriptor"):(this.disallowProperty(n,"get","The property descriptor of a field descriptor"),this.disallowProperty(n,"set","The property descriptor of a field descriptor"),this.disallowProperty(n,"value","The property descriptor of a field descriptor"),o.initializer=e.initializer),o},toElementFinisherExtras:function(e){return{element:this.toElementDescriptor(e),finisher:d(e,"finisher"),extras:this.toElementDescriptors(e.extras)}},fromClassDescriptor:function(e){var t={kind:"class",elements:e.map(this.fromElementDescriptor,this)};return Object.defineProperty(t,Symbol.toStringTag,{value:"Descriptor",configurable:!0}),t},toClassDescriptor:function(e){var t=String(e.kind);if("class"!==t)throw new TypeError('A class descriptor\'s .kind property must be "class", but a decorator created a class descriptor with .kind "'+t+'"');this.disallowProperty(e,"key","A class descriptor"),this.disallowProperty(e,"placement","A class descriptor"),this.disallowProperty(e,"descriptor","A class descriptor"),this.disallowProperty(e,"initializer","A class descriptor"),this.disallowProperty(e,"extras","A class descriptor");var 
r=d(e,"finisher");return{elements:this.toElementDescriptors(e.elements),finisher:r}},runClassFinishers:function(e,t){for(var r=0;r<t.length;r++){var i=(0,t[r])(e);if(void 0!==i){if("function"!=typeof i)throw new TypeError("Finishers must return a constructor.");e=i}}return e},disallowProperty:function(e,t,r){if(void 0!==e[t])throw new TypeError(r+" can't have a ."+t+" property.")}};return e}function a(e){var t,r=p(e.key);"method"===e.kind?t={value:e.value,writable:!0,configurable:!0,enumerable:!1}:"get"===e.kind?t={get:e.value,configurable:!0,enumerable:!1}:"set"===e.kind?t={set:e.value,configurable:!0,enumerable:!1}:"field"===e.kind&&(t={configurable:!0,writable:!0,enumerable:!0});var i={kind:"field"===e.kind?"field":"method",key:r,placement:e.static?"static":"field"===e.kind?"own":"prototype",descriptor:t};return e.decorators&&(i.decorators=e.decorators),"field"===e.kind&&(i.initializer=e.value),i}function s(e,t){void 0!==e.descriptor.get?t.descriptor.get=e.descriptor.get:t.descriptor.set=e.descriptor.set}function l(e){return e.decorators&&e.decorators.length}function c(e){return void 0!==e&&!(void 0===e.value&&void 0===e.writable)}function d(e,t){var r=e[t];if(void 0!==r&&"function"!=typeof r)throw new TypeError("Expected '"+t+"' to be a function");return r}function p(e){var t=function(e,t){if("object"!=typeof e||null===e)return e;var r=e[Symbol.toPrimitive];if(void 0!==r){var i=r.call(e,t||"default");if("object"!=typeof i)return i;throw new TypeError("@@toPrimitive must return a primitive value.")}return("string"===t?String:Number)(e)}(e,"string");return"symbol"==typeof t?t:String(t)}function f(e,t){(null==t||t>e.length)&&(t=e.length);for(var r=0,i=new Array(t);r<t;r++)i[r]=e[r];return i}!function(e,t,r,i){var n=o();if(i)for(var d=0;d<i.length;d++)n=i[d](n);var p=t((function(e){n.initializeInstanceElements(e,f.elements)}),r),f=n.decorateClass(function(e){for(var t=[],r=function(e){return"method"===e.kind&&e.key===o.key&&e.placement===o.placement},i=0;i<e.length;i++){var n,o=e[i];if("method"===o.kind&&(n=t.find(r)))if(c(o.descriptor)||c(n.descriptor)){if(l(o)||l(n))throw new ReferenceError("Duplicated methods ("+o.key+") can't be decorated.");n.descriptor=o.descriptor}else{if(l(o)){if(l(n))throw new ReferenceError("Decorators can't be placed on different accessors with for the same property ("+o.key+").");n.decorators=o.decorators}s(o,n)}else t.push(o)}return t}(p.d.map(a)),e);n.initializeClassElements(p.F,f.elements),n.runClassFinishers(p.F,f.finishers)}([(0,i.Mo)("ha-config-section")],(function(e,t){return{F:class extends t{constructor(...t){super(...t),e(this)}},d:[{kind:"field",decorators:[(0,i.Cb)()],key:"isWide",value:()=>!1},{kind:"field",decorators:[(0,i.Cb)({type:Boolean})],key:"vertical",value:()=>!1},{kind:"method",key:"render",value:function(){return i.dy`
<div
class="content ${(0,n.$)({narrow:!this.isWide})}"
>
<div class="header"><slot name="header"></slot></div>
<div
class="together layout ${(0,n.$)({narrow:!this.isWide,vertical:this.vertical||!this.isWide,horizontal:!this.vertical&&this.isWide})}"
>
<div class="intro"><slot name="introduction"></slot></div>
<div class="panel flex-auto"><slot></slot></div>
</div>
</div>
`}},{kind:"get",static:!0,key:"styles",value:function(){return i.iv`
:host {
display: block;
}
.content {
padding: 28px 20px 0;
max-width: 1040px;
margin: 0 auto;
}
.layout {
display: flex;
}
.horizontal {
flex-direction: row;
}
.vertical {
flex-direction: column;
}
.flex-auto {
flex: 1 1 auto;
}
.header {
font-family: var(--paper-font-headline_-_font-family);
-webkit-font-smoothing: var(
--paper-font-headline_-_-webkit-font-smoothing
);
font-size: var(--paper-font-headline_-_font-size);
font-weight: var(--paper-font-headline_-_font-weight);
letter-spacing: var(--paper-font-headline_-_letter-spacing);
line-height: var(--paper-font-headline_-_line-height);
opacity: var(--dark-primary-opacity);
}
.together {
margin-top: 32px;
}
.intro {
font-family: var(--paper-font-subhead_-_font-family);
-webkit-font-smoothing: var(
--paper-font-subhead_-_-webkit-font-smoothing
);
font-weight: var(--paper-font-subhead_-_font-weight);
line-height: var(--paper-font-subhead_-_line-height);
width: 100%;
opacity: var(--dark-primary-opacity);
font-size: 14px;
padding-bottom: 20px;
}
.horizontal .intro {
max-width: 400px;
margin-right: 40px;
}
.panel {
margin-top: -24px;
}
.panel ::slotted(*) {
margin-top: 24px;
display: block;
}
.narrow.content {
max-width: 640px;
}
.narrow .together {
margin-top: 20px;
}
.narrow .intro {
padding-bottom: 20px;
margin-right: 0;
max-width: 500px;
}
`}}]}}),i.oi)},58851:(e,t,r)=>{"use strict";r.r(t),r.d(t,{zhaTabs:()=>m});r(53918);var i=r(55317),n=(r(53973),r(89194),r(15652)),o=r(87744),a=(r(22098),r(36125),r(99282),r(13491),r(11654));r(88165);function s(){s=function(){return e};var e={elementsDefinitionOrder:[["method"],["field"]],initializeInstanceElements:function(e,t){["method","field"].forEach((function(r){t.forEach((function(t){t.kind===r&&"own"===t.placement&&this.defineClassElement(e,t)}),this)}),this)},initializeClassElements:function(e,t){var r=e.prototype;["method","field"].forEach((function(i){t.forEach((function(t){var n=t.placement;if(t.kind===i&&("static"===n||"prototype"===n)){var o="static"===n?e:r;this.defineClassElement(o,t)}}),this)}),this)},defineClassElement:function(e,t){var r=t.descriptor;if("field"===t.kind){var i=t.initializer;r={enumerable:r.enumerable,writable:r.writable,configurable:r.configurable,value:void 0===i?void 0:i.call(e)}}Object.defineProperty(e,t.key,r)},decorateClass:function(e,t){var r=[],i=[],n={static:[],prototype:[],own:[]};if(e.forEach((function(e){this.addElementPlacement(e,n)}),this),e.forEach((function(e){if(!d(e))return r.push(e);var t=this.decorateElement(e,n);r.push(t.element),r.push.apply(r,t.extras),i.push.apply(i,t.finishers)}),this),!t)return{elements:r,finishers:i};var o=this.decorateConstructor(r,t);return i.push.apply(i,o.finishers),o.finishers=i,o},addElementPlacement:function(e,t,r){var i=t[e.placement];if(!r&&-1!==i.indexOf(e.key))throw new TypeError("Duplicated element ("+e.key+")");i.push(e.key)},decorateElement:function(e,t){for(var r=[],i=[],n=e.decorators,o=n.length-1;o>=0;o--){var a=t[e.placement];a.splice(a.indexOf(e.key),1);var s=this.fromElementDescriptor(e),l=this.toElementFinisherExtras((0,n[o])(s)||s);e=l.element,this.addElementPlacement(e,t),l.finisher&&i.push(l.finisher);var c=l.extras;if(c){for(var d=0;d<c.length;d++)this.addElementPlacement(c[d],t);r.push.apply(r,c)}}return{element:e,finishers:i,extras:r}},decorateConstructor:function(e,t){for(var r=[],i=t.length-1;i>=0;i--){var n=this.fromClassDescriptor(e),o=this.toClassDescriptor((0,t[i])(n)||n);if(void 0!==o.finisher&&r.push(o.finisher),void 0!==o.elements){e=o.elements;for(var a=0;a<e.length-1;a++)for(var s=a+1;s<e.length;s++)if(e[a].key===e[s].key&&e[a].placement===e[s].placement)throw new TypeError("Duplicated element ("+e[a].key+")")}}return{elements:e,finishers:r}},fromElementDescriptor:function(e){var t={kind:e.kind,key:e.key,placement:e.placement,descriptor:e.descriptor};return Object.defineProperty(t,Symbol.toStringTag,{value:"Descriptor",configurable:!0}),"field"===e.kind&&(t.initializer=e.initializer),t},toElementDescriptors:function(e){var t;if(void 0!==e)return(t=e,function(e){if(Array.isArray(e))return e}(t)||function(e){if("undefined"!=typeof Symbol&&Symbol.iterator in Object(e))return Array.from(e)}(t)||function(e,t){if(e){if("string"==typeof e)return h(e,t);var r=Object.prototype.toString.call(e).slice(8,-1);return"Object"===r&&e.constructor&&(r=e.constructor.name),"Map"===r||"Set"===r?Array.from(e):"Arguments"===r||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(r)?h(e,t):void 0}}(t)||function(){throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()).map((function(e){var t=this.toElementDescriptor(e);return this.disallowProperty(e,"finisher","An element descriptor"),this.disallowProperty(e,"extras","An element descriptor"),t}),this)},toElementDescriptor:function(e){var 
t=String(e.kind);if("method"!==t&&"field"!==t)throw new TypeError('An element descriptor\'s .kind property must be either "method" or "field", but a decorator created an element descriptor with .kind "'+t+'"');var r=u(e.key),i=String(e.placement);if("static"!==i&&"prototype"!==i&&"own"!==i)throw new TypeError('An element descriptor\'s .placement property must be one of "static", "prototype" or "own", but a decorator created an element descriptor with .placement "'+i+'"');var n=e.descriptor;this.disallowProperty(e,"elements","An element descriptor");var o={kind:t,key:r,placement:i,descriptor:Object.assign({},n)};return"field"!==t?this.disallowProperty(e,"initializer","A method descriptor"):(this.disallowProperty(n,"get","The property descriptor of a field descriptor"),this.disallowProperty(n,"set","The property descriptor of a field descriptor"),this.disallowProperty(n,"value","The property descriptor of a field descriptor"),o.initializer=e.initializer),o},toElementFinisherExtras:function(e){return{element:this.toElementDescriptor(e),finisher:f(e,"finisher"),extras:this.toElementDescriptors(e.extras)}},fromClassDescriptor:function(e){var t={kind:"class",elements:e.map(this.fromElementDescriptor,this)};return Object.defineProperty(t,Symbol.toStringTag,{value:"Descriptor",configurable:!0}),t},toClassDescriptor:function(e){var t=String(e.kind);if("class"!==t)throw new TypeError('A class descriptor\'s .kind property must be "class", but a decorator created a class descriptor with .kind "'+t+'"');this.disallowProperty(e,"key","A class descriptor"),this.disallowProperty(e,"placement","A class descriptor"),this.disallowProperty(e,"descriptor","A class descriptor"),this.disallowProperty(e,"initializer","A class descriptor"),this.disallowProperty(e,"extras","A class descriptor");var r=f(e,"finisher");return{elements:this.toElementDescriptors(e.elements),finisher:r}},runClassFinishers:function(e,t){for(var r=0;r<t.length;r++){var i=(0,t[r])(e);if(void 0!==i){if("function"!=typeof i)throw new TypeError("Finishers must return a constructor.");e=i}}return e},disallowProperty:function(e,t,r){if(void 0!==e[t])throw new TypeError(r+" can't have a ."+t+" property.")}};return e}function l(e){var t,r=u(e.key);"method"===e.kind?t={value:e.value,writable:!0,configurable:!0,enumerable:!1}:"get"===e.kind?t={get:e.value,configurable:!0,enumerable:!1}:"set"===e.kind?t={set:e.value,configurable:!0,enumerable:!1}:"field"===e.kind&&(t={configurable:!0,writable:!0,enumerable:!0});var i={kind:"field"===e.kind?"field":"method",key:r,placement:e.static?"static":"field"===e.kind?"own":"prototype",descriptor:t};return e.decorators&&(i.decorators=e.decorators),"field"===e.kind&&(i.initializer=e.value),i}function c(e,t){void 0!==e.descriptor.get?t.descriptor.get=e.descriptor.get:t.descriptor.set=e.descriptor.set}function d(e){return e.decorators&&e.decorators.length}function p(e){return void 0!==e&&!(void 0===e.value&&void 0===e.writable)}function f(e,t){var r=e[t];if(void 0!==r&&"function"!=typeof r)throw new TypeError("Expected '"+t+"' to be a function");return r}function u(e){var t=function(e,t){if("object"!=typeof e||null===e)return e;var r=e[Symbol.toPrimitive];if(void 0!==r){var i=r.call(e,t||"default");if("object"!=typeof i)return i;throw new TypeError("@@toPrimitive must return a primitive value.")}return("string"===t?String:Number)(e)}(e,"string");return"symbol"==typeof t?t:String(t)}function h(e,t){(null==t||t>e.length)&&(t=e.length);for(var r=0,i=new Array(t);r<t;r++)i[r]=e[r];return i}const 
m=[{translationKey:"ui.panel.config.zha.network.caption",path:"/config/zha/dashboard",iconPath:i.CP8},{translationKey:"ui.panel.config.zha.groups.caption",path:"/config/zha/groups",iconPath:i.Rmq},{translationKey:"ui.panel.config.zha.visualization.caption",path:"/config/zha/visualization",iconPath:i.StF}];!function(e,t,r,i){var n=s();if(i)for(var o=0;o<i.length;o++)n=i[o](n);var a=t((function(e){n.initializeInstanceElements(e,f.elements)}),r),f=n.decorateClass(function(e){for(var t=[],r=function(e){return"method"===e.kind&&e.key===o.key&&e.placement===o.placement},i=0;i<e.length;i++){var n,o=e[i];if("method"===o.kind&&(n=t.find(r)))if(p(o.descriptor)||p(n.descriptor)){if(d(o)||d(n))throw new ReferenceError("Duplicated methods ("+o.key+") can't be decorated.");n.descriptor=o.descriptor}else{if(d(o)){if(d(n))throw new ReferenceError("Decorators can't be placed on different accessors with for the same property ("+o.key+").");n.decorators=o.decorators}c(o,n)}else t.push(o)}return t}(a.d.map(l)),e);n.initializeClassElements(a.F,f.elements),n.runClassFinishers(a.F,f.finishers)}([(0,n.Mo)("zha-config-dashboard")],(function(e,t){return{F:class extends t{constructor(...t){super(...t),e(this)}},d:[{kind:"field",decorators:[(0,n.Cb)({type:Object})],key:"opp",value:void 0},{kind:"field",decorators:[(0,n.Cb)({type:Object})],key:"route",value:void 0},{kind:"field",decorators:[(0,n.Cb)({type:Boolean})],key:"narrow",value:void 0},{kind:"field",decorators:[(0,n.Cb)({type:Boolean})],key:"isWide",value:void 0},{kind:"field",decorators:[(0,n.Cb)()],key:"configEntryId",value:void 0},{kind:"method",key:"render",value:function(){return n.dy`
<opp-tabs-subpage
.opp=${this.opp}
.narrow=${this.narrow}
.route=${this.route}
.tabs=${m}
back-path="/config/integrations"
>
<ha-card header="Zigbee Network">
<div class="card-content">
In the future you can change network settings for ZHA here.
</div>
${this.configEntryId?n.dy`<div class="card-actions">
<a
href="${`/config/devices/dashboard?historyBack=1&config_entry=${this.configEntryId}`}"
>
<mwc-button
>${this.opp.localize("ui.panel.config.devices.caption")}</mwc-button
>
</a>
<a
href="${`/config/entities/dashboard?historyBack=1&config_entry=${this.configEntryId}`}"
>
<mwc-button
>${this.opp.localize("ui.panel.config.entities.caption")}</mwc-button
>
</a>
</div>`:""}
</ha-card>
<a href="/config/zha/add" slot="fab">
<ha-fab
.label=${this.opp.localize("ui.panel.config.zha.add_device")}
extended
?rtl=${(0,o.HE)(this.opp)}
>
<ha-svg-icon slot="icon" .path=${i.qX5}></ha-svg-icon>
</ha-fab>
</a>
</opp-tabs-subpage>
`}},{kind:"get",static:!0,key:"styles",value:function(){return[a.Qx,n.iv`
ha-card {
margin: auto;
margin-top: 16px;
max-width: 500px;
}
`]}}]}}),n.oi)}}]);
//# sourceMappingURL=chunk.1de86c48b013a218cc13.js.map
|
PypiClean
|
/tensorboard_plugin_customizable_plots-1.1.3-py3-none-any.whl/tensorboard_plugin_customizable_plots/static/libs/config/traces_config.js
|
function axisPeriod(axis) {
return {
valType: 'any',
dflt: 0,
editType: 'calc',
description: [
'Only relevant when the axis `type` is *date*.',
'Sets the period positioning in milliseconds or *M<n>* on the ' + axis + ' axis.',
'Special values in the form of *M<n>* could be used to declare',
'the number of months. In this case `n` must be a positive integer.'
].join(' ')
};
}
function axisPeriod0(axis) {
return {
valType: 'any',
editType: 'calc',
description: [
'Only relevant when the axis `type` is *date*.',
'Sets the base for period positioning in milliseconds or date string on the ' + axis + ' axis.',
'When `' + axis + 'period` is round number of weeks,',
'the `' + axis + 'period0` by default would be on a Sunday i.e. 2000-01-02,',
'otherwise it would be at 2000-01-01.'
].join(' ')
};
}
function axisPeriodAlignment(axis) {
return {
valType: 'enumerated',
values: [
'start', 'middle', 'end'
],
dflt: 'middle',
editType: 'calc',
description: [
'Only relevant when the axis `type` is *date*.',
'Sets the alignment of data points on the ' + axis + ' axis.'
].join(' ')
};
}
var tracesAttributes = {
//boolean
showlegend: {
valType: 'boolean',
dflt: true,
editType: 'style',
description: [
'Determines whether or not an item corresponding to this',
'trace is shown in the legend.'
].join(' ')
},
type: {
valType: 'string',
values: [], // listed dynamically
dflt: 'scatter',
editType: 'calc+clearAxisTypes',
_noTemplating: true // we handle this at a higher level
},
mode: {
valType: 'string',
flags: ['lines', 'markers', 'text'],
extras: ['none'],
editType: 'calc',
dflt: 'lines+markers',
description: [
'Determines the drawing mode for this scatter trace.',
'If the provided `mode` includes *text* then the `text` elements',
'appear at the coordinates. Otherwise, the `text` elements',
'appear on hover.',
'If there are less than ' + 20 + ' points',
'and the trace is not stacked',
'then the default is *lines+markers*. Otherwise, *lines*.',
`\nflags: ${['lines', 'markers', 'text', 'none']}`
].join(' ')
},
//enumerated
visible: {
valType: 'enumerated',
values: [true, false, 'legendonly'],
dflt: true,
editType: 'calc',
description: [
'Determines whether or not this trace is visible.',
'If *legendonly*, the trace is not drawn,',
'but can appear as a legend item',
'(provided that the legend itself is visible).'
].join(' ')
},
orientation: {
valType: 'enumerated',
values: ['v', 'h'],
editType: 'calc',
description: [
'Only relevant when `stackgroup` is used, and only the first',
'`orientation` found in the `stackgroup` will be used - including',
'if `visible` is *legendonly* but not if it is `false`. Sets the',
'stacking direction. With *v* (*h*), the y (x) values of subsequent',
'traces are added. Also affects the default value of `fill`.'
].join(' ')
},
groupnorm: {
valType: 'enumerated',
values: ['', 'fraction', 'percent'],
dflt: '',
editType: 'calc',
description: [
'Only relevant when `stackgroup` is used, and only the first',
'`groupnorm` found in the `stackgroup` will be used - including',
'if `visible` is *legendonly* but not if it is `false`.',
'Sets the normalization for the sum of this `stackgroup`.',
'With *fraction*, the value of each trace at each location is',
'divided by the sum of all trace values at that location.',
'*percent* is the same but multiplied by 100 to show percentages.',
'If there are multiple subplots, or multiple `stackgroup`s on one',
'subplot, each will be normalized within its own set.'
].join(' ')
},
stackgaps: {
valType: 'enumerated',
values: ['infer zero', 'interpolate'],
dflt: 'infer zero',
editType: 'calc',
description: [
'Only relevant when `stackgroup` is used, and only the first',
'`stackgaps` found in the `stackgroup` will be used - including',
'if `visible` is *legendonly* but not if it is `false`.',
'Determines how we handle locations at which other traces in this',
'group have data but this one does not.',
'With *infer zero* we insert a zero at these locations.',
'With *interpolate* we linearly interpolate between existing',
'values, and extrapolate a constant beyond the existing values.'
// TODO - implement interrupt mode
// '*interrupt* omits this trace from the stack at this location by',
// 'dropping abruptly, midway between the existing and missing locations.'
].join(' ')
},
//string
legendgroup: {
valType: 'string',
dflt: '',
editType: 'style',
description: [
'Sets the legend group for this trace.',
'Traces part of the same legend group hide/show at the same time',
'when toggling legend items.'
].join(' ')
},
stackgroup: {
valType: 'string',
dflt: '',
editType: 'calc',
description: [
'Set several scatter traces (on the same subplot) to the same',
'stackgroup in order to add their y values (or their x values if',
'`orientation` is *h*). If blank or omitted this trace will not be',
'stacked. Stacking also turns `fill` on by default, using *tonexty*',
'(*tonextx*) if `orientation` is *h* (*v*) and sets the default',
'`mode` to *lines* irrespective of point count.',
'You can only stack on a numeric (linear or log) axis.',
'Traces in a `stackgroup` will only fill to (or be filled to) other',
'traces in the same group. With multiple `stackgroup`s or some',
'traces stacked and some not, if fill-linked traces are not already',
'consecutive, the later ones will be pushed down in the drawing order.'
].join(' ')
},
hovertext: {
valType: 'string',
dflt: '',
arrayOk: true,
editType: 'style',
description: [
'Sets hover text elements associated with each (x,y) pair.',
'If a single string, the same string appears over',
'all the data points.',
'If an array of strings, the items are mapped in order to',
'this trace\'s (x,y) coordinates.',
'To be seen, trace `hoverinfo` must contain a *text* flag.'
].join(' ')
},
hoveron: {
valType: 'string',
flags: ['points', 'fills'],
editType: 'style',
description: [
'Do the hover effects highlight individual points (markers or',
'line points) or do they highlight filled regions?',
'If the fill is *toself* or *tonext* and there are no markers',
'or text, then the default is *fills*, otherwise it is *points*.'
].join(' ')
},
//number
legendrank: {
valType: 'number',
dflt: 1000,
editType: 'style',
description: [
'Sets the legend rank for this trace.',
'Items and groups with smaller ranks are presented on top/left side while',
'with *reversed* `legend.traceorder` they are on bottom/right side.',
'The default legendrank is 1000,',
'so that you can use ranks less than 1000 to place certain items before all unranked items,',
'and ranks greater than 1000 to go after all unranked items.'
].join(' ')
},
legendwidth: {
valType: 'number',
min: 0,
editType: 'style',
description: 'Sets the width (in px or fraction) of the legend for this trace.',
},
opacity: {
valType: 'number',
min: 0,
max: 1,
dflt: 1,
editType: 'style',
description: 'Sets the opacity of the trace.'
},
// name: {
// valType: 'string',
// editType: 'style',
// description: [
// 'Sets the trace name.',
// 'The trace name appear as the legend item and on hover.'
// ].join(' ')
// },
// uid: {
// valType: 'string',
// editType: 'plot',
// anim: true,
// description: [
// 'Assign an id to this trace,',
// 'Use this to provide object constancy between traces during animations',
// 'and transitions.'
// ].join(' ')
// },
// ids: {
// valType: 'data_array',
// editType: 'calc',
// anim: true,
// description: [
// 'Assigns id labels to each datum.',
// 'These ids for object constancy of data points during animation.',
// 'Should be an array of strings, not numbers or any other type.'
// ].join(' ')
// },
// customdata: {
// valType: 'data_array',
// editType: 'calc',
// description: [
// 'Assigns extra data each datum.',
// 'This may be useful when listening to hover, click and selection events.',
// 'Note that, *scatter* traces also appends customdata items in the markers',
// 'DOM elements'
// ].join(' ')
// },
// meta: {
// valType: 'any',
// arrayOk: true,
// editType: 'plot',
// description: [
// 'Assigns extra meta information associated with this trace',
// 'that can be used in various text attributes.',
// 'Attributes such as trace `name`, graph, axis and colorbar `title.text`, annotation `text`',
// '`rangeselector`, `updatemenues` and `sliders` `label` text',
// 'all support `meta`.',
// 'To access the trace `meta` values in an attribute in the same trace, simply use',
// '`%{meta[i]}` where `i` is the index or key of the `meta`',
// 'item in question.',
// 'To access trace `meta` in layout attributes, use',
// '`%{data[n[.meta[i]}` where `i` is the index or key of the `meta`',
// 'and `n` is the trace index.'
// ].join(' ')
// },
//
// // N.B. these cannot be 'data_array' as they do not have the same length as
// // other data arrays and arrayOk attributes in general
// //
// // Maybe add another valType:
// // https://github.com/plotly/plotly.js/issues/1894
// selectedpoints: {
// valType: 'any',
// editType: 'calc',
// description: [
// 'Array containing integer indices of selected points.',
// 'Has an effect only for traces that support selections.',
// 'Note that an empty array means an empty selection where the `unselected`',
// 'are turned on for all points, whereas, any other non-array values means no',
// 'selection all where the `selected` and `unselected` styles have no effect.'
// ].join(' ')
// },
//
hoverinfo: {
valType: 'string',
flags: ['x', 'y', 'z', 'text', 'name'],
extras: ['all', 'none', 'skip'],
arrayOk: true,
dflt: 'all',
editType: 'none',
description: [
'Determines which trace information appear on hover.',
'If `none` or `skip` are set, no information is displayed upon hovering.',
'But, if `none` is set, click and hover events are still fired.',
`flags: ${['x', 'y', 'z', 'text', 'name']}`,
`extras: ${['all', 'none', 'skip']}`
].join(' ')
},
// // hoverlabel: fxAttrs.hoverlabel,
// stream: {
// token: {
// valType: 'string',
// noBlank: true,
// strict: true,
// editType: 'calc',
// description: [
// 'The stream id number links a data trace on a plot with a stream.',
// 'See https://chart-studio.plotly.com/settings for more details.'
// ].join(' ')
// },
// maxpoints: {
// valType: 'number',
// min: 0,
// max: 10000,
// dflt: 500,
// editType: 'calc',
// description: [
// 'Sets the maximum number of points to keep on the plots from an',
// 'incoming stream.',
// 'If `maxpoints` is set to *50*, only the newest 50 points will',
// 'be displayed on the plot.'
// ].join(' ')
// },
// },
//
// // from scatter plot attributes
//
// // x: {
// // valType: 'data_array',
// // editType: 'calc+clearAxisTypes',
// // anim: true,
// // description: 'Sets the x coordinates.'
// // },
x0: {
valType: 'string',
dflt: 0,
editType: 'calc+clearAxisTypes',
anim: true,
description: [
'Alternate to `x`.',
'Builds a linear space of x coordinates.',
'Use with `dx`',
'where `x0` is the starting coordinate and `dx` the step.'
].join(' ')
},
dx: {
valType: 'number',
dflt: 1,
editType: 'calc',
anim: true,
description: [
'Sets the x coordinate step.',
'See `x0` for more info.'
].join(' ')
},
// // y: {
// // valType: 'data_array',
// // editType: 'calc+clearAxisTypes',
// // anim: true,
// // description: 'Sets the y coordinates.'
// // },
y0: {
valType: 'string',
dflt: 0,
editType: 'calc+clearAxisTypes',
anim: true,
description: [
'Alternate to `y`.',
'Builds a linear space of y coordinates.',
'Use with `dy`',
'where `y0` is the starting coordinate and `dy` the step.'
].join(' ')
},
dy: {
valType: 'number',
dflt: 1,
editType: 'calc',
anim: true,
description: [
'Sets the y coordinate step.',
'See `y0` for more info.'
].join(' ')
},
//
xperiod: axisPeriod('x'),
yperiod: axisPeriod('y'),
xperiod0: axisPeriod0('x0'),
yperiod0: axisPeriod0('y0'),
xperiodalignment: axisPeriodAlignment('x'),
yperiodalignment: axisPeriodAlignment('y'),
// xhoverformat: axisHoverFormat('x'),
// yhoverformat: axisHoverFormat('y'),
text: {
valType: 'string',
dflt: '',
arrayOk: true,
editType: 'calc',
description: [
'Sets text elements associated with each (x,y) pair.',
'If a single string, the same string appears over',
'all the data points.',
'If an array of strings, the items are mapped in order to',
'this trace\'s (x,y) coordinates.',
'If trace `hoverinfo` contains a *text* flag and *hovertext* is not set,',
'these elements will be seen in the hover labels.'
].join(' ')
},
//
//
//
line: {
color: {
valType: 'string',
editType: 'style',
anim: true,
description: 'Sets the line color.'
},
width: {
valType: 'number',
min: 0,
dflt: 2,
editType: 'style',
anim: true,
description: 'Sets the line width (in px).'
},
shape: {
valType: 'enumerated',
values: ['linear', 'spline', 'hv', 'vh', 'hvh', 'vhv'],
dflt: 'linear',
editType: 'plot',
description: [
'Determines the line shape.',
'With *spline* the lines are drawn using spline interpolation.',
'The other available values correspond to step-wise line shapes.'
].join(' ')
},
smoothing: {
valType: 'number',
min: 0,
max: 1.3,
dflt: 1,
editType: 'plot',
description: [
'Has an effect only if `shape` is set to *spline*',
'Sets the amount of smoothing.',
'*0* corresponds to no smoothing (equivalent to a *linear* shape).'
].join(' ')
},
simplify: {
valType: 'boolean',
dflt: true,
editType: 'plot',
description: [
'Simplifies lines by removing nearly-collinear points. When transitioning',
'lines, it may be desirable to disable this so that the number of points',
'along the resulting SVG path is unaffected.'
].join(' ')
},
},
legendgrouptitle: {
text: {
valType: 'string',
dflt: '',
editType: 'style',
description: [
'Sets the title of the legend group.'
].join(' ')
},
// font: fontAttrs({
// editType: 'style',
// description: [
// 'Sets this legend group\'s title font.'
// ].join(' '),
// }),
},
//
// connectgaps: {
// valType: 'boolean',
// dflt: false,
// editType: 'calc',
// description: [
// 'Determines whether or not gaps',
// '(i.e. {nan} or missing values)',
// 'in the provided data arrays are connected.'
// ].join(' ')
// },
// cliponaxis: {
// valType: 'boolean',
// dflt: true,
// editType: 'plot',
// description: [
// 'Determines whether or not markers and text nodes',
// 'are clipped about the subplot axes.',
// 'To show markers and text nodes above axis lines and tick labels,',
// 'make sure to set `xaxis.layer` and `yaxis.layer` to *below traces*.'
// ].join(' ')
// },
//
// fill: {
// valType: 'enumerated',
// values: ['none', 'tozeroy', 'tozerox', 'tonexty', 'tonextx', 'toself', 'tonext'],
// editType: 'calc',
// description: [
// 'Sets the area to fill with a solid color.',
// 'Defaults to *none* unless this trace is stacked, then it gets',
// '*tonexty* (*tonextx*) if `orientation` is *v* (*h*)',
// 'Use with `fillcolor` if not *none*.',
// '*tozerox* and *tozeroy* fill to x=0 and y=0 respectively.',
// '*tonextx* and *tonexty* fill between the endpoints of this',
// 'trace and the endpoints of the trace before it, connecting those',
// 'endpoints with straight lines (to make a stacked area graph);',
// 'if there is no trace before it, they behave like *tozerox* and',
// '*tozeroy*.',
// '*toself* connects the endpoints of the trace (or each segment',
// 'of the trace if it has gaps) into a closed shape.',
// '*tonext* fills the space between two traces if one completely',
// 'encloses the other (eg consecutive contour lines), and behaves like',
// '*toself* if there is no trace before it. *tonext* should not be',
// 'used if one trace does not enclose the other.',
// 'Traces in a `stackgroup` will only fill to (or be filled to) other',
// 'traces in the same group. With multiple `stackgroup`s or some',
// 'traces stacked and some not, if fill-linked traces are not already',
// 'consecutive, the later ones will be pushed down in the drawing order.'
// ].join(' ')
// },
// fillcolor: {
// valType: 'color',
// editType: 'style',
// anim: true,
// description: [
// 'Sets the fill color.',
// 'Defaults to a half-transparent variant of the line color,',
// 'marker color, or marker line color, whichever is available.'
// ].join(' ')
// },
// fillpattern: {
// valType: 'string',
// // string type usually doesn't take values... this one should really be
// // a special type or at least a special coercion function, from the GUI
// // you only get these values but elsewhere the user can supply a list of
// // dash lengths in px, and it will be honored
// values: ['solid', 'dot', 'dash', 'longdash', 'dashdot', 'longdashdot'],
// dflt: 'solid',
// editType: 'style',
// description: [
// 'Sets the dash style of lines. Set to a dash type string',
// '(*solid*, *dot*, *dash*, *longdash*, *dashdot*, or *longdashdot*)',
// 'or a dash length list in px (eg *5px,10px,2px,2px*).'
// ].join(' ')
// },
// marker:{
// // symbol: {
// // valType: 'enumerated',
// // values: Drawing.symbolList,
// // dflt: 'circle',
// // arrayOk: true,
// // editType: 'style',
// // description: [
// // 'Sets the marker symbol type.',
// // 'Adding 100 is equivalent to appending *-open* to a symbol name.',
// // 'Adding 200 is equivalent to appending *-dot* to a symbol name.',
// // 'Adding 300 is equivalent to appending *-open-dot*',
// // 'or *dot-open* to a symbol name.'
// // ].join(' ')
// // },
// opacity: {
// valType: 'number',
// min: 0,
// max: 1,
// arrayOk: true,
// editType: 'style',
// anim: true,
// description: 'Sets the marker opacity.'
// },
// size: {
// valType: 'number',
// min: 0,
// dflt: 6,
// arrayOk: true,
// editType: 'calc',
// anim: true,
// description: 'Sets the marker size (in px).'
// },
// maxdisplayed: {
// valType: 'number',
// min: 0,
// dflt: 0,
// editType: 'plot',
// description: [
// 'Sets a maximum number of points to be drawn on the graph.',
// '*0* corresponds to no limit.'
// ].join(' ')
// },
// sizeref: {
// valType: 'number',
// dflt: 1,
// editType: 'calc',
// description: [
// 'Has an effect only if `marker.size` is set to a numerical array.',
// 'Sets the scale factor used to determine the rendered size of',
// 'marker points. Use with `sizemin` and `sizemode`.'
// ].join(' ')
// },
// sizemin: {
// valType: 'number',
// min: 0,
// dflt: 0,
// editType: 'calc',
// description: [
// 'Has an effect only if `marker.size` is set to a numerical array.',
// 'Sets the minimum size (in px) of the rendered marker points.'
// ].join(' ')
// },
// sizemode: {
// valType: 'enumerated',
// values: ['diameter', 'area'],
// dflt: 'diameter',
// editType: 'calc',
// description: [
// 'Has an effect only if `marker.size` is set to a numerical array.',
// 'Sets the rule for which the data in `size` is converted',
// 'to pixels.'
// ].join(' ')
// },
//
// line: {
// width: {
// valType: 'number',
// min: 0,
// arrayOk: true,
// editType: 'style',
// anim: true,
// description: 'Sets the width (in px) of the lines bounding the marker points.'
// },
// },
// gradient: {
// type: {
// valType: 'enumerated',
// values: ['radial', 'horizontal', 'vertical', 'none'],
// arrayOk: true,
// dflt: 'none',
// editType: 'calc',
// description: [
// 'Sets the type of gradient used to fill the markers'
// ].join(' ')
// },
// color: {
// valType: 'string',
// arrayOk: true,
// editType: 'calc',
// description: [
// 'Sets the final color of the gradient fill:',
// 'the center color for radial, the right for horizontal,',
// 'or the bottom for vertical.',
// ].join(' ')
// },
// },
// },
//
// selected: {
// marker: {
// opacity: {
// valType: 'number',
// min: 0,
// max: 1,
// editType: 'style',
// description: 'Sets the marker opacity of selected points.'
// },
// color: {
// valType: 'string',
// editType: 'style',
// description: 'Sets the marker color of selected points.'
// },
// size: {
// valType: 'number',
// min: 0,
// editType: 'style',
// description: 'Sets the marker size of selected points.'
// },
// },
// textfont: {
// color: {
// valType: 'string',
// editType: 'style',
// description: 'Sets the text font color of selected points.'
// },
// },
// },
// unselected: {
// marker: {
// opacity: {
// valType: 'number',
// min: 0,
// max: 1,
// editType: 'style',
// description: 'Sets the marker opacity of unselected points, applied only when a selection exists.'
// },
// color: {
// valType: 'string',
// editType: 'style',
// description: 'Sets the marker color of unselected points, applied only when a selection exists.'
// },
// size: {
// valType: 'number',
// min: 0,
// editType: 'style',
// description: 'Sets the marker size of unselected points, applied only when a selection exists.'
// },
// },
// textfont: {
// color: {
// valType: 'string',
// editType: 'style',
// description: 'Sets the text font color of unselected points, applied only when a selection exists.'
// },
// },
// },
//
// textposition: {
// valType: 'enumerated',
// values: [
// 'top left', 'top center', 'top right',
// 'middle left', 'middle center', 'middle right',
// 'bottom left', 'bottom center', 'bottom right'
// ],
// dflt: 'middle center',
// arrayOk: true,
// editType: 'calc',
// description: [
// 'Sets the positions of the `text` elements',
// 'with respects to the (x,y) coordinates.'
// ].join(' ')
// },
// textfont: fontAttrs({
// editType: 'calc',
// colorEditType: 'style',
// arrayOk: true,
// description: 'Sets the text font.'
// }),
//custom json
CustomSettings: {
json:{
valType: 'custom',
dflt: {},
description: [
'This may be used to add any other settings from Plotly.js',
].join(' ')
}
},
};
var dfltTracesConfig = {};
function crawl(src, target) {
for(var k in src) {
var obj = src[k];
if(obj.valType) {
target[k] = obj.dflt;
} else {
if(!target[k]) {
target[k] = {};
}
crawl(obj, target[k]);
}
}
}
crawl(tracesAttributes, dfltTracesConfig);
|
PypiClean
|
/high_vision-0.1-py3-none-any.whl/high_vision/Detection/yolov4/tf/train.py
|
from os import makedirs, path
import tensorflow as tf
from tensorflow.keras import backend
from tensorflow.keras.callbacks import Callback
from tensorflow.keras.losses import BinaryCrossentropy, Loss, Reduction
class YOLOv4Loss(Loss):
def __init__(self, batch_size, iou_type, verbose=0):
super(YOLOv4Loss, self).__init__(name="YOLOv4Loss")
self.batch_size = batch_size
if iou_type == "iou":
self.bbox_xiou = bbox_iou
elif iou_type == "giou":
self.bbox_xiou = bbox_giou
elif iou_type == "ciou":
self.bbox_xiou = bbox_ciou
self.verbose = verbose
self.while_cond = lambda i, iou: tf.less(i, self.batch_size)
self.prob_binaryCrossentropy = BinaryCrossentropy(
reduction=Reduction.NONE
)
def call(self, y_true, y_pred):
"""
@param y_true: Dim(batch, grid, grid, 3,
(b_x, b_y, b_w, b_h, conf, prob_0, prob_1, ...))
@param y_pred: Dim(batch, grid, grid, 3,
(b_x, b_y, b_w, b_h, conf, prob_0, prob_1, ...))
"""
if len(y_pred.shape) == 4:
_, grid_size, _, box_size = y_pred.shape
box_size = box_size // 3
else:
_, grid_size, _, _, box_size = y_pred.shape
y_true = tf.reshape(
y_true, shape=(-1, grid_size * grid_size * 3, box_size)
)
y_pred = tf.reshape(
y_pred, shape=(-1, grid_size * grid_size * 3, box_size)
)
truth_xywh = y_true[..., 0:4]
truth_conf = y_true[..., 4:5]
truth_prob = y_true[..., 5:]
num_classes = truth_prob.shape[-1]
pred_xywh = y_pred[..., 0:4]
pred_conf = y_pred[..., 4:5]
pred_prob = y_pred[..., 5:]
one_obj = truth_conf
num_obj = tf.reduce_sum(one_obj, axis=[1, 2])
one_noobj = 1.0 - one_obj
# Dim(batch, grid * grid * 3, 1)
one_obj_mask = one_obj > 0.5
zero = tf.zeros((1, grid_size * grid_size * 3, 1), dtype=tf.float32)
# IoU Loss
xiou = self.bbox_xiou(truth_xywh, pred_xywh)
xiou_scale = 2.0 - truth_xywh[..., 2:3] * truth_xywh[..., 3:4]
xiou_loss = one_obj * xiou_scale * (1.0 - xiou[..., tf.newaxis])
xiou_loss = 3 * tf.reduce_mean(tf.reduce_sum(xiou_loss, axis=(1, 2)))
# Confidence Loss
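# For each predicted box, the tf.while_loop below computes its best IoU against
# all ground-truth boxes of the same image. The no-object confidence loss is then
# only applied where that best IoU stays below 0.5, so predictions overlapping a
# ground-truth box are not penalized as background.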
i0 = tf.constant(0)
def body(i, max_iou):
object_mask = tf.reshape(one_obj_mask[i, ...], shape=(-1,))
truth_bbox = tf.boolean_mask(truth_xywh[i, ...], mask=object_mask)
# grid * grid * 3, 1, xywh
# 1, answer, xywh
# => grid * grid * 3, answer
_max_iou0 = tf.cond(
tf.equal(num_obj[i], 0),
lambda: zero,
lambda: tf.reshape(
tf.reduce_max(
bbox_iou(
pred_xywh[i, :, tf.newaxis, :],
truth_bbox[tf.newaxis, ...],
),
axis=-1,
),
shape=(1, -1, 1),
),
)
# 1, grid * grid * 3, 1
_max_iou1 = tf.cond(
tf.equal(i, 0),
lambda: _max_iou0,
lambda: tf.concat([max_iou, _max_iou0], axis=0),
)
return tf.add(i, 1), _max_iou1
_, max_iou = tf.while_loop(
self.while_cond,
body,
[i0, zero],
shape_invariants=[
i0.get_shape(),
tf.TensorShape([None, grid_size * grid_size * 3, 1]),
],
)
conf_obj_loss = one_obj * (0.0 - backend.log(pred_conf + 1e-9))
conf_noobj_loss = (
one_noobj
* tf.cast(max_iou < 0.5, dtype=tf.float32)
* (0.0 - backend.log(1.0 - pred_conf + 1e-9))
)
conf_loss = tf.reduce_mean(
tf.reduce_sum(conf_obj_loss + conf_noobj_loss, axis=(1, 2))
)
# Probabilities Loss
prob_loss = self.prob_binaryCrossentropy(truth_prob, pred_prob)
prob_loss = one_obj * prob_loss[..., tf.newaxis]
prob_loss = tf.reduce_mean(
tf.reduce_sum(prob_loss, axis=(1, 2)) * num_classes
)
total_loss = xiou_loss + conf_loss + prob_loss
if self.verbose != 0:
tf.print(
"grid:",
grid_size,
"iou_loss:",
xiou_loss,
"conf_loss:",
conf_loss,
"prob_loss:",
prob_loss,
"total_loss",
total_loss,
)
return total_loss
def bbox_iou(bboxes1, bboxes2):
"""
@param bboxes1: (a, b, ..., 4)
@param bboxes2: (A, B, ..., 4)
x:X is 1:n or n:n or n:1
@return (max(a,A), max(b,B), ...)
ex) (4,):(3,4) -> (3,)
(2,1,4):(2,3,4) -> (2,3)
"""
bboxes1_area = bboxes1[..., 2] * bboxes1[..., 3]
bboxes2_area = bboxes2[..., 2] * bboxes2[..., 3]
bboxes1_coor = tf.concat(
[
bboxes1[..., :2] - bboxes1[..., 2:] * 0.5,
bboxes1[..., :2] + bboxes1[..., 2:] * 0.5,
],
axis=-1,
)
bboxes2_coor = tf.concat(
[
bboxes2[..., :2] - bboxes2[..., 2:] * 0.5,
bboxes2[..., :2] + bboxes2[..., 2:] * 0.5,
],
axis=-1,
)
left_up = tf.maximum(bboxes1_coor[..., :2], bboxes2_coor[..., :2])
right_down = tf.minimum(bboxes1_coor[..., 2:], bboxes2_coor[..., 2:])
inter_section = tf.maximum(right_down - left_up, 0.0)
inter_area = inter_section[..., 0] * inter_section[..., 1]
union_area = bboxes1_area + bboxes2_area - inter_area
iou = inter_area / (union_area + 1e-8)
return iou
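# Worked example (illustrative): boxes are (center_x, center_y, w, h).
# b1 = (0.5, 0.5, 1.0, 1.0) -> corners (0.0, 0.0)-(1.0, 1.0), area 1.0
# b2 = (1.0, 0.5, 1.0, 1.0) -> corners (0.5, 0.0)-(1.5, 1.0), area 1.0
# intersection = 0.5 * 1.0 = 0.5, union = 1.0 + 1.0 - 0.5 = 1.5, iou = 0.5 / 1.5 ~ 0.333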
def bbox_giou(bboxes1, bboxes2):
"""
Generalized IoU
@param bboxes1: (a, b, ..., 4)
@param bboxes2: (A, B, ..., 4)
x:X is 1:n or n:n or n:1
@return (max(a,A), max(b,B), ...)
ex) (4,):(3,4) -> (3,)
(2,1,4):(2,3,4) -> (2,3)
"""
bboxes1_area = bboxes1[..., 2] * bboxes1[..., 3]
bboxes2_area = bboxes2[..., 2] * bboxes2[..., 3]
bboxes1_coor = tf.concat(
[
bboxes1[..., :2] - bboxes1[..., 2:] * 0.5,
bboxes1[..., :2] + bboxes1[..., 2:] * 0.5,
],
axis=-1,
)
bboxes2_coor = tf.concat(
[
bboxes2[..., :2] - bboxes2[..., 2:] * 0.5,
bboxes2[..., :2] + bboxes2[..., 2:] * 0.5,
],
axis=-1,
)
left_up = tf.maximum(bboxes1_coor[..., :2], bboxes2_coor[..., :2])
right_down = tf.minimum(bboxes1_coor[..., 2:], bboxes2_coor[..., 2:])
inter_section = tf.maximum(right_down - left_up, 0.0)
inter_area = inter_section[..., 0] * inter_section[..., 1]
union_area = bboxes1_area + bboxes2_area - inter_area
iou = inter_area / (union_area + 1e-8)
enclose_left_up = tf.minimum(bboxes1_coor[..., :2], bboxes2_coor[..., :2])
enclose_right_down = tf.maximum(
bboxes1_coor[..., 2:], bboxes2_coor[..., 2:]
)
enclose_section = enclose_right_down - enclose_left_up
enclose_area = enclose_section[..., 0] * enclose_section[..., 1]
giou = iou - (enclose_area - union_area) / (enclose_area + 1e-8)
return giou
def bbox_ciou(bboxes1, bboxes2):
"""
Complete IoU
@param bboxes1: (a, b, ..., 4)
@param bboxes2: (A, B, ..., 4)
x:X is 1:n or n:n or n:1
@return (max(a,A), max(b,B), ...)
ex) (4,):(3,4) -> (3,)
(2,1,4):(2,3,4) -> (2,3)
"""
bboxes1_area = bboxes1[..., 2] * bboxes1[..., 3]
bboxes2_area = bboxes2[..., 2] * bboxes2[..., 3]
bboxes1_coor = tf.concat(
[
bboxes1[..., :2] - bboxes1[..., 2:] * 0.5,
bboxes1[..., :2] + bboxes1[..., 2:] * 0.5,
],
axis=-1,
)
bboxes2_coor = tf.concat(
[
bboxes2[..., :2] - bboxes2[..., 2:] * 0.5,
bboxes2[..., :2] + bboxes2[..., 2:] * 0.5,
],
axis=-1,
)
left_up = tf.maximum(bboxes1_coor[..., :2], bboxes2_coor[..., :2])
right_down = tf.minimum(bboxes1_coor[..., 2:], bboxes2_coor[..., 2:])
inter_section = tf.maximum(right_down - left_up, 0.0)
inter_area = inter_section[..., 0] * inter_section[..., 1]
union_area = bboxes1_area + bboxes2_area - inter_area
iou = inter_area / (union_area + 1e-8)
enclose_left_up = tf.minimum(bboxes1_coor[..., :2], bboxes2_coor[..., :2])
enclose_right_down = tf.maximum(
bboxes1_coor[..., 2:], bboxes2_coor[..., 2:]
)
enclose_section = enclose_right_down - enclose_left_up
c_2 = enclose_section[..., 0] ** 2 + enclose_section[..., 1] ** 2
center_diagonal = bboxes2[..., :2] - bboxes1[..., :2]
rho_2 = center_diagonal[..., 0] ** 2 + center_diagonal[..., 1] ** 2
diou = iou - rho_2 / (c_2 + 1e-8)
v = (
(
tf.math.atan(bboxes1[..., 2] / (bboxes1[..., 3] + 1e-8))
- tf.math.atan(bboxes2[..., 2] / (bboxes2[..., 3] + 1e-8))
)
* 2
/ 3.1415926536
) ** 2
alpha = v / (1 - iou + v + 1e-8)
ciou = diou - alpha * v
return ciou
class SaveWeightsCallback(Callback):
def __init__(
self,
yolo,
dir_path: str = "trained-weights",
weights_type: str = "tf",
epoch_per_save: int = 1000,
):
super(SaveWeightsCallback, self).__init__()
self.yolo = yolo
self.weights_type = weights_type
self.epoch_per_save = epoch_per_save
makedirs(dir_path, exist_ok=True)
if self.yolo.tiny:
self.path_prefix = path.join(dir_path, "yolov4-tiny")
else:
self.path_prefix = path.join(dir_path, "yolov4")
if weights_type == "tf":
self.extension = "-checkpoint"
else:
self.extension = ".weights"
def on_train_end(self, logs=None):
self.yolo.save_weights(
"{}-final{}".format(self.path_prefix, self.extension),
weights_type=self.weights_type,
)
def on_epoch_end(self, epoch, logs=None):
if (epoch + 1) % self.epoch_per_save == 0:
self.yolo.save_weights(
"{}-{}{}".format(self.path_prefix, epoch + 1, self.extension),
weights_type=self.weights_type,
)
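# Minimal training sketch (not part of this module; names are assumptions): `yolo`
# stands for the high_vision YOLOv4 wrapper that exposes `.tiny`, `.model` and
# `.save_weights(...)`, and `train_dataset` for a tf.data pipeline yielding
# (images, y_true) batches shaped as described in YOLOv4Loss.call.
#
#   loss = YOLOv4Loss(batch_size=8, iou_type="ciou", verbose=1)
#   yolo.model.compile(optimizer=tf.keras.optimizers.Adam(1e-4), loss=loss)
#   yolo.model.fit(
#       train_dataset,
#       epochs=100,
#       callbacks=[SaveWeightsCallback(yolo, weights_type="tf", epoch_per_save=50)],
#   )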
|
PypiClean
|
/chartpy-0.1.12.tar.gz/chartpy-0.1.12/chartpy_examples/notebooks/static/js/jquery/src/traversing/findFilter.js
|
define([
"../core",
"../var/indexOf",
"./var/rneedsContext",
"../selector"
], function( jQuery, indexOf, rneedsContext ) {
var risSimple = /^.[^:#\[\.,]*$/;
// Implement the identical functionality for filter and not
function winnow( elements, qualifier, not ) {
if ( jQuery.isFunction( qualifier ) ) {
return jQuery.grep( elements, function( elem, i ) {
/* jshint -W018 */
return !!qualifier.call( elem, i, elem ) !== not;
});
}
if ( qualifier.nodeType ) {
return jQuery.grep( elements, function( elem ) {
return ( elem === qualifier ) !== not;
});
}
if ( typeof qualifier === "string" ) {
if ( risSimple.test( qualifier ) ) {
return jQuery.filter( qualifier, elements, not );
}
qualifier = jQuery.filter( qualifier, elements );
}
return jQuery.grep( elements, function( elem ) {
return ( indexOf.call( qualifier, elem ) >= 0 ) !== not;
});
}
jQuery.filter = function( expr, elems, not ) {
var elem = elems[ 0 ];
if ( not ) {
expr = ":not(" + expr + ")";
}
return elems.length === 1 && elem.nodeType === 1 ?
jQuery.find.matchesSelector( elem, expr ) ? [ elem ] : [] :
jQuery.find.matches( expr, jQuery.grep( elems, function( elem ) {
return elem.nodeType === 1;
}));
};
jQuery.fn.extend({
find: function( selector ) {
var i,
len = this.length,
ret = [],
self = this;
if ( typeof selector !== "string" ) {
return this.pushStack( jQuery( selector ).filter(function() {
for ( i = 0; i < len; i++ ) {
if ( jQuery.contains( self[ i ], this ) ) {
return true;
}
}
}) );
}
for ( i = 0; i < len; i++ ) {
jQuery.find( selector, self[ i ], ret );
}
// Needed because $( selector, context ) becomes $( context ).find( selector )
ret = this.pushStack( len > 1 ? jQuery.unique( ret ) : ret );
ret.selector = this.selector ? this.selector + " " + selector : selector;
return ret;
},
filter: function( selector ) {
return this.pushStack( winnow(this, selector || [], false) );
},
not: function( selector ) {
return this.pushStack( winnow(this, selector || [], true) );
},
is: function( selector ) {
return !!winnow(
this,
// If this is a positional/relative selector, check membership in the returned set
// so $("p:first").is("p:last") won't return true for a doc with two "p".
typeof selector === "string" && rneedsContext.test( selector ) ?
jQuery( selector ) :
selector || [],
false
).length;
}
});
});
|
PypiClean
|
/adm_osc-0.0.1-py3-none-any.whl/adm_osc/handler.py
|
from . import protocol
__all__ = ['extract_indexes', 'adm_handler']
# _ _
# | |__ ___| |_ __ ___ _ __ ___
# | '_ \ / _ \ | '_ \ / _ \ '__/ __|
# | | | | __/ | |_) | __/ | \__ \
# |_| |_|\___|_| .__/ \___|_| |___/
# |_|
def extract_indexes(idx: str):
"""
+ "*" means all objects
+ [n-m] means range from "n" to "m"
+ [n, m, o] means specific object defined by n, m and o index ...
+ single int value == single object
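Illustrative examples, derived from the rules above:
>>> extract_indexes('*')
'all'
>>> extract_indexes('[2-5]')
{'from': 2, 'to': 5}
>>> extract_indexes('{1, 3, 7}')
[1, 3, 7]
>>> extract_indexes('4')
4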
"""
if type(idx) is not str:
return idx
if idx == '*':
return 'all'
# allow braces and parentheses instead of brackets; just replace them
idx = idx.replace('{', '[').replace('}', ']')
idx = idx.replace('(', '[').replace(')', ']')
if idx.startswith('[') and idx.endswith(']'):
# remove brackets
indexes = idx[1:-1]
# if exactly one "-" is found, it should be a range
if indexes.count('-') == 1:
indexes = indexes.split('-')
if len(indexes) == 2:
return {'from': int(indexes[0]), 'to': int(indexes[1])}
# if "," is found, it should be multiple index values
indexes = indexes.replace('-', ',').replace(' ', '').strip()
if indexes.startswith(','):
indexes = indexes[1:]
if indexes.endswith(','):
indexes = indexes[:-1]
indexes = indexes.split(',')
return [int(i) for i in indexes]
else:
# no range or multiple values found!
# so, consider it a single object value and just convert it to int
return int(idx)
# _ _ _
# | |__ __ _ _ __ __| | | ___ _ __ ___
# | '_ \ / _` | '_ \ / _` | |/ _ \ '__/ __|
# | | | | (_| | | | | (_| | | __/ | \__ \
# |_| |_|\__,_|_| |_|\__,_|_|\___|_| |___/
def adm_handler(address, *args):
"""
1 - check ADM message header at start of address
2 - extract target : object | setup ...
3 - extract object index:
+ single int value == single object
+ "*" means all objects
+ [n-m] means range from "n" to "m"
+ [n, m, o] means specific object defined by n, m and o index ...
4 - extract and validate command name; It should be in the provided protocol
5 - extract and validate all arguments
"""
#
it = address.split('/')
# 1
if it[1] != protocol.message_root:
raise ValueError(f'ERROR: unrecognized ADM address : "{address}" it should start with "/{protocol.message_root}/"')
# 2
target = it[2]
if target != 'obj' and target != 'setup':
raise ValueError(f'ERROR: unrecognized ADM address : "{address}" ! unknown target "/{target}/"')
# 3
objects = extract_indexes(it[3])
# 4
command = it[4]
parameter = protocol.find_parameter(command)
if parameter is None:
raise ValueError(f'ERROR: unrecognized ADM address : "{address}" ! unknown command "/{command}/"')
# filter touch / release messages for now !!!
# TODO: check with ADM-OSC group how we want to handle this
is_touch_release = len(args) == 1 and type(args[0]) is str and (args[0] == 'touch' or args[0] == 'release')
if not is_touch_release:
if len(args) != parameter.get_number_of_values():
raise ValueError(
f'ERROR: arguments are malformed for "{address} :: {args}" ! '
f'bad number of arguments ! provided: {len(args)} - Expected: {parameter.get_number_of_values()}')
def _type_to_string(val_) -> str:
return f'{type(val_)}'.replace("<class '", "").replace("'>", "")
arguments_errors = []
parameters = parameter.get_parameters()
for i, param in enumerate(parameters):
_min = param.get_min_value()
_max = param.get_max_value()
_typ = param.type
val = args[i]
# else check all values
if _typ == protocol.Type.Float and type(val) is not float:
arguments_errors.append(f'argument {i} "{val}" type mismatch ! float is expected but "{_type_to_string(val)}" is provided')
elif _typ == protocol.Type.Int and type(val) is not int:
arguments_errors.append(f'argument {i} "{val}" type mismatch ! integer is expected but "{_type_to_string(val)}" is provided')
elif _typ == protocol.Type.String and type(val) is not str:
arguments_errors.append(f'argument {i} "{val}" type mismatch ! string is expected but "{_type_to_string(val)}" is provided')
elif val < _min:
arguments_errors.append(f'argument {i} "{val}" out of range ! it should be greater or equal than "{_min}"')
elif val > _max:
arguments_errors.append(f'argument {i} "{val}" out of range ! it should be less or equal than "{_max}"')
if len(arguments_errors) > 0:
errors = f'ERROR: arguments are malformed for "{address} :: {args}":\n'
for error in arguments_errors:
errors += f'\t{error}\n'
raise ValueError(errors)
return target, objects, parameter, args
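# Example wiring (a sketch only, not part of this module): it assumes the
# third-party python-osc package and that `protocol.message_root` is "adm", so
# incoming addresses look like "/adm/obj/<index>/<command>".
#
#   from pythonosc.dispatcher import Dispatcher
#   from pythonosc.osc_server import BlockingOSCUDPServer
#   from adm_osc.handler import adm_handler
#
#   dispatcher = Dispatcher()
#   dispatcher.map("/adm/*", adm_handler)  # python-osc calls handler(address, *args)
#   BlockingOSCUDPServer(("127.0.0.1", 9000), dispatcher).serve_forever()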
|
PypiClean
|
/textgrid_tools-0.0.8-py3-none-any.whl/textgrid_tools/intervals/common.py
|
from typing import Generator, Iterable, List, Optional, Set, Tuple, Union
from textgrid.textgrid import Interval, IntervalTier
from textgrid_tools.helper import get_mark, interval_is_None_or_whitespace
def merge_intervals(intervals: List[Interval], join_symbol: str, ignore_empty: bool) -> Interval:
assert len(intervals) > 0
marks = (get_mark(interval) for interval in intervals)
if ignore_empty:
marks = (m for m in marks if m != "")
mark = join_symbol.join(marks)
first_interval = intervals[0]
last_interval = intervals[-1]
interval = Interval(
minTime=first_interval.minTime,
maxTime=last_interval.maxTime,
mark=mark,
)
return interval
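# Illustrative example (assuming get_mark simply returns each interval's text mark):
# merging [Interval(0.0, 0.4, "hello"), Interval(0.4, 0.5, ""), Interval(0.5, 1.0, "world")]
# with join_symbol=" " and ignore_empty=True yields Interval(0.0, 1.0, "hello world");
# with ignore_empty=False the empty mark is kept and the result mark is "hello  world".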
def replace_intervals(tier: IntervalTier, intervals: List[Interval], replace_with: List[Interval]) -> None:
assert len(intervals) > 0
assert len(replace_with) > 0
assert intervals[0].minTime == replace_with[0].minTime
assert intervals[-1].maxTime == replace_with[-1].maxTime
from_index = tier.intervals.index(intervals[0])
for interval in intervals:
tier.intervals.remove(interval)
for interval in reversed(replace_with):
tier.intervals.insert(from_index, interval)
def group_adjacent_pauses(intervals: Iterable[Interval]) -> Generator[Union[Interval, List[Interval]], None, None]:
pause_group = []
for interval in intervals:
is_pause = interval_is_None_or_whitespace(interval)
if is_pause:
pause_group.append(interval)
else:
if len(pause_group) > 0:
yield pause_group
pause_group = []
yield interval
if len(pause_group) > 0:
yield pause_group
def group_adjacent_intervals(intervals: Iterable[Interval], marks: Set[str]) -> Generator[Union[Interval, List[Interval]], None, None]:
mark_group = []
for interval in intervals:
has_mark = interval.mark in marks
if has_mark:
mark_group.append(interval)
else:
if len(mark_group) > 0:
yield mark_group
mark_group = []
yield interval
if len(mark_group) > 0:
yield mark_group
def group_adjacent_pauses2(intervals: Iterable[Interval]) -> Generator[Tuple[List[Interval], bool], None, None]:
pause_group = []
for interval in intervals:
is_pause = interval_is_None_or_whitespace(interval)
if is_pause:
pause_group.append(interval)
else:
if len(pause_group) > 0:
yield pause_group, True
pause_group = []
yield [interval], False
if len(pause_group) > 0:
yield pause_group, True
def group_adjacent_content_and_pauses(intervals: Iterable[Interval]) -> Generator[Tuple[List[Interval], bool], None, None]:
current_group = []
current_group_is_pause: Optional[bool] = None
for interval in intervals:
is_pause = interval_is_None_or_whitespace(interval)
same_group = current_group_is_pause is not None and current_group_is_pause == is_pause
if not same_group and len(current_group) > 0:
assert current_group_is_pause is not None
yield current_group, current_group_is_pause
current_group = []
current_group.append(interval)
current_group_is_pause = is_pause
if len(current_group) > 0:
assert current_group_is_pause is not None
yield current_group, current_group_is_pause
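# Illustrative example: for intervals marked ["a", "", " ", "b", "c"],
# group_adjacent_content_and_pauses yields ([a], False), (["", " "], True), ([b, c], False),
# i.e. maximal runs of content intervals and of pause/whitespace intervals, each
# flagged with whether the run is a pause run (via interval_is_None_or_whitespace).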
|
PypiClean
|
/UPY-0.35.1.tar.gz/UPY-0.35.1/upy/contrib/cked/static/cked/ckeditor/lang/nb.js
|
/*
Copyright (c) 2003-2013, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.html or http://ckeditor.com/license
*/
CKEDITOR.lang['nb']={"dir":"ltr","editor":"Rikteksteditor","common":{"editorHelp":"Trykk ALT 0 for hjelp","browseServer":"Bla igjennom server","url":"URL","protocol":"Protokoll","upload":"Last opp","uploadSubmit":"Send det til serveren","image":"Bilde","flash":"Flash","form":"Skjema","checkbox":"Avmerkingsboks","radio":"Alternativknapp","textField":"Tekstboks","textarea":"Tekstområde","hiddenField":"Skjult felt","button":"Knapp","select":"Rullegardinliste","imageButton":"Bildeknapp","notSet":"<ikke satt>","id":"Id","name":"Navn","langDir":"Språkretning","langDirLtr":"Venstre til høyre (VTH)","langDirRtl":"Høyre til venstre (HTV)","langCode":"Språkkode","longDescr":"Utvidet beskrivelse","cssClass":"Stilarkklasser","advisoryTitle":"Tittel","cssStyle":"Stil","ok":"OK","cancel":"Avbryt","close":"Lukk","preview":"Forhåndsvis","resize":"Dra for å skalere","generalTab":"Generelt","advancedTab":"Avansert","validateNumberFailed":"Denne verdien er ikke et tall.","confirmNewPage":"Alle ulagrede endringer som er gjort i dette innholdet vil bli tapt. Er du sikker på at du vil laste en ny side?","confirmCancel":"Noen av valgene har blitt endret. Er du sikker på at du vil lukke dialogen?","options":"Valg","target":"Mål","targetNew":"Nytt vindu (_blank)","targetTop":"Hele vindu (_top)","targetSelf":"Samme vindu (_self)","targetParent":"Foreldrevindu (_parent)","langDirLTR":"Venstre til høyre (VTH)","langDirRTL":"Høyre til venstre (HTV)","styles":"Stil","cssClasses":"Stilarkklasser","width":"Bredde","height":"Høyde","align":"Juster","alignLeft":"Venstre","alignRight":"Høyre","alignCenter":"Midtjuster","alignTop":"Topp","alignMiddle":"Midten","alignBottom":"Bunn","invalidValue":"Ugyldig verdi.","invalidHeight":"Høyde må være et tall.","invalidWidth":"Bredde må være et tall.","invalidCssLength":"Den angitte verdien for feltet \"%1\" må være et positivt tall med eller uten en gyldig CSS-målingsenhet (px, %, in, cm, mm, em, ex, pt, eller pc).","invalidHtmlLength":"Den angitte verdien for feltet \"%1\" må være et positivt tall med eller uten en gyldig HTML-målingsenhet (px eller %).","invalidInlineStyle":"Verdi angitt for inline stil må bestå av en eller flere sett med formatet \"navn : verdi\", separert med semikolon","cssLengthTooltip":"Skriv inn et tall for en piksel-verdi eller et tall med en gyldig CSS-enhet (px, %, in, cm, mm, em, ex, pt, eller pc).","unavailable":"%1<span class=\"cke_accessibility\">, utilgjenglig</span>"},"about":{"copy":"Copyright © $1. Alle rettigheter reservert.","dlgTitle":"Om CKEditor","help":"Se $1 for hjelp.","moreInfo":"For lisensieringsinformasjon, vennligst besøk vårt nettsted:","title":"Om CKEditor","userGuide":"CKEditors brukerveiledning"},"basicstyles":{"bold":"Fet","italic":"Kursiv","strike":"Gjennomstreking","subscript":"Senket skrift","superscript":"Hevet skrift","underline":"Understreking"},"bidi":{"ltr":"Tekstretning fra venstre til høyre","rtl":"Tekstretning fra høyre til venstre"},"blockquote":{"toolbar":"Sitatblokk"},"clipboard":{"copy":"Kopier","copyError":"Din nettlesers sikkerhetsinstillinger tillater ikke automatisk kopiering av tekst. Vennligst bruk snarveien (Ctrl/Cmd+C).","cut":"Klipp ut","cutError":"Din nettlesers sikkerhetsinstillinger tillater ikke automatisk utklipping av tekst. 
Vennligst bruk snarveien (Ctrl/Cmd+X).","paste":"Lim inn","pasteArea":"Innlimingsområde","pasteMsg":"Vennligst lim inn i følgende boks med tastaturet (<STRONG>Ctrl/Cmd+V</STRONG>) og trykk <STRONG>OK</STRONG>.","securityMsg":"Din nettlesers sikkerhetsinstillinger gir ikke redigeringsverktøyet direkte tilgang til utklippstavlen. Du må derfor lime det inn på nytt i dette vinduet.","title":"Lim inn"},"colorbutton":{"auto":"Automatisk","bgColorTitle":"Bakgrunnsfarge","colors":{"000":"Svart","800000":"Rødbrun","8B4513":"Salbrun","2F4F4F":"Grønnsvart","008080":"Blågrønn","000080":"Marineblått","4B0082":"Indigo","696969":"Mørk grå","B22222":"Mørkerød","A52A2A":"Brun","DAA520":"Lys brun","006400":"Mørk grønn","40E0D0":"Turkis","0000CD":"Medium blå","800080":"Purpur","808080":"Grå","F00":"Rød","FF8C00":"Mørk oransje","FFD700":"Gull","008000":"Grønn","0FF":"Cyan","00F":"Blå","EE82EE":"Fiolett","A9A9A9":"Svak grå","FFA07A":"Rosa-oransje","FFA500":"Oransje","FFFF00":"Gul","00FF00":"Lime","AFEEEE":"Svak turkis","ADD8E6":"Lys Blå","DDA0DD":"Plomme","D3D3D3":"Lys grå","FFF0F5":"Svak lavendelrosa","FAEBD7":"Antikk-hvit","FFFFE0":"Lys gul","F0FFF0":"Honningmelon","F0FFFF":"Svakt asurblått","F0F8FF":"Svak cyan","E6E6FA":"Lavendel","FFF":"Hvit"},"more":"Flere farger...","panelTitle":"Farger","textColorTitle":"Tekstfarge"},"colordialog":{"clear":"Tøm","highlight":"Merk","options":"Alternativer for farge","selected":"Valgt","title":"Velg farge"},"templates":{"button":"Maler","emptyListMsg":"(Ingen maler definert)","insertOption":"Erstatt gjeldende innhold","options":"Alternativer for mal","selectPromptMsg":"Velg malen du vil åpne i redigeringsverktøyet:","title":"Innholdsmaler"},"contextmenu":{"options":"Alternativer for høyreklikkmeny"},"div":{"IdInputLabel":"Id","advisoryTitleInputLabel":"Tittel","cssClassInputLabel":"Stilark-klasser","edit":"Rediger Div","inlineStyleInputLabel":"Inlinestiler","langDirLTRLabel":"Venstre til høyre (VTH)","langDirLabel":"Språkretning","langDirRTLLabel":"Høyre til venstre (HTV)","languageCodeInputLabel":" Språkkode","remove":"Fjern Div","styleSelectLabel":"Stil","title":"Sett inn Div Container","toolbar":"Sett inn Div Container"},"toolbar":{"toolbarCollapse":"Skjul verktøylinje","toolbarExpand":"Vis verktøylinje","toolbarGroups":{"document":"Dokument","clipboard":"Utklippstavle/Angre","editing":"Redigering","forms":"Skjema","basicstyles":"Basisstiler","paragraph":"Avsnitt","links":"Lenker","insert":"Innsetting","styles":"Stiler","colors":"Farger","tools":"Verktøy"},"toolbars":"Verktøylinjer for editor"},"elementspath":{"eleLabel":"Element-sti","eleTitle":"%1 element"},"list":{"bulletedlist":"Legg til/Fjern punktmerket liste","numberedlist":"Legg til/Fjern nummerert liste"},"indent":{"indent":"Øk innrykk","outdent":"Reduser innrykk"},"find":{"find":"Søk","findOptions":"Søkealternativer","findWhat":"Søk etter:","matchCase":"Skill mellom store og små bokstaver","matchCyclic":"Søk i hele dokumentet","matchWord":"Bare hele ord","notFoundMsg":"Fant ikke søketeksten.","replace":"Erstatt","replaceAll":"Erstatt alle","replaceSuccessMsg":"%1 tilfelle(r) erstattet.","replaceWith":"Erstatt med:","title":"Søk og erstatt"},"fakeobjects":{"anchor":"Anker","flash":"Flash-animasjon","hiddenfield":"Skjult felt","iframe":"IFrame","unknown":"Ukjent objekt"},"flash":{"access":"Scripttilgang","accessAlways":"Alltid","accessNever":"Aldri","accessSameDomain":"Samme domene","alignAbsBottom":"Abs bunn","alignAbsMiddle":"Abs midten","alignBaseline":"Bunnlinje","alignTextTop":"Tekst 
topp","bgcolor":"Bakgrunnsfarge","chkFull":"Tillat fullskjerm","chkLoop":"Loop","chkMenu":"Slå på Flash-meny","chkPlay":"Autospill","flashvars":"Variabler for flash","hSpace":"HMarg","properties":"Egenskaper for Flash-objekt","propertiesTab":"Egenskaper","quality":"Kvalitet","qualityAutoHigh":"Auto høy","qualityAutoLow":"Auto lav","qualityBest":"Best","qualityHigh":"Høy","qualityLow":"Lav","qualityMedium":"Medium","scale":"Skaler","scaleAll":"Vis alt","scaleFit":"Skaler til å passe","scaleNoBorder":"Ingen ramme","title":"Flash-egenskaper","vSpace":"VMarg","validateHSpace":"HMarg må være et tall.","validateSrc":"Vennligst skriv inn lenkens url.","validateVSpace":"VMarg må være et tall.","windowMode":"Vindumodus","windowModeOpaque":"Opaque","windowModeTransparent":"Gjennomsiktig","windowModeWindow":"Vindu"},"font":{"fontSize":{"label":"Størrelse","voiceLabel":"Font Størrelse","panelTitle":"Størrelse"},"label":"Skrift","panelTitle":"Skrift","voiceLabel":"Font"},"forms":{"button":{"title":"Egenskaper for knapp","text":"Tekst (verdi)","type":"Type","typeBtn":"Knapp","typeSbm":"Send","typeRst":"Nullstill"},"checkboxAndRadio":{"checkboxTitle":"Egenskaper for avmerkingsboks","radioTitle":"Egenskaper for alternativknapp","value":"Verdi","selected":"Valgt"},"form":{"title":"Egenskaper for skjema","menu":"Egenskaper for skjema","action":"Handling","method":"Metode","encoding":"Encoding"},"hidden":{"title":"Egenskaper for skjult felt","name":"Navn","value":"Verdi"},"select":{"title":"Egenskaper for rullegardinliste","selectInfo":"Info","opAvail":"Tilgjenglige alternativer","value":"Verdi","size":"Størrelse","lines":"Linjer","chkMulti":"Tillat flervalg","opText":"Tekst","opValue":"Verdi","btnAdd":"Legg til","btnModify":"Endre","btnUp":"Opp","btnDown":"Ned","btnSetValue":"Sett som valgt","btnDelete":"Slett"},"textarea":{"title":"Egenskaper for tekstområde","cols":"Kolonner","rows":"Rader"},"textfield":{"title":"Egenskaper for tekstfelt","name":"Navn","value":"Verdi","charWidth":"Tegnbredde","maxChars":"Maks antall tegn","type":"Type","typeText":"Tekst","typePass":"Passord","typeEmail":"Email","typeSearch":"Search","typeTel":"Telephone Number","typeUrl":"URL"}},"format":{"label":"Format","panelTitle":"Avsnittsformat","tag_address":"Adresse","tag_div":"Normal (DIV)","tag_h1":"Overskrift 1","tag_h2":"Overskrift 2","tag_h3":"Overskrift 3","tag_h4":"Overskrift 4","tag_h5":"Overskrift 5","tag_h6":"Overskrift 6","tag_p":"Normal","tag_pre":"Formatert"},"horizontalrule":{"toolbar":"Sett inn horisontal linje"},"iframe":{"border":"Viss ramme rundt iframe","noUrl":"Vennligst skriv inn URL for iframe","scrolling":"Aktiver scrollefelt","title":"Egenskaper for IFrame","toolbar":"IFrame"},"image":{"alertUrl":"Vennligst skriv bilde-urlen","alt":"Alternativ tekst","border":"Ramme","btnUpload":"Send det til serveren","button2Img":"Vil du endre den valgte bildeknappen til et vanlig bilde?","hSpace":"HMarg","img2Button":"Vil du endre det valgte bildet til en bildeknapp?","infoTab":"Bildeinformasjon","linkTab":"Lenke","lockRatio":"Lås forhold","menu":"Bildeegenskaper","resetSize":"Tilbakestill størrelse","title":"Bildeegenskaper","titleButton":"Egenskaper for bildeknapp","upload":"Last opp","urlMissing":"Bildets adresse mangler.","vSpace":"VMarg","validateBorder":"Ramme må være et heltall.","validateHSpace":"HMarg må være et heltall.","validateVSpace":"VMarg må være et heltall."},"smiley":{"options":"Alternativer for smil","title":"Sett inn 
smil","toolbar":"Smil"},"justify":{"block":"Blokkjuster","center":"Midtstill","left":"Venstrejuster","right":"Høyrejuster"},"link":{"acccessKey":"Aksessknapp","advanced":"Avansert","advisoryContentType":"Type","advisoryTitle":"Tittel","anchor":{"toolbar":"Sett inn/Rediger anker","menu":"Egenskaper for anker","title":"Egenskaper for anker","name":"Ankernavn","errorName":"Vennligst skriv inn ankernavnet","remove":"Fjern anker"},"anchorId":"Element etter ID","anchorName":"Anker etter navn","charset":"Lenket tegnsett","cssClasses":"Stilarkklasser","emailAddress":"E-postadresse","emailBody":"Melding","emailSubject":"Meldingsemne","id":"Id","info":"Lenkeinfo","langCode":"Språkkode","langDir":"Språkretning","langDirLTR":"Venstre til høyre (VTH)","langDirRTL":"Høyre til venstre (HTV)","menu":"Rediger lenke","name":"Navn","noAnchors":"(Ingen anker i dokumentet)","noEmail":"Vennligst skriv inn e-postadressen","noUrl":"Vennligst skriv inn lenkens URL","other":"<annen>","popupDependent":"Avhenging (Netscape)","popupFeatures":"Egenskaper for popup-vindu","popupFullScreen":"Fullskjerm (IE)","popupLeft":"Venstre posisjon","popupLocationBar":"Adresselinje","popupMenuBar":"Menylinje","popupResizable":"Skalerbar","popupScrollBars":"Scrollbar","popupStatusBar":"Statuslinje","popupToolbar":"Verktøylinje","popupTop":"Topp-posisjon","rel":"Relasjon (rel)","selectAnchor":"Velg et anker","styles":"Stil","tabIndex":"Tabindeks","target":"Mål","targetFrame":"<ramme>","targetFrameName":"Målramme","targetPopup":"<popup-vindu>","targetPopupName":"Navn på popup-vindu","title":"Lenke","toAnchor":"Lenke til anker i teksten","toEmail":"E-post","toUrl":"URL","toolbar":"Sett inn/Rediger lenke","type":"Lenketype","unlink":"Fjern lenke","upload":"Last opp"},"liststyle":{"armenian":"Armensk nummerering","bulletedTitle":"Egenskaper for punktmerket liste","circle":"Sirkel","decimal":"Tall (1, 2, 3, osv.)","decimalLeadingZero":"Tall, med førstesiffer null (01, 02, 03, osv.)","disc":"Disk","georgian":"Georgisk nummerering (an, ban, gan, osv.)","lowerAlpha":"Alfabetisk, små (a, b, c, d, e, osv.)","lowerGreek":"Gresk, små (alpha, beta, gamma, osv.)","lowerRoman":"Romertall, små (i, ii, iii, iv, v, osv.)","none":"Ingen","notset":"<ikke satt>","numberedTitle":"Egenskaper for nummerert liste","square":"Firkant","start":"Start","type":"Type","upperAlpha":"Alfabetisk, store (A, B, C, D, E, osv.)","upperRoman":"Romertall, store (I, II, III, IV, V, osv.)","validateStartNumber":"Starten på listen må være et heltall."},"magicline":{"title":"Insert paragraph here"},"maximize":{"maximize":"Maksimer","minimize":"Minimer"},"newpage":{"toolbar":"Ny side"},"pagebreak":{"alt":"Sideskift","toolbar":"Sett inn sideskift for utskrift"},"pastetext":{"button":"Lim inn som ren tekst","title":"Lim inn som ren tekst"},"pastefromword":{"confirmCleanup":"Teksten du limer inn ser ut til å være kopiert fra Word. 
Vil du renske den før du limer den inn?","error":"Det var ikke mulig å renske den innlimte teksten på grunn av en intern feil","title":"Lim inn fra Word","toolbar":"Lim inn fra Word"},"preview":{"preview":"Forhåndsvis"},"print":{"toolbar":"Skriv ut"},"removeformat":{"toolbar":"Fjern formatering"},"save":{"toolbar":"Lagre"},"selectall":{"toolbar":"Merk alt"},"showblocks":{"toolbar":"Vis blokker"},"sourcearea":{"toolbar":"Kilde"},"specialchar":{"options":"Alternativer for spesialtegn","title":"Velg spesialtegn","toolbar":"Sett inn spesialtegn"},"scayt":{"about":"Om SCAYT","aboutTab":"Om","addWord":"Legg til ord","allCaps":"Ikke kontroller ord med kun store bokstaver","dic_create":"Opprett","dic_delete":"Slett","dic_field_name":"Ordboknavn","dic_info":"Brukerordboken lagres først i en informasjonskapsel på din maskin, men det er en begrensning på hvor mye som kan lagres her. Når ordboken blir for stor til å lagres i en informasjonskapsel, vil vi i stedet lagre ordboken på vår server. For å lagre din personlige ordbok på vår server, burde du velge et navn for ordboken din. Hvis du allerede har lagret en ordbok, vennligst skriv inn ordbokens navn og klikk på Gjenopprett-knappen.","dic_rename":"Gi nytt navn","dic_restore":"Gjenopprett","dictionariesTab":"Ordbøker","disable":"Slå av SCAYT","emptyDic":"Ordboknavn bør ikke være tom.","enable":"Slå på SCAYT","ignore":"Ignorer","ignoreAll":"Ignorer Alle","ignoreDomainNames":"Ikke kontroller domenenavn","langs":"Språk","languagesTab":"Språk","mixedCase":"Ikke kontroller ord med blandet små og store bokstaver","mixedWithDigits":"Ikke kontroller ord som inneholder tall","moreSuggestions":"Flere forslag","opera_title":"Ikke støttet av Opera","options":"Valg","optionsTab":"Valg","title":"Stavekontroll mens du skriver","toggle":"Veksle SCAYT","noSuggestions":"No suggestion"},"stylescombo":{"label":"Stil","panelTitle":"Stilformater","panelTitle1":"Blokkstiler","panelTitle2":"Inlinestiler","panelTitle3":"Objektstiler"},"table":{"border":"Rammestørrelse","caption":"Tittel","cell":{"menu":"Celle","insertBefore":"Sett inn celle før","insertAfter":"Sett inn celle etter","deleteCell":"Slett celler","merge":"Slå sammen celler","mergeRight":"Slå sammen høyre","mergeDown":"Slå sammen ned","splitHorizontal":"Del celle horisontalt","splitVertical":"Del celle vertikalt","title":"Celleegenskaper","cellType":"Celletype","rowSpan":"Radspenn","colSpan":"Kolonnespenn","wordWrap":"Tekstbrytning","hAlign":"Horisontal justering","vAlign":"Vertikal justering","alignBaseline":"Grunnlinje","bgColor":"Bakgrunnsfarge","borderColor":"Rammefarge","data":"Data","header":"Overskrift","yes":"Ja","no":"Nei","invalidWidth":"Cellebredde må være et tall.","invalidHeight":"Cellehøyde må være et tall.","invalidRowSpan":"Radspenn må være et heltall.","invalidColSpan":"Kolonnespenn må være et heltall.","chooseColor":"Velg"},"cellPad":"Cellepolstring","cellSpace":"Cellemarg","column":{"menu":"Kolonne","insertBefore":"Sett inn kolonne før","insertAfter":"Sett inn kolonne etter","deleteColumn":"Slett kolonner"},"columns":"Kolonner","deleteTable":"Slett tabell","headers":"Overskrifter","headersBoth":"Begge","headersColumn":"Første kolonne","headersNone":"Ingen","headersRow":"Første rad","invalidBorder":"Rammestørrelse må være et tall.","invalidCellPadding":"Cellepolstring må være et positivt tall.","invalidCellSpacing":"Cellemarg må være et positivt tall.","invalidCols":"Antall kolonner må være et tall større enn 0.","invalidHeight":"Tabellhøyde må være et tall.","invalidRows":"Antall rader må være 
et tall større enn 0.","invalidWidth":"Tabellbredde må være et tall.","menu":"Egenskaper for tabell","row":{"menu":"Rader","insertBefore":"Sett inn rad før","insertAfter":"Sett inn rad etter","deleteRow":"Slett rader"},"rows":"Rader","summary":"Sammendrag","title":"Egenskaper for tabell","toolbar":"Tabell","widthPc":"prosent","widthPx":"piksler","widthUnit":"Bredde-enhet"},"undo":{"redo":"Gjør om","undo":"Angre"},"wsc":{"btnIgnore":"Ignorer","btnIgnoreAll":"Ignorer alle","btnReplace":"Erstatt","btnReplaceAll":"Erstatt alle","btnUndo":"Angre","changeTo":"Endre til","errorLoading":"Feil under lasting av applikasjonstjenestetjener: %s.","ieSpellDownload":"Stavekontroll er ikke installert. Vil du laste den ned nå?","manyChanges":"Stavekontroll fullført: %1 ord endret","noChanges":"Stavekontroll fullført: ingen ord endret","noMispell":"Stavekontroll fullført: ingen feilstavinger funnet","noSuggestions":"- Ingen forslag -","notAvailable":"Beklager, tjenesten er utilgjenglig nå.","notInDic":"Ikke i ordboken","oneChange":"Stavekontroll fullført: Ett ord endret","progress":"Stavekontroll pågår...","title":"Stavekontroll","toolbar":"Stavekontroll"}};
|
PypiClean
|
/PyDvi-0.1.0.tar.gz/PyDvi-0.1.0/PyDviGui/Widgets/MainWindowBase.py
|
####################################################################################################
#
# PyDvi - A Python Library to Process DVI Stream
# Copyright (C) 2014 Fabrice Salvaire
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
####################################################################################################
####################################################################################################
__all__ = ['MainWindowBase']
####################################################################################################
import logging
from PyQt4 import QtGui
####################################################################################################
class MainWindowBase(QtGui.QMainWindow):
_logger = logging.getLogger(__name__)
##############################################
def __init__(self, title='', parent=None):
super(MainWindowBase, self).__init__(parent)
self.setWindowTitle(title)
self._application = QtGui.QApplication.instance()
self.init_menu()
##############################################
@property
def application(self):
return self._application
@property
def menu_bar(self):
return self.menuBar()
@property
def file_menu(self):
return self._file_menu
@property
def help_menu(self):
return self._help_menu
##############################################
def init_menu(self):
application = self._application
self._file_menu = file_menu = self.menu_bar.addMenu('File')
file_menu.addAction(application.exit_action) # Fixme: At the end
self._help_menu = help_menu = self.menu_bar.addMenu('Help')
help_menu.addAction(application.help_action)
help_menu.addSeparator()
help_menu.addAction(application.about_action)
help_menu.addAction(application.show_system_information_action)
##############################################
def show_message(self, message=None, echo=False, timeout=0):
""" Hides the normal status indications and displays the given message for the specified
number of milli-seconds (timeout). If timeout is 0 (default), the message remains displayed
until the clearMessage() slot is called or until the showMessage() slot is called again to
change the message.
Note that showMessage() is called to show temporary explanations of tool tip texts, so
passing a timeout of 0 is not sufficient to display a permanent message.
"""
status_bar = self.statusBar()
if message is None:
status_bar.clearMessage()
else:
status_bar.showMessage(message, timeout)
if echo:
self._logger.info(message)
# self.application.processEvents()
##############################################
def translate(self, text):
return self._application.translate(self.__class__.__name__,
text,
None,
QtGui.QApplication.UnicodeUTF8)
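# ---------------------------------------------------------------------------
# Hypothetical usage sketch (not part of the original module): MainWindowBase
# is meant to be subclassed, and init_menu() assumes the running QApplication
# instance exposes the actions it references (exit_action, help_action,
# about_action, show_system_information_action), so this only works inside
# such an application.
#
#     class ViewerMainWindow(MainWindowBase):
#         def __init__(self):
#             super(ViewerMainWindow, self).__init__(title='PyDvi Viewer')
#             self.show_message('Ready', echo=True)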
####################################################################################################
#
# End
#
####################################################################################################
|
PypiClean
|
/unitsnet-py-0.1.57.tar.gz/unitsnet-py-0.1.57/unitsnet_py/units/specific_energy.py
|
from enum import Enum
import math
from ..abstract_unit import AbstractMeasure
class SpecificEnergyUnits(Enum):
"""
SpecificEnergyUnits enumeration
"""
JoulePerKilogram = 'joule_per_kilogram'
"""
"""
MegaJoulePerTonne = 'mega_joule_per_tonne'
"""
"""
CaloriePerGram = 'calorie_per_gram'
"""
"""
WattHourPerKilogram = 'watt_hour_per_kilogram'
"""
"""
WattDayPerKilogram = 'watt_day_per_kilogram'
"""
"""
WattDayPerTonne = 'watt_day_per_tonne'
"""
"""
WattDayPerShortTon = 'watt_day_per_short_ton'
"""
"""
WattHourPerPound = 'watt_hour_per_pound'
"""
"""
BtuPerPound = 'btu_per_pound'
"""
"""
KilojoulePerKilogram = 'kilojoule_per_kilogram'
"""
"""
MegajoulePerKilogram = 'megajoule_per_kilogram'
"""
"""
KilocaloriePerGram = 'kilocalorie_per_gram'
"""
"""
KilowattHourPerKilogram = 'kilowatt_hour_per_kilogram'
"""
"""
MegawattHourPerKilogram = 'megawatt_hour_per_kilogram'
"""
"""
GigawattHourPerKilogram = 'gigawatt_hour_per_kilogram'
"""
"""
KilowattDayPerKilogram = 'kilowatt_day_per_kilogram'
"""
"""
MegawattDayPerKilogram = 'megawatt_day_per_kilogram'
"""
"""
GigawattDayPerKilogram = 'gigawatt_day_per_kilogram'
"""
"""
TerawattDayPerKilogram = 'terawatt_day_per_kilogram'
"""
"""
KilowattDayPerTonne = 'kilowatt_day_per_tonne'
"""
"""
MegawattDayPerTonne = 'megawatt_day_per_tonne'
"""
"""
GigawattDayPerTonne = 'gigawatt_day_per_tonne'
"""
"""
TerawattDayPerTonne = 'terawatt_day_per_tonne'
"""
"""
KilowattDayPerShortTon = 'kilowatt_day_per_short_ton'
"""
"""
MegawattDayPerShortTon = 'megawatt_day_per_short_ton'
"""
"""
GigawattDayPerShortTon = 'gigawatt_day_per_short_ton'
"""
"""
TerawattDayPerShortTon = 'terawatt_day_per_short_ton'
"""
"""
KilowattHourPerPound = 'kilowatt_hour_per_pound'
"""
"""
MegawattHourPerPound = 'megawatt_hour_per_pound'
"""
"""
GigawattHourPerPound = 'gigawatt_hour_per_pound'
"""
"""
class SpecificEnergy(AbstractMeasure):
"""
The SpecificEnergy
Args:
value (float): The value.
from_unit (SpecificEnergyUnits): The SpecificEnergy unit to create from. The default unit is JoulePerKilogram
"""
def __init__(self, value: float, from_unit: SpecificEnergyUnits = SpecificEnergyUnits.JoulePerKilogram):
if math.isnan(value):
raise ValueError('Invalid unit: value is NaN')
self._value = self.__convert_to_base(value, from_unit)
self.__joules_per_kilogram = None
self.__mega_joules_per_tonne = None
self.__calories_per_gram = None
self.__watt_hours_per_kilogram = None
self.__watt_days_per_kilogram = None
self.__watt_days_per_tonne = None
self.__watt_days_per_short_ton = None
self.__watt_hours_per_pound = None
self.__btu_per_pound = None
self.__kilojoules_per_kilogram = None
self.__megajoules_per_kilogram = None
self.__kilocalories_per_gram = None
self.__kilowatt_hours_per_kilogram = None
self.__megawatt_hours_per_kilogram = None
self.__gigawatt_hours_per_kilogram = None
self.__kilowatt_days_per_kilogram = None
self.__megawatt_days_per_kilogram = None
self.__gigawatt_days_per_kilogram = None
self.__terawatt_days_per_kilogram = None
self.__kilowatt_days_per_tonne = None
self.__megawatt_days_per_tonne = None
self.__gigawatt_days_per_tonne = None
self.__terawatt_days_per_tonne = None
self.__kilowatt_days_per_short_ton = None
self.__megawatt_days_per_short_ton = None
self.__gigawatt_days_per_short_ton = None
self.__terawatt_days_per_short_ton = None
self.__kilowatt_hours_per_pound = None
self.__megawatt_hours_per_pound = None
self.__gigawatt_hours_per_pound = None
def convert(self, unit: SpecificEnergyUnits) -> float:
return self.__convert_from_base(unit)
def __convert_from_base(self, from_unit: SpecificEnergyUnits) -> float:
value = self._value
if from_unit == SpecificEnergyUnits.JoulePerKilogram:
return (value)
if from_unit == SpecificEnergyUnits.MegaJoulePerTonne:
return (value / 1e3)
if from_unit == SpecificEnergyUnits.CaloriePerGram:
return (value / 4.184e3)
if from_unit == SpecificEnergyUnits.WattHourPerKilogram:
return (value / 3.6e3)
if from_unit == SpecificEnergyUnits.WattDayPerKilogram:
return (value / (24 * 3.6e3))
if from_unit == SpecificEnergyUnits.WattDayPerTonne:
return (value / ((24 * 3.6e3) / 1e3))
if from_unit == SpecificEnergyUnits.WattDayPerShortTon:
return (value / ((24 * 3.6e3) / 9.0718474e2))
if from_unit == SpecificEnergyUnits.WattHourPerPound:
return (value / 7.93664e3)
if from_unit == SpecificEnergyUnits.BtuPerPound:
return (value / 2326.000075362)
if from_unit == SpecificEnergyUnits.KilojoulePerKilogram:
return ((value) / 1000.0)
if from_unit == SpecificEnergyUnits.MegajoulePerKilogram:
return ((value) / 1000000.0)
if from_unit == SpecificEnergyUnits.KilocaloriePerGram:
return ((value / 4.184e3) / 1000.0)
if from_unit == SpecificEnergyUnits.KilowattHourPerKilogram:
return ((value / 3.6e3) / 1000.0)
if from_unit == SpecificEnergyUnits.MegawattHourPerKilogram:
return ((value / 3.6e3) / 1000000.0)
if from_unit == SpecificEnergyUnits.GigawattHourPerKilogram:
return ((value / 3.6e3) / 1000000000.0)
if from_unit == SpecificEnergyUnits.KilowattDayPerKilogram:
return ((value / (24 * 3.6e3)) / 1000.0)
if from_unit == SpecificEnergyUnits.MegawattDayPerKilogram:
return ((value / (24 * 3.6e3)) / 1000000.0)
if from_unit == SpecificEnergyUnits.GigawattDayPerKilogram:
return ((value / (24 * 3.6e3)) / 1000000000.0)
if from_unit == SpecificEnergyUnits.TerawattDayPerKilogram:
return ((value / (24 * 3.6e3)) / 1000000000000.0)
if from_unit == SpecificEnergyUnits.KilowattDayPerTonne:
return ((value / ((24 * 3.6e3) / 1e3)) / 1000.0)
if from_unit == SpecificEnergyUnits.MegawattDayPerTonne:
return ((value / ((24 * 3.6e3) / 1e3)) / 1000000.0)
if from_unit == SpecificEnergyUnits.GigawattDayPerTonne:
return ((value / ((24 * 3.6e3) / 1e3)) / 1000000000.0)
if from_unit == SpecificEnergyUnits.TerawattDayPerTonne:
return ((value / ((24 * 3.6e3) / 1e3)) / 1000000000000.0)
if from_unit == SpecificEnergyUnits.KilowattDayPerShortTon:
return ((value / ((24 * 3.6e3) / 9.0718474e2)) / 1000.0)
if from_unit == SpecificEnergyUnits.MegawattDayPerShortTon:
return ((value / ((24 * 3.6e3) / 9.0718474e2)) / 1000000.0)
if from_unit == SpecificEnergyUnits.GigawattDayPerShortTon:
return ((value / ((24 * 3.6e3) / 9.0718474e2)) / 1000000000.0)
if from_unit == SpecificEnergyUnits.TerawattDayPerShortTon:
return ((value / ((24 * 3.6e3) / 9.0718474e2)) / 1000000000000.0)
if from_unit == SpecificEnergyUnits.KilowattHourPerPound:
return ((value / 7.93664e3) / 1000.0)
if from_unit == SpecificEnergyUnits.MegawattHourPerPound:
return ((value / 7.93664e3) / 1000000.0)
if from_unit == SpecificEnergyUnits.GigawattHourPerPound:
return ((value / 7.93664e3) / 1000000000.0)
return None
def __convert_to_base(self, value: float, to_unit: SpecificEnergyUnits) -> float:
if to_unit == SpecificEnergyUnits.JoulePerKilogram:
return (value)
if to_unit == SpecificEnergyUnits.MegaJoulePerTonne:
return (value * 1e3)
if to_unit == SpecificEnergyUnits.CaloriePerGram:
return (value * 4.184e3)
if to_unit == SpecificEnergyUnits.WattHourPerKilogram:
return (value * 3.6e3)
if to_unit == SpecificEnergyUnits.WattDayPerKilogram:
return (value * (24 * 3.6e3))
if to_unit == SpecificEnergyUnits.WattDayPerTonne:
return (value * ((24 * 3.6e3) / 1e3))
if to_unit == SpecificEnergyUnits.WattDayPerShortTon:
return (value * ((24 * 3.6e3) / 9.0718474e2))
if to_unit == SpecificEnergyUnits.WattHourPerPound:
return (value * 7.93664e3)
if to_unit == SpecificEnergyUnits.BtuPerPound:
return (value * 2326.000075362)
if to_unit == SpecificEnergyUnits.KilojoulePerKilogram:
return ((value) * 1000.0)
if to_unit == SpecificEnergyUnits.MegajoulePerKilogram:
return ((value) * 1000000.0)
if to_unit == SpecificEnergyUnits.KilocaloriePerGram:
return ((value * 4.184e3) * 1000.0)
if to_unit == SpecificEnergyUnits.KilowattHourPerKilogram:
return ((value * 3.6e3) * 1000.0)
if to_unit == SpecificEnergyUnits.MegawattHourPerKilogram:
return ((value * 3.6e3) * 1000000.0)
if to_unit == SpecificEnergyUnits.GigawattHourPerKilogram:
return ((value * 3.6e3) * 1000000000.0)
if to_unit == SpecificEnergyUnits.KilowattDayPerKilogram:
return ((value * (24 * 3.6e3)) * 1000.0)
if to_unit == SpecificEnergyUnits.MegawattDayPerKilogram:
return ((value * (24 * 3.6e3)) * 1000000.0)
if to_unit == SpecificEnergyUnits.GigawattDayPerKilogram:
return ((value * (24 * 3.6e3)) * 1000000000.0)
if to_unit == SpecificEnergyUnits.TerawattDayPerKilogram:
return ((value * (24 * 3.6e3)) * 1000000000000.0)
if to_unit == SpecificEnergyUnits.KilowattDayPerTonne:
return ((value * ((24 * 3.6e3) / 1e3)) * 1000.0)
if to_unit == SpecificEnergyUnits.MegawattDayPerTonne:
return ((value * ((24 * 3.6e3) / 1e3)) * 1000000.0)
if to_unit == SpecificEnergyUnits.GigawattDayPerTonne:
return ((value * ((24 * 3.6e3) / 1e3)) * 1000000000.0)
if to_unit == SpecificEnergyUnits.TerawattDayPerTonne:
return ((value * ((24 * 3.6e3) / 1e3)) * 1000000000000.0)
if to_unit == SpecificEnergyUnits.KilowattDayPerShortTon:
return ((value * ((24 * 3.6e3) / 9.0718474e2)) * 1000.0)
if to_unit == SpecificEnergyUnits.MegawattDayPerShortTon:
return ((value * ((24 * 3.6e3) / 9.0718474e2)) * 1000000.0)
if to_unit == SpecificEnergyUnits.GigawattDayPerShortTon:
return ((value * ((24 * 3.6e3) / 9.0718474e2)) * 1000000000.0)
if to_unit == SpecificEnergyUnits.TerawattDayPerShortTon:
return ((value * ((24 * 3.6e3) / 9.0718474e2)) * 1000000000000.0)
if to_unit == SpecificEnergyUnits.KilowattHourPerPound:
return ((value * 7.93664e3) * 1000.0)
if to_unit == SpecificEnergyUnits.MegawattHourPerPound:
return ((value * 7.93664e3) * 1000000.0)
if to_unit == SpecificEnergyUnits.GigawattHourPerPound:
return ((value * 7.93664e3) * 1000000000.0)
return None
@property
def base_value(self) -> float:
return self._value
@staticmethod
def from_joules_per_kilogram(joules_per_kilogram: float):
"""
Create a new instance of SpecificEnergy from a value in joules_per_kilogram.
:param joules_per_kilogram: The SpecificEnergy value in joules_per_kilogram.
:type joules_per_kilogram: float
:return: A new instance of SpecificEnergy.
:rtype: SpecificEnergy
"""
return SpecificEnergy(joules_per_kilogram, SpecificEnergyUnits.JoulePerKilogram)
@staticmethod
def from_mega_joules_per_tonne(mega_joules_per_tonne: float):
"""
Create a new instance of SpecificEnergy from a value in mega_joules_per_tonne.
:param mega_joules_per_tonne: The SpecificEnergy value in mega_joules_per_tonne.
:type mega_joules_per_tonne: float
:return: A new instance of SpecificEnergy.
:rtype: SpecificEnergy
"""
return SpecificEnergy(mega_joules_per_tonne, SpecificEnergyUnits.MegaJoulePerTonne)
@staticmethod
def from_calories_per_gram(calories_per_gram: float):
"""
Create a new instance of SpecificEnergy from a value in calories_per_gram.
:param calories_per_gram: The SpecificEnergy value in calories_per_gram.
:type calories_per_gram: float
:return: A new instance of SpecificEnergy.
:rtype: SpecificEnergy
"""
return SpecificEnergy(calories_per_gram, SpecificEnergyUnits.CaloriePerGram)
@staticmethod
def from_watt_hours_per_kilogram(watt_hours_per_kilogram: float):
"""
Create a new instance of SpecificEnergy from a value in watt_hours_per_kilogram.
:param watt_hours_per_kilogram: The SpecificEnergy value in watt_hours_per_kilogram.
:type watt_hours_per_kilogram: float
:return: A new instance of SpecificEnergy.
:rtype: SpecificEnergy
"""
return SpecificEnergy(watt_hours_per_kilogram, SpecificEnergyUnits.WattHourPerKilogram)
@staticmethod
def from_watt_days_per_kilogram(watt_days_per_kilogram: float):
"""
Create a new instance of SpecificEnergy from a value in watt_days_per_kilogram.
:param watt_days_per_kilogram: The SpecificEnergy value in watt_days_per_kilogram.
:type watt_days_per_kilogram: float
:return: A new instance of SpecificEnergy.
:rtype: SpecificEnergy
"""
return SpecificEnergy(watt_days_per_kilogram, SpecificEnergyUnits.WattDayPerKilogram)
@staticmethod
def from_watt_days_per_tonne(watt_days_per_tonne: float):
"""
Create a new instance of SpecificEnergy from a value in watt_days_per_tonne.
:param watt_days_per_tonne: The SpecificEnergy value in watt_days_per_tonne.
:type watt_days_per_tonne: float
:return: A new instance of SpecificEnergy.
:rtype: SpecificEnergy
"""
return SpecificEnergy(watt_days_per_tonne, SpecificEnergyUnits.WattDayPerTonne)
@staticmethod
def from_watt_days_per_short_ton(watt_days_per_short_ton: float):
"""
Create a new instance of SpecificEnergy from a value in watt_days_per_short_ton.
:param watt_days_per_short_ton: The SpecificEnergy value in watt_days_per_short_ton.
:type watt_days_per_short_ton: float
:return: A new instance of SpecificEnergy.
:rtype: SpecificEnergy
"""
return SpecificEnergy(watt_days_per_short_ton, SpecificEnergyUnits.WattDayPerShortTon)
@staticmethod
def from_watt_hours_per_pound(watt_hours_per_pound: float):
"""
Create a new instance of SpecificEnergy from a value in watt_hours_per_pound.
:param watt_hours_per_pound: The SpecificEnergy value in watt_hours_per_pound.
:type watt_hours_per_pound: float
:return: A new instance of SpecificEnergy.
:rtype: SpecificEnergy
"""
return SpecificEnergy(watt_hours_per_pound, SpecificEnergyUnits.WattHourPerPound)
@staticmethod
def from_btu_per_pound(btu_per_pound: float):
"""
Create a new instance of SpecificEnergy from a value in btu_per_pound.
:param btu_per_pound: The SpecificEnergy value in btu_per_pound.
:type btu_per_pound: float
:return: A new instance of SpecificEnergy.
:rtype: SpecificEnergy
"""
return SpecificEnergy(btu_per_pound, SpecificEnergyUnits.BtuPerPound)
@staticmethod
def from_kilojoules_per_kilogram(kilojoules_per_kilogram: float):
"""
Create a new instance of SpecificEnergy from a value in kilojoules_per_kilogram.
:param kilojoules_per_kilogram: The SpecificEnergy value in kilojoules_per_kilogram.
:type kilojoules_per_kilogram: float
:return: A new instance of SpecificEnergy.
:rtype: SpecificEnergy
"""
return SpecificEnergy(kilojoules_per_kilogram, SpecificEnergyUnits.KilojoulePerKilogram)
@staticmethod
def from_megajoules_per_kilogram(megajoules_per_kilogram: float):
"""
Create a new instance of SpecificEnergy from a value in megajoules_per_kilogram.
:param megajoules_per_kilogram: The SpecificEnergy value in megajoules_per_kilogram.
:type megajoules_per_kilogram: float
:return: A new instance of SpecificEnergy.
:rtype: SpecificEnergy
"""
return SpecificEnergy(megajoules_per_kilogram, SpecificEnergyUnits.MegajoulePerKilogram)
@staticmethod
def from_kilocalories_per_gram(kilocalories_per_gram: float):
"""
Create a new instance of SpecificEnergy from a value in kilocalories_per_gram.
:param kilocalories_per_gram: The SpecificEnergy value in kilocalories_per_gram.
:type kilocalories_per_gram: float
:return: A new instance of SpecificEnergy.
:rtype: SpecificEnergy
"""
return SpecificEnergy(kilocalories_per_gram, SpecificEnergyUnits.KilocaloriePerGram)
@staticmethod
def from_kilowatt_hours_per_kilogram(kilowatt_hours_per_kilogram: float):
"""
Create a new instance of SpecificEnergy from a value in kilowatt_hours_per_kilogram.
:param kilowatt_hours_per_kilogram: The SpecificEnergy value in kilowatt_hours_per_kilogram.
:type kilowatt_hours_per_kilogram: float
:return: A new instance of SpecificEnergy.
:rtype: SpecificEnergy
"""
return SpecificEnergy(kilowatt_hours_per_kilogram, SpecificEnergyUnits.KilowattHourPerKilogram)
@staticmethod
def from_megawatt_hours_per_kilogram(megawatt_hours_per_kilogram: float):
"""
Create a new instance of SpecificEnergy from a value in megawatt_hours_per_kilogram.
:param megawatt_hours_per_kilogram: The SpecificEnergy value in megawatt_hours_per_kilogram.
:type megawatt_hours_per_kilogram: float
:return: A new instance of SpecificEnergy.
:rtype: SpecificEnergy
"""
return SpecificEnergy(megawatt_hours_per_kilogram, SpecificEnergyUnits.MegawattHourPerKilogram)
@staticmethod
def from_gigawatt_hours_per_kilogram(gigawatt_hours_per_kilogram: float):
"""
Create a new instance of SpecificEnergy from a value in gigawatt_hours_per_kilogram.
:param gigawatt_hours_per_kilogram: The SpecificEnergy value in gigawatt_hours_per_kilogram.
:type gigawatt_hours_per_kilogram: float
:return: A new instance of SpecificEnergy.
:rtype: SpecificEnergy
"""
return SpecificEnergy(gigawatt_hours_per_kilogram, SpecificEnergyUnits.GigawattHourPerKilogram)
@staticmethod
def from_kilowatt_days_per_kilogram(kilowatt_days_per_kilogram: float):
"""
Create a new instance of SpecificEnergy from a value in kilowatt_days_per_kilogram.
:param kilowatt_days_per_kilogram: The SpecificEnergy value in kilowatt_days_per_kilogram.
:type kilowatt_days_per_kilogram: float
:return: A new instance of SpecificEnergy.
:rtype: SpecificEnergy
"""
return SpecificEnergy(kilowatt_days_per_kilogram, SpecificEnergyUnits.KilowattDayPerKilogram)
@staticmethod
def from_megawatt_days_per_kilogram(megawatt_days_per_kilogram: float):
"""
Create a new instance of SpecificEnergy from a value in megawatt_days_per_kilogram.
:param megawatt_days_per_kilogram: The SpecificEnergy value in megawatt_days_per_kilogram.
:type megawatt_days_per_kilogram: float
:return: A new instance of SpecificEnergy.
:rtype: SpecificEnergy
"""
return SpecificEnergy(megawatt_days_per_kilogram, SpecificEnergyUnits.MegawattDayPerKilogram)
@staticmethod
def from_gigawatt_days_per_kilogram(gigawatt_days_per_kilogram: float):
"""
Create a new instance of SpecificEnergy from a value in gigawatt_days_per_kilogram.
:param gigawatt_days_per_kilogram: The SpecificEnergy value in gigawatt_days_per_kilogram.
:type gigawatt_days_per_kilogram: float
:return: A new instance of SpecificEnergy.
:rtype: SpecificEnergy
"""
return SpecificEnergy(gigawatt_days_per_kilogram, SpecificEnergyUnits.GigawattDayPerKilogram)
@staticmethod
def from_terawatt_days_per_kilogram(terawatt_days_per_kilogram: float):
"""
Create a new instance of SpecificEnergy from a value in terawatt_days_per_kilogram.
:param terawatt_days_per_kilogram: The SpecificEnergy value in terawatt_days_per_kilogram.
:type terawatt_days_per_kilogram: float
:return: A new instance of SpecificEnergy.
:rtype: SpecificEnergy
"""
return SpecificEnergy(terawatt_days_per_kilogram, SpecificEnergyUnits.TerawattDayPerKilogram)
@staticmethod
def from_kilowatt_days_per_tonne(kilowatt_days_per_tonne: float):
"""
Create a new instance of SpecificEnergy from a value in kilowatt_days_per_tonne.
:param kilowatt_days_per_tonne: The SpecificEnergy value in kilowatt_days_per_tonne.
:type kilowatt_days_per_tonne: float
:return: A new instance of SpecificEnergy.
:rtype: SpecificEnergy
"""
return SpecificEnergy(kilowatt_days_per_tonne, SpecificEnergyUnits.KilowattDayPerTonne)
@staticmethod
def from_megawatt_days_per_tonne(megawatt_days_per_tonne: float):
"""
Create a new instance of SpecificEnergy from a value in megawatt_days_per_tonne.
:param megawatt_days_per_tonne: The SpecificEnergy value in megawatt_days_per_tonne.
:type megawatt_days_per_tonne: float
:return: A new instance of SpecificEnergy.
:rtype: SpecificEnergy
"""
return SpecificEnergy(megawatt_days_per_tonne, SpecificEnergyUnits.MegawattDayPerTonne)
@staticmethod
def from_gigawatt_days_per_tonne(gigawatt_days_per_tonne: float):
"""
Create a new instance of SpecificEnergy from a value in gigawatt_days_per_tonne.
:param gigawatt_days_per_tonne: The SpecificEnergy value in gigawatt_days_per_tonne.
:type gigawatt_days_per_tonne: float
:return: A new instance of SpecificEnergy.
:rtype: SpecificEnergy
"""
return SpecificEnergy(gigawatt_days_per_tonne, SpecificEnergyUnits.GigawattDayPerTonne)
@staticmethod
def from_terawatt_days_per_tonne(terawatt_days_per_tonne: float):
"""
Create a new instance of SpecificEnergy from a value in terawatt_days_per_tonne.
:param terawatt_days_per_tonne: The SpecificEnergy value in terawatt_days_per_tonne.
:type terawatt_days_per_tonne: float
:return: A new instance of SpecificEnergy.
:rtype: SpecificEnergy
"""
return SpecificEnergy(terawatt_days_per_tonne, SpecificEnergyUnits.TerawattDayPerTonne)
@staticmethod
def from_kilowatt_days_per_short_ton(kilowatt_days_per_short_ton: float):
"""
Create a new instance of SpecificEnergy from a value in kilowatt_days_per_short_ton.
:param kilowatt_days_per_short_ton: The SpecificEnergy value in kilowatt_days_per_short_ton.
:type kilowatt_days_per_short_ton: float
:return: A new instance of SpecificEnergy.
:rtype: SpecificEnergy
"""
return SpecificEnergy(kilowatt_days_per_short_ton, SpecificEnergyUnits.KilowattDayPerShortTon)
@staticmethod
def from_megawatt_days_per_short_ton(megawatt_days_per_short_ton: float):
"""
Create a new instance of SpecificEnergy from a value in megawatt_days_per_short_ton.
:param megawatt_days_per_short_ton: The SpecificEnergy value in megawatt_days_per_short_ton.
:type megawatt_days_per_short_ton: float
:return: A new instance of SpecificEnergy.
:rtype: SpecificEnergy
"""
return SpecificEnergy(megawatt_days_per_short_ton, SpecificEnergyUnits.MegawattDayPerShortTon)
@staticmethod
def from_gigawatt_days_per_short_ton(gigawatt_days_per_short_ton: float):
"""
Create a new instance of SpecificEnergy from a value in gigawatt_days_per_short_ton.
:param gigawatt_days_per_short_ton: The SpecificEnergy value in gigawatt_days_per_short_ton.
:type gigawatt_days_per_short_ton: float
:return: A new instance of SpecificEnergy.
:rtype: SpecificEnergy
"""
return SpecificEnergy(gigawatt_days_per_short_ton, SpecificEnergyUnits.GigawattDayPerShortTon)
@staticmethod
def from_terawatt_days_per_short_ton(terawatt_days_per_short_ton: float):
"""
Create a new instance of SpecificEnergy from a value in terawatt_days_per_short_ton.
:param terawatt_days_per_short_ton: The SpecificEnergy value in terawatt_days_per_short_ton.
:type terawatt_days_per_short_ton: float
:return: A new instance of SpecificEnergy.
:rtype: SpecificEnergy
"""
return SpecificEnergy(terawatt_days_per_short_ton, SpecificEnergyUnits.TerawattDayPerShortTon)
@staticmethod
def from_kilowatt_hours_per_pound(kilowatt_hours_per_pound: float):
"""
Create a new instance of SpecificEnergy from a value in kilowatt_hours_per_pound.
:param kilowatt_hours_per_pound: The SpecificEnergy value in kilowatt_hours_per_pound.
:type kilowatt_hours_per_pound: float
:return: A new instance of SpecificEnergy.
:rtype: SpecificEnergy
"""
return SpecificEnergy(kilowatt_hours_per_pound, SpecificEnergyUnits.KilowattHourPerPound)
@staticmethod
def from_megawatt_hours_per_pound(megawatt_hours_per_pound: float):
"""
Create a new instance of SpecificEnergy from a value in megawatt_hours_per_pound.
:param megawatt_hours_per_pound: The SpecificEnergy value in megawatt_hours_per_pound.
:type megawatt_hours_per_pound: float
:return: A new instance of SpecificEnergy.
:rtype: SpecificEnergy
"""
return SpecificEnergy(megawatt_hours_per_pound, SpecificEnergyUnits.MegawattHourPerPound)
@staticmethod
def from_gigawatt_hours_per_pound(gigawatt_hours_per_pound: float):
"""
Create a new instance of SpecificEnergy from a value in gigawatt_hours_per_pound.
:param gigawatt_hours_per_pound: The SpecificEnergy value in gigawatt_hours_per_pound.
:type gigawatt_hours_per_pound: float
:return: A new instance of SpecificEnergy.
:rtype: SpecificEnergy
"""
return SpecificEnergy(gigawatt_hours_per_pound, SpecificEnergyUnits.GigawattHourPerPound)
@property
def joules_per_kilogram(self) -> float:
"""
"""
if self.__joules_per_kilogram != None:
return self.__joules_per_kilogram
self.__joules_per_kilogram = self.__convert_from_base(SpecificEnergyUnits.JoulePerKilogram)
return self.__joules_per_kilogram
@property
def mega_joules_per_tonne(self) -> float:
"""
"""
if self.__mega_joules_per_tonne != None:
return self.__mega_joules_per_tonne
self.__mega_joules_per_tonne = self.__convert_from_base(SpecificEnergyUnits.MegaJoulePerTonne)
return self.__mega_joules_per_tonne
@property
def calories_per_gram(self) -> float:
"""
"""
if self.__calories_per_gram != None:
return self.__calories_per_gram
self.__calories_per_gram = self.__convert_from_base(SpecificEnergyUnits.CaloriePerGram)
return self.__calories_per_gram
@property
def watt_hours_per_kilogram(self) -> float:
"""
"""
if self.__watt_hours_per_kilogram != None:
return self.__watt_hours_per_kilogram
self.__watt_hours_per_kilogram = self.__convert_from_base(SpecificEnergyUnits.WattHourPerKilogram)
return self.__watt_hours_per_kilogram
@property
def watt_days_per_kilogram(self) -> float:
"""
"""
if self.__watt_days_per_kilogram != None:
return self.__watt_days_per_kilogram
self.__watt_days_per_kilogram = self.__convert_from_base(SpecificEnergyUnits.WattDayPerKilogram)
return self.__watt_days_per_kilogram
@property
def watt_days_per_tonne(self) -> float:
"""
"""
if self.__watt_days_per_tonne != None:
return self.__watt_days_per_tonne
self.__watt_days_per_tonne = self.__convert_from_base(SpecificEnergyUnits.WattDayPerTonne)
return self.__watt_days_per_tonne
@property
def watt_days_per_short_ton(self) -> float:
"""
"""
if self.__watt_days_per_short_ton != None:
return self.__watt_days_per_short_ton
self.__watt_days_per_short_ton = self.__convert_from_base(SpecificEnergyUnits.WattDayPerShortTon)
return self.__watt_days_per_short_ton
@property
def watt_hours_per_pound(self) -> float:
"""
"""
if self.__watt_hours_per_pound != None:
return self.__watt_hours_per_pound
self.__watt_hours_per_pound = self.__convert_from_base(SpecificEnergyUnits.WattHourPerPound)
return self.__watt_hours_per_pound
@property
def btu_per_pound(self) -> float:
"""
"""
if self.__btu_per_pound != None:
return self.__btu_per_pound
self.__btu_per_pound = self.__convert_from_base(SpecificEnergyUnits.BtuPerPound)
return self.__btu_per_pound
@property
def kilojoules_per_kilogram(self) -> float:
"""
"""
if self.__kilojoules_per_kilogram != None:
return self.__kilojoules_per_kilogram
self.__kilojoules_per_kilogram = self.__convert_from_base(SpecificEnergyUnits.KilojoulePerKilogram)
return self.__kilojoules_per_kilogram
@property
def megajoules_per_kilogram(self) -> float:
"""
"""
if self.__megajoules_per_kilogram != None:
return self.__megajoules_per_kilogram
self.__megajoules_per_kilogram = self.__convert_from_base(SpecificEnergyUnits.MegajoulePerKilogram)
return self.__megajoules_per_kilogram
@property
def kilocalories_per_gram(self) -> float:
"""
"""
if self.__kilocalories_per_gram != None:
return self.__kilocalories_per_gram
self.__kilocalories_per_gram = self.__convert_from_base(SpecificEnergyUnits.KilocaloriePerGram)
return self.__kilocalories_per_gram
@property
def kilowatt_hours_per_kilogram(self) -> float:
"""
"""
if self.__kilowatt_hours_per_kilogram != None:
return self.__kilowatt_hours_per_kilogram
self.__kilowatt_hours_per_kilogram = self.__convert_from_base(SpecificEnergyUnits.KilowattHourPerKilogram)
return self.__kilowatt_hours_per_kilogram
@property
def megawatt_hours_per_kilogram(self) -> float:
"""
"""
if self.__megawatt_hours_per_kilogram != None:
return self.__megawatt_hours_per_kilogram
self.__megawatt_hours_per_kilogram = self.__convert_from_base(SpecificEnergyUnits.MegawattHourPerKilogram)
return self.__megawatt_hours_per_kilogram
@property
def gigawatt_hours_per_kilogram(self) -> float:
"""
"""
if self.__gigawatt_hours_per_kilogram != None:
return self.__gigawatt_hours_per_kilogram
self.__gigawatt_hours_per_kilogram = self.__convert_from_base(SpecificEnergyUnits.GigawattHourPerKilogram)
return self.__gigawatt_hours_per_kilogram
@property
def kilowatt_days_per_kilogram(self) -> float:
"""
"""
if self.__kilowatt_days_per_kilogram != None:
return self.__kilowatt_days_per_kilogram
self.__kilowatt_days_per_kilogram = self.__convert_from_base(SpecificEnergyUnits.KilowattDayPerKilogram)
return self.__kilowatt_days_per_kilogram
@property
def megawatt_days_per_kilogram(self) -> float:
"""
"""
if self.__megawatt_days_per_kilogram != None:
return self.__megawatt_days_per_kilogram
self.__megawatt_days_per_kilogram = self.__convert_from_base(SpecificEnergyUnits.MegawattDayPerKilogram)
return self.__megawatt_days_per_kilogram
@property
def gigawatt_days_per_kilogram(self) -> float:
"""
"""
if self.__gigawatt_days_per_kilogram != None:
return self.__gigawatt_days_per_kilogram
self.__gigawatt_days_per_kilogram = self.__convert_from_base(SpecificEnergyUnits.GigawattDayPerKilogram)
return self.__gigawatt_days_per_kilogram
@property
def terawatt_days_per_kilogram(self) -> float:
"""
"""
if self.__terawatt_days_per_kilogram != None:
return self.__terawatt_days_per_kilogram
self.__terawatt_days_per_kilogram = self.__convert_from_base(SpecificEnergyUnits.TerawattDayPerKilogram)
return self.__terawatt_days_per_kilogram
@property
def kilowatt_days_per_tonne(self) -> float:
"""
"""
if self.__kilowatt_days_per_tonne != None:
return self.__kilowatt_days_per_tonne
self.__kilowatt_days_per_tonne = self.__convert_from_base(SpecificEnergyUnits.KilowattDayPerTonne)
return self.__kilowatt_days_per_tonne
@property
def megawatt_days_per_tonne(self) -> float:
"""
"""
if self.__megawatt_days_per_tonne != None:
return self.__megawatt_days_per_tonne
self.__megawatt_days_per_tonne = self.__convert_from_base(SpecificEnergyUnits.MegawattDayPerTonne)
return self.__megawatt_days_per_tonne
@property
def gigawatt_days_per_tonne(self) -> float:
"""
"""
if self.__gigawatt_days_per_tonne != None:
return self.__gigawatt_days_per_tonne
self.__gigawatt_days_per_tonne = self.__convert_from_base(SpecificEnergyUnits.GigawattDayPerTonne)
return self.__gigawatt_days_per_tonne
@property
def terawatt_days_per_tonne(self) -> float:
"""
"""
if self.__terawatt_days_per_tonne != None:
return self.__terawatt_days_per_tonne
self.__terawatt_days_per_tonne = self.__convert_from_base(SpecificEnergyUnits.TerawattDayPerTonne)
return self.__terawatt_days_per_tonne
@property
def kilowatt_days_per_short_ton(self) -> float:
"""
"""
if self.__kilowatt_days_per_short_ton != None:
return self.__kilowatt_days_per_short_ton
self.__kilowatt_days_per_short_ton = self.__convert_from_base(SpecificEnergyUnits.KilowattDayPerShortTon)
return self.__kilowatt_days_per_short_ton
@property
def megawatt_days_per_short_ton(self) -> float:
"""
"""
if self.__megawatt_days_per_short_ton != None:
return self.__megawatt_days_per_short_ton
self.__megawatt_days_per_short_ton = self.__convert_from_base(SpecificEnergyUnits.MegawattDayPerShortTon)
return self.__megawatt_days_per_short_ton
@property
def gigawatt_days_per_short_ton(self) -> float:
"""
"""
if self.__gigawatt_days_per_short_ton != None:
return self.__gigawatt_days_per_short_ton
self.__gigawatt_days_per_short_ton = self.__convert_from_base(SpecificEnergyUnits.GigawattDayPerShortTon)
return self.__gigawatt_days_per_short_ton
@property
def terawatt_days_per_short_ton(self) -> float:
"""
"""
if self.__terawatt_days_per_short_ton != None:
return self.__terawatt_days_per_short_ton
self.__terawatt_days_per_short_ton = self.__convert_from_base(SpecificEnergyUnits.TerawattDayPerShortTon)
return self.__terawatt_days_per_short_ton
@property
def kilowatt_hours_per_pound(self) -> float:
"""
"""
if self.__kilowatt_hours_per_pound != None:
return self.__kilowatt_hours_per_pound
self.__kilowatt_hours_per_pound = self.__convert_from_base(SpecificEnergyUnits.KilowattHourPerPound)
return self.__kilowatt_hours_per_pound
@property
def megawatt_hours_per_pound(self) -> float:
"""
"""
if self.__megawatt_hours_per_pound != None:
return self.__megawatt_hours_per_pound
self.__megawatt_hours_per_pound = self.__convert_from_base(SpecificEnergyUnits.MegawattHourPerPound)
return self.__megawatt_hours_per_pound
@property
def gigawatt_hours_per_pound(self) -> float:
"""
"""
if self.__gigawatt_hours_per_pound != None:
return self.__gigawatt_hours_per_pound
self.__gigawatt_hours_per_pound = self.__convert_from_base(SpecificEnergyUnits.GigawattHourPerPound)
return self.__gigawatt_hours_per_pound
def to_string(self, unit: SpecificEnergyUnits = SpecificEnergyUnits.JoulePerKilogram) -> str:
"""
Format the SpecificEnergy to string.
Note! the default format for SpecificEnergy is JoulePerKilogram.
To specify the unit format set the 'unit' parameter.
"""
if unit == SpecificEnergyUnits.JoulePerKilogram:
return f"""{self.joules_per_kilogram} J/kg"""
if unit == SpecificEnergyUnits.MegaJoulePerTonne:
return f"""{self.mega_joules_per_tonne} MJ/t"""
if unit == SpecificEnergyUnits.CaloriePerGram:
return f"""{self.calories_per_gram} cal/g"""
if unit == SpecificEnergyUnits.WattHourPerKilogram:
return f"""{self.watt_hours_per_kilogram} Wh/kg"""
if unit == SpecificEnergyUnits.WattDayPerKilogram:
return f"""{self.watt_days_per_kilogram} Wd/kg"""
if unit == SpecificEnergyUnits.WattDayPerTonne:
return f"""{self.watt_days_per_tonne} Wd/t"""
if unit == SpecificEnergyUnits.WattDayPerShortTon:
return f"""{self.watt_days_per_short_ton} Wd/ST"""
if unit == SpecificEnergyUnits.WattHourPerPound:
return f"""{self.watt_hours_per_pound} Wh/lbs"""
if unit == SpecificEnergyUnits.BtuPerPound:
return f"""{self.btu_per_pound} btu/lb"""
if unit == SpecificEnergyUnits.KilojoulePerKilogram:
return f"""{self.kilojoules_per_kilogram} """
if unit == SpecificEnergyUnits.MegajoulePerKilogram:
return f"""{self.megajoules_per_kilogram} """
if unit == SpecificEnergyUnits.KilocaloriePerGram:
return f"""{self.kilocalories_per_gram} """
if unit == SpecificEnergyUnits.KilowattHourPerKilogram:
return f"""{self.kilowatt_hours_per_kilogram} """
if unit == SpecificEnergyUnits.MegawattHourPerKilogram:
return f"""{self.megawatt_hours_per_kilogram} """
if unit == SpecificEnergyUnits.GigawattHourPerKilogram:
return f"""{self.gigawatt_hours_per_kilogram} """
if unit == SpecificEnergyUnits.KilowattDayPerKilogram:
return f"""{self.kilowatt_days_per_kilogram} """
if unit == SpecificEnergyUnits.MegawattDayPerKilogram:
return f"""{self.megawatt_days_per_kilogram} """
if unit == SpecificEnergyUnits.GigawattDayPerKilogram:
return f"""{self.gigawatt_days_per_kilogram} """
if unit == SpecificEnergyUnits.TerawattDayPerKilogram:
return f"""{self.terawatt_days_per_kilogram} """
if unit == SpecificEnergyUnits.KilowattDayPerTonne:
return f"""{self.kilowatt_days_per_tonne} """
if unit == SpecificEnergyUnits.MegawattDayPerTonne:
return f"""{self.megawatt_days_per_tonne} """
if unit == SpecificEnergyUnits.GigawattDayPerTonne:
return f"""{self.gigawatt_days_per_tonne} """
if unit == SpecificEnergyUnits.TerawattDayPerTonne:
return f"""{self.terawatt_days_per_tonne} """
if unit == SpecificEnergyUnits.KilowattDayPerShortTon:
return f"""{self.kilowatt_days_per_short_ton} """
if unit == SpecificEnergyUnits.MegawattDayPerShortTon:
return f"""{self.megawatt_days_per_short_ton} """
if unit == SpecificEnergyUnits.GigawattDayPerShortTon:
return f"""{self.gigawatt_days_per_short_ton} """
if unit == SpecificEnergyUnits.TerawattDayPerShortTon:
return f"""{self.terawatt_days_per_short_ton} """
if unit == SpecificEnergyUnits.KilowattHourPerPound:
return f"""{self.kilowatt_hours_per_pound} """
if unit == SpecificEnergyUnits.MegawattHourPerPound:
return f"""{self.megawatt_hours_per_pound} """
if unit == SpecificEnergyUnits.GigawattHourPerPound:
return f"""{self.gigawatt_hours_per_pound} """
return f'{self._value}'
def get_unit_abbreviation(self, unit_abbreviation: SpecificEnergyUnits = SpecificEnergyUnits.JoulePerKilogram) -> str:
"""
Get SpecificEnergy unit abbreviation.
Note! the default abbreviation for SpecificEnergy is JoulePerKilogram.
To specify the unit abbreviation set the 'unit_abbreviation' parameter.
"""
if unit_abbreviation == SpecificEnergyUnits.JoulePerKilogram:
return """J/kg"""
if unit_abbreviation == SpecificEnergyUnits.MegaJoulePerTonne:
return """MJ/t"""
if unit_abbreviation == SpecificEnergyUnits.CaloriePerGram:
return """cal/g"""
if unit_abbreviation == SpecificEnergyUnits.WattHourPerKilogram:
return """Wh/kg"""
if unit_abbreviation == SpecificEnergyUnits.WattDayPerKilogram:
return """Wd/kg"""
if unit_abbreviation == SpecificEnergyUnits.WattDayPerTonne:
return """Wd/t"""
if unit_abbreviation == SpecificEnergyUnits.WattDayPerShortTon:
return """Wd/ST"""
if unit_abbreviation == SpecificEnergyUnits.WattHourPerPound:
return """Wh/lbs"""
if unit_abbreviation == SpecificEnergyUnits.BtuPerPound:
return """btu/lb"""
if unit_abbreviation == SpecificEnergyUnits.KilojoulePerKilogram:
return """"""
if unit_abbreviation == SpecificEnergyUnits.MegajoulePerKilogram:
return """"""
if unit_abbreviation == SpecificEnergyUnits.KilocaloriePerGram:
return """"""
if unit_abbreviation == SpecificEnergyUnits.KilowattHourPerKilogram:
return """"""
if unit_abbreviation == SpecificEnergyUnits.MegawattHourPerKilogram:
return """"""
if unit_abbreviation == SpecificEnergyUnits.GigawattHourPerKilogram:
return """"""
if unit_abbreviation == SpecificEnergyUnits.KilowattDayPerKilogram:
return """"""
if unit_abbreviation == SpecificEnergyUnits.MegawattDayPerKilogram:
return """"""
if unit_abbreviation == SpecificEnergyUnits.GigawattDayPerKilogram:
return """"""
if unit_abbreviation == SpecificEnergyUnits.TerawattDayPerKilogram:
return """"""
if unit_abbreviation == SpecificEnergyUnits.KilowattDayPerTonne:
return """"""
if unit_abbreviation == SpecificEnergyUnits.MegawattDayPerTonne:
return """"""
if unit_abbreviation == SpecificEnergyUnits.GigawattDayPerTonne:
return """"""
if unit_abbreviation == SpecificEnergyUnits.TerawattDayPerTonne:
return """"""
if unit_abbreviation == SpecificEnergyUnits.KilowattDayPerShortTon:
return """"""
if unit_abbreviation == SpecificEnergyUnits.MegawattDayPerShortTon:
return """"""
if unit_abbreviation == SpecificEnergyUnits.GigawattDayPerShortTon:
return """"""
if unit_abbreviation == SpecificEnergyUnits.TerawattDayPerShortTon:
return """"""
if unit_abbreviation == SpecificEnergyUnits.KilowattHourPerPound:
return """"""
if unit_abbreviation == SpecificEnergyUnits.MegawattHourPerPound:
return """"""
if unit_abbreviation == SpecificEnergyUnits.GigawattHourPerPound:
return """"""
|
PypiClean
|
/my_santander_finance-0.3.6.tar.gz/my_santander_finance-0.3.6/my_santander_finance/sanfi.py
|
import logging
import sys
import click
from my_santander_finance.__init__ import __version__
from my_santander_finance.init import (
create_env_example,
download_chromedriver,
init_dir,
init_sqlite,
)
from my_santander_finance.logger import Logger
from my_santander_finance.web_scraping.get_amex import get_amex
from my_santander_finance.web_scraping.get_debito import get_debito
from my_santander_finance.web_scraping.get_visa import get_visa
# Starts logger for file
log = Logger().get_logger(__name__)
# This sets the root logger level to be info.
logging.root.setLevel(logging.INFO)
def show_version():
# log.info(f"sanfi version {__version__}")
print(__version__)
@click.command()
@click.option(
"--version",
default=False,
is_flag=True,
help="Show version",
)
@click.option(
"--debug",
default=False,
is_flag=True,
help="Activate debug mode",
)
@click.option(
"--debit",
default=False,
is_flag=True,
help="Procesa el reporte de consumo de la cuenta unica(debito)",
)
@click.option(
"--visa",
default=False,
is_flag=True,
help="Procesa el reporte de consumo de la tarjeta Visa",
)
@click.option(
"--amex",
default=False,
is_flag=True,
help="Procesa el reporte de consumo de la tarjeta American Express",
)
@click.option(
"--download",
default=False,
is_flag=True,
help="Download el reporte de la cuenta o tarjeta de credito del banco",
)
def main(version, debug, debit, visa, amex, download):
if version is True:
show_version()
sys.exit()
# Enable debug mode when requested (for example via --debug); this switches the root logging level.
if debug is True:
Logger().set_debug_mode(True)
if debit is False:
log.debug("Add --debit to get consumption of your account")
if visa is False:
log.debug("Add --visa to get consumption of your Visa credit card")
if amex is False:
log.debug("Add --amex to get consumption of your American Express credit card")
# create directories and tables in the database
init_dir()
create_env_example()
init_sqlite()
download_chromedriver()
# download the debit card report
if debit:
get_debito(download)
# download the Visa credit card report
if visa:
get_visa(download)
# download the American Express credit card report
if amex:
get_amex(download)
# -----------------------------------------
if __name__ == "__main__":
# pylint: disable=no-value-for-parameter
main()
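# ---------------------------------------------------------------------------
# Hypothetical invocation examples (not part of the original module). The
# installed console-script name may differ; running the module directly works
# because of the __main__ guard above:
#
#   python -m my_santander_finance.sanfi --version
#   python -m my_santander_finance.sanfi --debit --download
#   python -m my_santander_finance.sanfi --visa --amex --debug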
|
PypiClean
|
/ccsi-foqus-3.18.0.tar.gz/ccsi-foqus-3.18.0/docs/source/chapt_matlab/reference/matlab_foqus_intro.rst
|
MATLAB-FOQUS interface
======================
Introduction
------------
MATLAB® is a proprietary interpreted programming language developed by MathWorks, and is widely used in many science and engineering areas for numeric computing.
Among its important advantages are its ease of use and the large number of high-level functions available for many applications. The motivation for developing an
interface between MATLAB and FOQUS is therefore to make it easier for FOQUS users to work with MATLAB models and to integrate them with other FOQUS-supported
modeling environments, such as Aspen Plus and gPROMS, enabling highly complex cross-platform models that can then directly leverage FOQUS capabilities for
advanced analysis.
Two different but equivalent approaches were implemented for interfacing MATLAB and FOQUS; which one to use depends on the user's needs. The two approaches are
described below:
.. warning:: The setup steps for the two approaches shown below were tested with MATLAB R2019b and Python 3.6; however, they should also work with other MATLAB and
Python versions.
Option 1: MATLAB - FOQUS direct
-------------------------------
This approach is best suited for MATLAB simulations that are not computationally intensive, although it can also be used in those cases. It is fully integrated
with FOQUS and provides a simple way to run MATLAB simulations from within FOQUS.
To call MATLAB models from FOQUS through the FOQUS plugin implementation, the MATLAB engine API for Python must be set up properly; the engine API is
available for MATLAB R2014b or later. MATLAB supports Python versions 2.7, 3.3, 3.4, 3.5, 3.6, 3.7, and 3.8. Further details regarding specific MATLAB
and Python version compatibility are given `here <https://www.mathworks.com/content/dam/mathworks/mathworks-dot-com/support/sysreq/files/python-compatibility.pdf>`_.
To install the MATLAB engine package, follow the steps below; they require compatible versions of Python and MATLAB to be installed already, as well as a valid MATLAB license.
The steps assume that the installed Python distribution is Anaconda, but they also work for any other Python distribution.
1. Find out the MATLAB installation directory. To do this, just launch a new MATLAB session and type the instruction below::
matlabroot
2. Open an Anaconda command prompt. (Optional: activate the conda python environment if you are using a specific python environment for the installation).
3. Based on your operating system, move to the MATLAB installation folder and then to the location that contains the Python engine setup file. To do this, type the
instruction below::
cd %matlabroot%\extern\engines\python
.. note:: ``%matlabroot%`` is the MATLAB installation folder from step 1.
Now, if you list the files in this directory, you should see a ``setup.py`` file there.
4. Install the MATLAB engine package by typing the code below::
python setup.py build --build-base="C:\matlabpybuild" install
.. note:: ``C:\matlabpybuild`` is a folder to build the Python package. Users can use any folder that they have access to.
If the MATLAB engine package was installed correctly, a message similar to Figure 1 should appear in the terminal window.
.. figure:: ../figs/terminal_window_msg.png
Figure 1 - Terminal window message after installing the MATLAB engine package
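You can optionally verify the installation from Python before continuing. The snippet below is a minimal check (it is not part of FOQUS itself); it starts a MATLAB session through the engine API, evaluates a trivial expression, and closes the session:

.. code-block:: python

    import matlab.engine

    # Start a new MATLAB session through the engine API.
    eng = matlab.engine.start_matlab()

    # Call any MATLAB function as a method of the engine object.
    print(eng.sqrt(4.0))  # prints 2.0 if the engine works

    # Terminate the MATLAB session.
    eng.quit()
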
Now, to run MATLAB models within FOQUS, follow the steps below (a sketch of a node script covering steps 4-7 is shown after the list):
1. Create a node simulation in the FOQUS flowsheet editor and define all input and output variables of the model.
2. Create a MATLAB function calling the model.
3. Call the FOQUS plugin named "matlab_fs" to start a new MATLAB session. This can be done in the Model section of the node editor:
in the "Type" option choose "plugin", and in the "Model" option choose "matlab_fs".
4. Connect to the current MATLAB session from the node script.
5. Create a MATLAB array object in the FOQUS node script containing the input parameters for the MATLAB model.
6. Call the MATLAB function/model.
7. Retrieve the outputs from the MATLAB function to FOQUS output variables.
8. Terminate MATLAB session.
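The sketch below illustrates steps 4-7 for a hypothetical MATLAB function ``my_model`` with two inputs and one output. The variable names and the ``x``/``f`` dictionaries used to read node inputs and write node outputs are illustrative assumptions, not necessarily the exact FOQUS node-script API; adapt them to your own flowsheet:

.. code-block:: python

    import matlab.engine

    # Step 4: connect to the MATLAB session started by the "matlab_fs" plugin.
    # find_matlab() returns the names of shared MATLAB sessions.
    sessions = matlab.engine.find_matlab()
    eng = matlab.engine.connect_matlab(sessions[0])

    # Step 5: pack the FOQUS node inputs into a MATLAB array (names are hypothetical).
    inputs = matlab.double([x["inlet_temp"], x["pressure"]])

    # Step 6: call the MATLAB function/model, requesting one output.
    result = eng.my_model(inputs, nargout=1)

    # Step 7: copy the result back into a FOQUS output variable.
    f["conversion"] = float(result)

    # Step 8: terminate the MATLAB session.
    eng.quit()
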
Further details on how to use this option to interface MATLAB-FOQUS are given in the example presented in the
:ref:`tutorial 1<chapt_matlab/tutorial/matlab_foqus_tutorial:MATLAB-FOQUS interface - tutorials>`.
Option 2: MATLAB script implementation
--------------------------------------
This approach is best suited for MATLAB simulations that are computationally intensive, with FOQUS used for data analysis and surrogate modeling.
In this option, the MATLAB-FOQUS interface runs MATLAB models directly in the MATLAB environment while making the results/outputs fully compatible
with FOQUS modules. This is achieved through a MATLAB script ``foqus_matlab_script.m`` provided with the FOQUS distribution, which can
be executed directly in MATLAB. To use the script, the inputs of the MATLAB model must be defined in the same order as they were defined in
the FOQUS flowsheet.
The MATLAB script takes three inputs: 1) the MATLAB function containing the model, 2) the name of the PSUADE file containing the sample space for
the model, which must be created beforehand in the FOQUS uncertainty module, and 3) the path where the MATLAB function and PSUADE file are located.
The MATLAB script uses some functions from the FOQUS code base to handle the :ref:`PSUADE full file format<file-formats>` and sample data objects,
and these functions are written in Python. For this reason, MATLAB must be configured to execute Python modules before using the script. The steps for this
configuration are given below:
1. Find out where the Python executable is located. To do this, open an Anaconda command prompt or a Terminal and type the code below::
python -c "import sys; print(sys.executable)"
2. Open a new MATLAB session and type the code below::
pyenv('Version', '%pythonroot%python.exe')
.. note:: ``%pythonroot%`` is the Python executable folder found in step 1. You can verify that the Python configuration was stored in MATLAB by typing ``pyenv``
again, which should print the same information.
.. warning:: ``pyenv`` was first introduced in MATLAB R2019b. In older MATLAB versions, you need to use ``pyversion``, as shown below:
.. code-block:: matlab
pyversion('%pythonroot%python.exe')
3. Now, type the code line below::
py.numpy.arange(1)
.. note:: If you do not get errors, the Python configuration is ready and you can skip the remaining steps. If you get this or a similar error:
``Unable to resolve the name py.numpy.arange``, you need to verify that the folder containing the Python binary files is included
in the system environment variables; to do so, go to step 4.
4. In MATLAB, type the code below to see all folders that are added to the system path::
getenv('PATH')
.. note:: Check whether ``%pythonroot%\Library\bin`` is already in the path; if not, follow step 5.
5. In MATLAB, type the code below::
setenv('PATH', ['%pythonroot%\Library\bin', pathsep, getenv('PATH')])
.. note:: Replace ``%pythonroot%`` with the Python executable folder found in step 1. You can also manually add the folder containing the Python
binary files to the system environment variables, but the exact procedure depends on the operating system.
6. Type again the code below::
py.numpy.arange(1)
.. note:: This time everything should work fine without errors.
After completing the configuration to execute Python modules within MATLAB, the general steps to interface MATLAB and FOQUS are as follows (a short sketch for inspecting the generated ``outputs.csv`` file is shown after the list):
1. Create a node simulation in the FOQUS flowsheet editor and define all input and output variables of the model.
2. Create a new ensemble for the sample space using the uncertainty quantification module in FOQUS.
3. Export the UQ Ensemble to :ref:`PSUADE full file format<file-formats>`.
4. Create a MATLAB function calling the model (the inputs of the MATLAB function must be defined in the same order as they were defined in
the FOQUS flowsheet in step 1).
5. Execute the MATLAB script ``foqus_matlab_script.m`` provided with FOQUS, passing the MATLAB model function and the PSUADE file.
6. A new CSV file ``outputs.csv``, fully compatible with FOQUS and containing the results of the MATLAB simulations for the entire sample space, is created.
7. The ``outputs.csv`` file can now be imported into FOQUS to use the different FOQUS capabilities for subsequent analysis.
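The sketch below shows one way to inspect the generated ``outputs.csv`` before importing it into FOQUS in step 7. It assumes the file is in the current working directory and uses ``pandas``, which is an illustrative choice rather than a FOQUS requirement:

.. code-block:: python

    import pandas as pd

    # Load the results written by foqus_matlab_script.m for the whole sample space.
    results = pd.read_csv("outputs.csv")

    # Quick sanity checks before importing the file into FOQUS.
    print(results.shape)       # (number of samples, number of columns)
    print(results.describe())  # summary statistics of the inputs and outputs
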
Further details on how to use this option to interface MATLAB-FOQUS are given in the example presented in the
:ref:`tutorial 2<chapt_matlab/tutorial/matlab_foqus_tutorial:MATLAB-FOQUS interface - tutorials>`.
|
PypiClean
|
/cubicweb_wireit-1.3.0-py3-none-any.whl/cubicweb_wireit/data/wireit/lib/yui/uploader/uploader-min.js
|
/*
* SWFObject v1.5: Flash Player detection and embed - http://blog.deconcept.com/swfobject/
*
* SWFObject is (c) 2007 Geoff Stearns and is released under the MIT License:
* http://www.opensource.org/licenses/mit-license.php
* @namespace YAHOO
*/
YAHOO.namespace("deconcept");YAHOO.deconcept=YAHOO.deconcept||{};if(typeof YAHOO.deconcept.util=="undefined"||!YAHOO.deconcept.util){YAHOO.deconcept.util={};}if(typeof YAHOO.deconcept.SWFObjectUtil=="undefined"||!YAHOO.deconcept.SWFObjectUtil){YAHOO.deconcept.SWFObjectUtil={};}YAHOO.deconcept.SWFObject=function(E,C,K,F,H,J,L,G,A,D){if(!document.getElementById){return;}this.DETECT_KEY=D?D:"detectflash";this.skipDetect=YAHOO.deconcept.util.getRequestParameter(this.DETECT_KEY);this.params={};this.variables={};this.attributes=[];if(E){this.setAttribute("swf",E);}if(C){this.setAttribute("id",C);}if(K){this.setAttribute("width",K);}if(F){this.setAttribute("height",F);}if(H){this.setAttribute("version",new YAHOO.deconcept.PlayerVersion(H.toString().split(".")));}this.installedVer=YAHOO.deconcept.SWFObjectUtil.getPlayerVersion();if(!window.opera&&document.all&&this.installedVer.major>7){YAHOO.deconcept.SWFObject.doPrepUnload=true;}if(J){this.addParam("bgcolor",J);}var B=L?L:"high";this.addParam("quality",B);this.setAttribute("useExpressInstall",false);this.setAttribute("doExpressInstall",false);var I=(G)?G:window.location;this.setAttribute("xiRedirectUrl",I);this.setAttribute("redirectUrl","");if(A){this.setAttribute("redirectUrl",A);}};YAHOO.deconcept.SWFObject.prototype={useExpressInstall:function(A){this.xiSWFPath=!A?"expressinstall.swf":A;this.setAttribute("useExpressInstall",true);},setAttribute:function(A,B){this.attributes[A]=B;},getAttribute:function(A){return this.attributes[A];},addParam:function(A,B){this.params[A]=B;},getParams:function(){return this.params;},addVariable:function(A,B){this.variables[A]=B;},getVariable:function(A){return this.variables[A];},getVariables:function(){return this.variables;},getVariablePairs:function(){var A=[];var B;var C=this.getVariables();for(B in C){if(C.hasOwnProperty(B)){A[A.length]=B+"="+C[B];}}return A;},getSWFHTML:function(){var D="";var C={};var A="";var B="";if(navigator.plugins&&navigator.mimeTypes&&navigator.mimeTypes.length){if(this.getAttribute("doExpressInstall")){this.addVariable("MMplayerType","PlugIn");this.setAttribute("swf",this.xiSWFPath);}D='<embed type="application/x-shockwave-flash" src="'+this.getAttribute("swf")+'" width="'+this.getAttribute("width")+'" height="'+this.getAttribute("height")+'" style="'+this.getAttribute("style")+'"';D+=' id="'+this.getAttribute("id")+'" name="'+this.getAttribute("id")+'" ';C=this.getParams();for(A in C){if(C.hasOwnProperty(A)){D+=[A]+'="'+C[A]+'" ';}}B=this.getVariablePairs().join("&");if(B.length>0){D+='flashvars="'+B+'"';}D+="/>";}else{if(this.getAttribute("doExpressInstall")){this.addVariable("MMplayerType","ActiveX");this.setAttribute("swf",this.xiSWFPath);}D='<object id="'+this.getAttribute("id")+'" classid="clsid:D27CDB6E-AE6D-11cf-96B8-444553540000" width="'+this.getAttribute("width")+'" height="'+this.getAttribute("height")+'" style="'+this.getAttribute("style")+'">';D+='<param name="movie" value="'+this.getAttribute("swf")+'" />';C=this.getParams();for(A in C){if(C.hasOwnProperty(A)){D+='<param name="'+A+'" value="'+C[A]+'" />';}}B=this.getVariablePairs().join("&");if(B.length>0){D+='<param name="flashvars" value="'+B+'" />';}D+="</object>";}return D;},write:function(A){if(this.getAttribute("useExpressInstall")){var B=new 
YAHOO.deconcept.PlayerVersion([6,0,65]);if(this.installedVer.versionIsValid(B)&&!this.installedVer.versionIsValid(this.getAttribute("version"))){this.setAttribute("doExpressInstall",true);this.addVariable("MMredirectURL",escape(this.getAttribute("xiRedirectUrl")));document.title=document.title.slice(0,47)+" - Flash Player Installation";this.addVariable("MMdoctitle",document.title);}}if(this.skipDetect||this.getAttribute("doExpressInstall")||this.installedVer.versionIsValid(this.getAttribute("version"))){var C=(typeof A=="string")?document.getElementById(A):A;C.innerHTML=this.getSWFHTML();return true;}else{if(this.getAttribute("redirectUrl")!==""){document.location.replace(this.getAttribute("redirectUrl"));}}return false;}};YAHOO.deconcept.SWFObjectUtil.getPlayerVersion=function(){var D=null;var C=new YAHOO.deconcept.PlayerVersion([0,0,0]);if(navigator.plugins&&navigator.mimeTypes.length){var A=navigator.plugins["Shockwave Flash"];if(A&&A.description){C=new YAHOO.deconcept.PlayerVersion(A.description.replace(/([a-zA-Z]|\s)+/,"").replace(/(\s+r|\s+b[0-9]+)/,".").split("."));}}else{if(navigator.userAgent&&navigator.userAgent.indexOf("Windows CE")>=0){var B=3;while(D){try{B++;D=new ActiveXObject("ShockwaveFlash.ShockwaveFlash."+B);C=new YAHOO.deconcept.PlayerVersion([B,0,0]);}catch(E){D=null;}}}else{try{D=new ActiveXObject("ShockwaveFlash.ShockwaveFlash.7");}catch(E){try{D=new ActiveXObject("ShockwaveFlash.ShockwaveFlash.6");C=new YAHOO.deconcept.PlayerVersion([6,0,21]);D.AllowScriptAccess="always";}catch(E){if(C.major==6){return C;}}try{D=new ActiveXObject("ShockwaveFlash.ShockwaveFlash");}catch(E){}}if(D!==null){C=new YAHOO.deconcept.PlayerVersion(D.GetVariable("$version").split(" ")[1].split(","));}}}return C;};YAHOO.deconcept.PlayerVersion=function(A){this.major=A[0]!==null?parseInt(A[0],0):0;this.minor=A[1]!==null?parseInt(A[1],0):0;this.rev=A[2]!==null?parseInt(A[2],0):0;};YAHOO.deconcept.PlayerVersion.prototype.versionIsValid=function(A){if(this.major<A.major){return false;}if(this.major>A.major){return true;}if(this.minor<A.minor){return false;}if(this.minor>A.minor){return true;}if(this.rev<A.rev){return false;}return true;};YAHOO.deconcept.util={getRequestParameter:function(D){var C=document.location.search||document.location.hash;if(D===null){return C;}if(C){var B=C.substring(1).split("&");for(var A=0;A<B.length;A++){if(B[A].substring(0,B[A].indexOf("="))==D){return B[A].substring((B[A].indexOf("=")+1));}}}return"";
}};YAHOO.deconcept.SWFObjectUtil.cleanupSWFs=function(){var C=document.getElementsByTagName("OBJECT");for(var B=C.length-1;B>=0;B--){C[B].style.display="none";for(var A in C[B]){if(typeof C[B][A]=="function"){C[B][A]=function(){};}}}};if(YAHOO.deconcept.SWFObject.doPrepUnload){if(!YAHOO.deconcept.unloadSet){YAHOO.deconcept.SWFObjectUtil.prepUnload=function(){__flash_unloadHandler=function(){};__flash_savedUnloadHandler=function(){};window.attachEvent("onunload",YAHOO.deconcept.SWFObjectUtil.cleanupSWFs);};window.attachEvent("onbeforeunload",YAHOO.deconcept.SWFObjectUtil.prepUnload);YAHOO.deconcept.unloadSet=true;}}if(!document.getElementById&&document.all){document.getElementById=function(A){return document.all[A];};}YAHOO.widget.FlashAdapter=function(E,A,B,C){this._queue=this._queue||[];this._events=this._events||{};this._configs=this._configs||{};B=B||{};this._id=B.id=B.id||YAHOO.util.Dom.generateId(null,"yuigen");B.version=B.version||"9.0.45";B.backgroundColor=B.backgroundColor||"#ffffff";this._attributes=B;this._swfURL=E;this._containerID=A;this._embedSWF(this._swfURL,this._containerID,B.id,B.version,B.backgroundColor,B.expressInstall,B.wmode,C);try{this.createEvent("contentReady");}catch(D){}};YAHOO.widget.FlashAdapter.owners=YAHOO.widget.FlashAdapter.owners||{};YAHOO.extend(YAHOO.widget.FlashAdapter,YAHOO.util.AttributeProvider,{_swfURL:null,_containerID:null,_swf:null,_id:null,_initialized:false,_attributes:null,toString:function(){return"FlashAdapter "+this._id;},destroy:function(){if(this._swf){var B=YAHOO.util.Dom.get(this._containerID);B.removeChild(this._swf);}var A=this._id;for(var C in this){if(YAHOO.lang.hasOwnProperty(this,C)){this[C]=null;}}},_embedSWF:function(J,I,E,C,F,G,B,H){var D=new YAHOO.deconcept.SWFObject(J,E,"100%","100%",C,F);if(G){D.useExpressInstall(G);}D.addParam("allowScriptAccess","always");if(B){D.addParam("wmode",B);}D.addParam("menu","false");D.addVariable("allowedDomain",document.location.hostname);D.addVariable("elementID",E);D.addVariable("eventHandler","YAHOO.widget.FlashAdapter.eventHandler");if(H){D.addVariable("buttonSkin",H);}var A=YAHOO.util.Dom.get(I);var K=D.write(A);if(K){this._swf=YAHOO.util.Dom.get(E);YAHOO.widget.FlashAdapter.owners[E]=this;}else{}},_eventHandler:function(B){var A=B.type;switch(A){case"swfReady":this._loadHandler();return;case"log":return;}this.fireEvent(A,B);},_loadHandler:function(){this._initialized=false;this._initAttributes(this._attributes);this.setAttributes(this._attributes,true);this._initialized=true;this.fireEvent("contentReady");},set:function(A,B){this._attributes[A]=B;YAHOO.widget.FlashAdapter.superclass.set.call(this,A,B);},_initAttributes:function(A){this.getAttributeConfig("altText",{method:this._getAltText});this.setAttributeConfig("altText",{method:this._setAltText});this.getAttributeConfig("swfURL",{method:this._getSWFURL});},_getSWFURL:function(){return this._swfURL;},_getAltText:function(){return this._swf.getAltText();},_setAltText:function(A){return this._swf.setAltText(A);}});YAHOO.widget.FlashAdapter.eventHandler=function(A,B){if(!YAHOO.widget.FlashAdapter.owners[A]){setTimeout(function(){YAHOO.widget.FlashAdapter.eventHandler(A,B);},0);}else{YAHOO.widget.FlashAdapter.owners[A]._eventHandler(B);}};YAHOO.widget.FlashAdapter.proxyFunctionCount=0;YAHOO.widget.FlashAdapter.createProxyFunction=function(B){var A=YAHOO.widget.FlashAdapter.proxyFunctionCount;YAHOO.widget.FlashAdapter["proxyFunction"+A]=function(){return 
B.apply(null,arguments);};YAHOO.widget.FlashAdapter.proxyFunctionCount++;return"YAHOO.widget.FlashAdapter.proxyFunction"+A.toString();};YAHOO.widget.FlashAdapter.removeProxyFunction=function(A){if(!A||A.indexOf("YAHOO.widget.FlashAdapter.proxyFunction")<0){return;}A=A.substr(26);YAHOO.widget.FlashAdapter[A]=null;};YAHOO.widget.Uploader=function(A,B,D){var C="window";if(!(B)||(B&&D)){C="transparent";}YAHOO.widget.Uploader.superclass.constructor.call(this,YAHOO.widget.Uploader.SWFURL,A,{wmode:C},B);this.createEvent("mouseDown");this.createEvent("mouseUp");this.createEvent("rollOver");this.createEvent("rollOut");this.createEvent("click");this.createEvent("fileSelect");this.createEvent("uploadStart");this.createEvent("uploadProgress");this.createEvent("uploadCancel");this.createEvent("uploadComplete");this.createEvent("uploadCompleteData");this.createEvent("uploadError");};YAHOO.widget.Uploader.SWFURL="assets/uploader.swf";YAHOO.extend(YAHOO.widget.Uploader,YAHOO.widget.FlashAdapter,{upload:function(A,B,E,C,D){this._swf.upload(A,B,E,C,D);},uploadAll:function(A,D,B,C){this._swf.uploadAll(A,D,B,C);},cancel:function(A){this._swf.cancel(A);},clearFileList:function(){this._swf.clearFileList();},removeFile:function(A){this._swf.removeFile(A);},setAllowLogging:function(A){this._swf.setAllowLogging(A);},setSimUploadLimit:function(A){this._swf.setSimUploadLimit(A);},setAllowMultipleFiles:function(A){this._swf.setAllowMultipleFiles(A);},setFileFilters:function(A){this._swf.setFileFilters(A);},enable:function(){this._swf.enable();},disable:function(){this._swf.disable();}});YAHOO.register("uploader",YAHOO.widget.Uploader,{version:"2.7.0",build:"1799"});
|
PypiClean
|
/sphinxcontrib-emacs-0.4.tar.gz/sphinxcontrib-emacs-0.4/CHANGES.rst
|
master (in development)
=======================
0.4 (Jan 22, 2018)
==================
- Updates for more recent Sphinx versions
0.3.1 (Dec 20, 2014)
====================
- Fix `cl-slot` role
0.3 (Nov 17, 2014)
==================
- Prevent installation on Python 3 [GH-16]
- Add new `constant` directive for constants
- Add support for `defconst` in autodoc
0.2.1 (Aug 8, 2014)
===================
- Handle backquotes
0.2 (Aug 1, 2014)
=================
- Add `info_xref` configuration value to add additional Info manuals for HTML
cross-referencing
- Fix node name expansion
- Add missing extension to online references
- Fix many issues in Lisp interpretation
0.1 (May 12, 2014)
==================
- Initial release
|
PypiClean
|
/aim-with-auth-3.14.6.tar.gz/aim-with-auth-3.14.6/aim/storage/structured/sql_engine/utils.py
|
from abc import ABCMeta
from typing import Iterator, Collection, TypeVar, Union, Callable
from sqlalchemy import text
try:
from typing import GenericMeta
except ImportError:
class GenericMeta(type):
pass
T = TypeVar('T')
class ModelMappedProperty:
def __init__(self, name: str, mapped_name: str = None,
get_modifier: Callable = None,
with_setter: bool = True,
direct_setter: bool = False,
autogenerate: bool = True):
self.name = name
self.mapped_name = mapped_name or self.name
self.get_modifier = get_modifier
self.with_setter = with_setter
self.direct_setter = direct_setter
self.autogenerate = autogenerate
def generate_property(self):
def getter(object_):
if self.get_modifier:
return self.get_modifier(getattr(object_._model, self.mapped_name)) if object_._model else None
else:
return getattr(object_._model, self.mapped_name) if object_._model else None
setter = None
if self.with_setter or self.direct_setter:
def direct_setter(object_, value):
engine = object_._session.bind
table_name = object_._model.__tablename__
with engine.begin() as conn:
sql = text(f'UPDATE {table_name} SET {self.mapped_name} = :val WHERE id = :id')
conn.execute(sql, {'val': value, 'id': object_._id})
def setter(object_, value):
assert object_._model
try:
setattr(object_._model, self.mapped_name, value)
object_._session.add(object_._model)
if object_._session.autocommit:
object_._session.commit()
except Exception:
direct_setter(object_, value)
if self.direct_setter:
return property(getter, direct_setter)
else:
return property(getter, setter)
class ModelMappedCollection(Collection[T]):
def __init__(self, session, **kwargs):
# TODO: [AT] Find elegant way to check mutually exclusive args
if ('query' not in kwargs and 'collection' not in kwargs) \
or ('query' in kwargs and 'collection' in kwargs):
raise ValueError('Cannot initialize ModelMappedCollection. Please provide \'query\' or \'collection\'.')
self.session = session
self.query = kwargs.get('query')
self._cache = kwargs.get('collection')
def _create_cache(self):
self._it_cls = self.__orig_class__.__args__[0]
if self._cache is None:
self._cache = self.query.all()
def __iter__(self) -> Iterator[T]:
self._create_cache()
self._idx = 0
return self
def __next__(self) -> T:
if self._idx >= len(self._cache):
raise StopIteration
ret = self._it_cls.from_model(self._cache[self._idx], self.session)
self._idx += 1
return ret
def __len__(self):
if self._cache is not None:
return len(self._cache)
else:
return self.query.count()
def __contains__(self, item: Union[T, str]) -> bool:
self._create_cache()
if isinstance(item, str):
match = next((i for i in self._cache if i.name == item), None)
return match is not None
elif isinstance(item, self._it_cls):
match = next((i for i in self._cache if i.id == item._model.id), None)
return match is not None
return False
class ModelMappedClassMeta(GenericMeta, ABCMeta):
__mapping__ = {}
def __new__(mcls, name, bases, namespace, **kwargs):
model = namespace.get('__model__')
mapped_properties = namespace.get('__mapped_properties__')
if not model:
raise TypeError(f'Model-mapped class \'{name}\' attribute \'__model__\' must be set to mapped model.')
if mcls.__mapping__.get(model):
return mcls.__mapping__.get(model)
schema = []
for attribute in mapped_properties:
if not isinstance(attribute, ModelMappedProperty):
raise TypeError(f'Mapped property \'{attribute.name}\' should be of type \'MappedProperty\'.')
schema.append(attribute.name)
if attribute.autogenerate:
namespace[attribute.name] = attribute.generate_property()
namespace['__schema__'] = tuple(schema)
def fields(cls):
return cls.__schema__
namespace['fields'] = classmethod(fields)
type_ = ABCMeta.__new__(mcls, name, bases, namespace, **kwargs)
mcls.__mapping__[model] = type_
return type_
|
PypiClean
|
/google-api-python-client-2.97.0.tar.gz/google-api-python-client-2.97.0/googleapiclient/channel.py
|
from __future__ import absolute_import
import datetime
import uuid
from googleapiclient import _helpers as util
from googleapiclient import errors
# The Unix time epoch starts at midnight on January 1, 1970 (UTC).
EPOCH = datetime.datetime.utcfromtimestamp(0)
# Map the names of the parameters in the JSON channel description to
# the parameter names we use in the Channel class.
CHANNEL_PARAMS = {
"address": "address",
"id": "id",
"expiration": "expiration",
"params": "params",
"resourceId": "resource_id",
"resourceUri": "resource_uri",
"type": "type",
"token": "token",
}
X_GOOG_CHANNEL_ID = "X-GOOG-CHANNEL-ID"
X_GOOG_MESSAGE_NUMBER = "X-GOOG-MESSAGE-NUMBER"
X_GOOG_RESOURCE_STATE = "X-GOOG-RESOURCE-STATE"
X_GOOG_RESOURCE_URI = "X-GOOG-RESOURCE-URI"
X_GOOG_RESOURCE_ID = "X-GOOG-RESOURCE-ID"
def _upper_header_keys(headers):
new_headers = {}
for k, v in headers.items():
new_headers[k.upper()] = v
return new_headers
class Notification(object):
"""A Notification from a Channel.
Notifications are not usually constructed directly, but are returned
from functions like notification_from_headers().
Attributes:
message_number: int, The unique id number of this notification.
state: str, The state of the resource being monitored.
uri: str, The address of the resource being monitored.
resource_id: str, The unique identifier of the version of the resource at
this event.
"""
@util.positional(5)
def __init__(self, message_number, state, resource_uri, resource_id):
"""Notification constructor.
Args:
message_number: int, The unique id number of this notification.
state: str, The state of the resource being monitored. Can be one
of "exists", "not_exists", or "sync".
resource_uri: str, The address of the resource being monitored.
resource_id: str, The identifier of the watched resource.
"""
self.message_number = message_number
self.state = state
self.resource_uri = resource_uri
self.resource_id = resource_id
class Channel(object):
"""A Channel for notifications.
Usually not constructed directly, instead it is returned from helper
functions like new_webhook_channel().
Attributes:
type: str, The type of delivery mechanism used by this channel. For
example, 'web_hook'.
id: str, A UUID for the channel.
token: str, An arbitrary string associated with the channel that
is delivered to the target address with each event delivered
over this channel.
address: str, The address of the receiving entity where events are
delivered. Specific to the channel type.
expiration: int, The time, in milliseconds from the epoch, when this
channel will expire.
params: dict, A dictionary of string to string, with additional parameters
controlling delivery channel behavior.
resource_id: str, An opaque id that identifies the resource that is
being watched. Stable across different API versions.
resource_uri: str, The canonicalized ID of the watched resource.
"""
@util.positional(5)
def __init__(
self,
type,
id,
token,
address,
expiration=None,
params=None,
resource_id="",
resource_uri="",
):
"""Create a new Channel.
In user code, this Channel constructor will not typically be called
manually since there are functions for creating channels for each specific
type with a more customized set of arguments to pass.
Args:
type: str, The type of delivery mechanism used by this channel. For
example, 'web_hook'.
id: str, A UUID for the channel.
token: str, An arbitrary string associated with the channel that
is delivered to the target address with each event delivered
over this channel.
address: str, The address of the receiving entity where events are
delivered. Specific to the channel type.
expiration: int, The time, in milliseconds from the epoch, when this
channel will expire.
params: dict, A dictionary of string to string, with additional parameters
controlling delivery channel behavior.
resource_id: str, An opaque id that identifies the resource that is
being watched. Stable across different API versions.
resource_uri: str, The canonicalized ID of the watched resource.
"""
self.type = type
self.id = id
self.token = token
self.address = address
self.expiration = expiration
self.params = params
self.resource_id = resource_id
self.resource_uri = resource_uri
def body(self):
"""Build a body from the Channel.
Constructs a dictionary that's appropriate for passing into watch()
methods as the value of body argument.
Returns:
A dictionary representation of the channel.
"""
result = {
"id": self.id,
"token": self.token,
"type": self.type,
"address": self.address,
}
if self.params:
result["params"] = self.params
if self.resource_id:
result["resourceId"] = self.resource_id
if self.resource_uri:
result["resourceUri"] = self.resource_uri
if self.expiration:
result["expiration"] = self.expiration
return result
def update(self, resp):
"""Update a channel with information from the response of watch().
When a request is sent to watch() a resource, the response returned
from the watch() request is a dictionary with updated channel information,
such as the resource_id, which is needed when stopping a subscription.
Args:
resp: dict, The response from a watch() method.
"""
for json_name, param_name in CHANNEL_PARAMS.items():
value = resp.get(json_name)
if value is not None:
setattr(self, param_name, value)
def notification_from_headers(channel, headers):
"""Parse a notification from the webhook request headers, validate
the notification, and return a Notification object.
Args:
channel: Channel, The channel that the notification is associated with.
headers: dict, A dictionary like object that contains the request headers
from the webhook HTTP request.
Returns:
A Notification object.
Raises:
errors.InvalidNotificationError if the notification is invalid.
ValueError if the X-GOOG-MESSAGE-NUMBER can't be converted to an int.
"""
headers = _upper_header_keys(headers)
channel_id = headers[X_GOOG_CHANNEL_ID]
if channel.id != channel_id:
raise errors.InvalidNotificationError(
"Channel id mismatch: %s != %s" % (channel.id, channel_id)
)
else:
message_number = int(headers[X_GOOG_MESSAGE_NUMBER])
state = headers[X_GOOG_RESOURCE_STATE]
resource_uri = headers[X_GOOG_RESOURCE_URI]
resource_id = headers[X_GOOG_RESOURCE_ID]
return Notification(message_number, state, resource_uri, resource_id)
@util.positional(2)
def new_webhook_channel(url, token=None, expiration=None, params=None):
"""Create a new webhook Channel.
Args:
url: str, URL to post notifications to.
token: str, An arbitrary string associated with the channel that
is delivered to the target address with each notification delivered
over this channel.
expiration: datetime.datetime, A time in the future when the channel
should expire. Can also be None if the subscription should use the
default expiration. Note that different services may have different
limits on how long a subscription lasts. Check the response from the
watch() method to see the value the service has set for an expiration
time.
params: dict, Extra parameters to pass on channel creation. Currently
not used for webhook channels.
"""
expiration_ms = 0
if expiration:
delta = expiration - EPOCH
expiration_ms = (
delta.microseconds / 1000 + (delta.seconds + delta.days * 24 * 3600) * 1000
)
if expiration_ms < 0:
expiration_ms = 0
return Channel(
"web_hook",
str(uuid.uuid4()),
token,
url,
expiration=expiration_ms,
params=params,
)
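# ----------------------------------------------------------------------------
# Usage sketch (illustrative, not part of this module): building a webhook
# channel body. The URL and token below are placeholders; the resulting
# dictionary is what would be passed as the `body` argument of a service
# collection's watch() method.
#
#   import datetime
#   channel = new_webhook_channel(
#       "https://example.com/notifications",
#       token="opaque-token",
#       expiration=datetime.datetime.utcnow() + datetime.timedelta(hours=1),
#   )
#   body = channel.body()  # {"id": ..., "token": ..., "type": "web_hook", ...}
# ----------------------------------------------------------------------------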
|
PypiClean
|
/datalab_on_jupyter-2.2.1-py3-none-any.whl/datalab_on_jupyter/dist/30-4c457f23095c8e520fbf.bundle.js
|
(window.webpackJsonp=window.webpackJsonp||[]).push([[30],{1745:function(e,n,t){"use strict";t.r(n),t.d(n,"conf",(function(){return s})),t.d(n,"language",(function(){return o}));var s={comments:{lineComment:"//",blockComment:["(*","*)"]},brackets:[["{","}"],["[","]"],["(",")"]],autoClosingPairs:[{open:"{",close:"}"},{open:"[",close:"]"},{open:"(",close:")"},{open:'"',close:'"'}],surroundingPairs:[{open:"{",close:"}"},{open:"[",close:"]"},{open:"(",close:")"},{open:'"',close:'"'},{open:"'",close:"'"}],folding:{markers:{start:new RegExp("^\\s*//\\s*#region\\b|^\\s*\\(\\*\\s*#region(.*)\\*\\)"),end:new RegExp("^\\s*//\\s*#endregion\\b|^\\s*\\(\\*\\s*#endregion\\s*\\*\\)")}}},o={defaultToken:"",tokenPostfix:".fs",keywords:["abstract","and","atomic","as","assert","asr","base","begin","break","checked","component","const","constraint","constructor","continue","class","default","delegate","do","done","downcast","downto","elif","else","end","exception","eager","event","external","extern","false","finally","for","fun","function","fixed","functor","global","if","in","include","inherit","inline","interface","internal","land","lor","lsl","lsr","lxor","lazy","let","match","member","mod","module","mutable","namespace","method","mixin","new","not","null","of","open","or","object","override","private","parallel","process","protected","pure","public","rec","return","static","sealed","struct","sig","then","to","true","tailcall","trait","try","type","upcast","use","val","void","virtual","volatile","when","while","with","yield"],symbols:/[=><!~?:&|+\-*\^%;\.,\/]+/,escapes:/\\(?:[abfnrtv\\"']|x[0-9A-Fa-f]{1,4}|u[0-9A-Fa-f]{4}|U[0-9A-Fa-f]{8})/,integersuffix:/[uU]?[yslnLI]?/,floatsuffix:/[fFmM]?/,tokenizer:{root:[[/[a-zA-Z_]\w*/,{cases:{"@keywords":{token:"keyword.$0"},"@default":"identifier"}}],{include:"@whitespace"},[/\[<.*>\]/,"annotation"],[/^#(if|else|endif)/,"keyword"],[/[{}()\[\]]/,"@brackets"],[/[<>](?!@symbols)/,"@brackets"],[/@symbols/,"delimiter"],[/\d*\d+[eE]([\-+]?\d+)?(@floatsuffix)/,"number.float"],[/\d*\.\d+([eE][\-+]?\d+)?(@floatsuffix)/,"number.float"],[/0x[0-9a-fA-F]+LF/,"number.float"],[/0x[0-9a-fA-F]+(@integersuffix)/,"number.hex"],[/0b[0-1]+(@integersuffix)/,"number.bin"],[/\d+(@integersuffix)/,"number"],[/[;,.]/,"delimiter"],[/"([^"\\]|\\.)*$/,"string.invalid"],[/"""/,"string",'@string."""'],[/"/,"string",'@string."'],[/\@"/,{token:"string.quote",next:"@litstring"}],[/'[^\\']'B?/,"string"],[/(')(@escapes)(')/,["string","string.escape","string"]],[/'/,"string.invalid"]],whitespace:[[/[ \t\r\n]+/,""],[/\(\*(?!\))/,"comment","@comment"],[/\/\/.*$/,"comment"]],comment:[[/[^*(]+/,"comment"],[/\*\)/,"comment","@pop"],[/\*/,"comment"],[/\(\*\)/,"comment"],[/\(/,"comment"]],string:[[/[^\\"]+/,"string"],[/@escapes/,"string.escape"],[/\\./,"string.escape.invalid"],[/("""|"B?)/,{cases:{"$#==$S2":{token:"string",next:"@pop"},"@default":"string"}}]],litstring:[[/[^"]+/,"string"],[/""/,"string.escape"],[/"/,{token:"string.quote",next:"@pop"}]]}}}}]);
|
PypiClean
|
/rmap-7.5.tar.gz/rmap-7.5/rainbo/static/rainbo/js/lib/jqBootstrapValidation.js
|
(function( $ ){
var createdElements = [];
var defaults = {
options: {
prependExistingHelpBlock: false,
sniffHtml: true, // sniff for 'required', 'maxlength', etc
preventSubmit: true, // stop the form submit event from firing if validation fails
submitError: false, // function called if there is an error when trying to submit
submitSuccess: false, // function called just before a successful submit event is sent to the server
semanticallyStrict: false, // set to true to tidy up generated HTML output
autoAdd: {
helpBlocks: true
},
filter: function () {
// return $(this).is(":visible"); // only validate elements you can see
return true; // validate everything
}
},
methods: {
init : function( options ) {
var settings = $.extend(true, {}, defaults);
settings.options = $.extend(true, settings.options, options);
var $siblingElements = this;
var uniqueForms = $.unique(
$siblingElements.map( function () {
return $(this).parents("form")[0];
}).toArray()
);
$(uniqueForms).bind("submit", function (e) {
var $form = $(this);
var warningsFound = 0;
var $inputs = $form.find("input,textarea,select").not("[type=submit],[type=image]").filter(settings.options.filter);
$inputs.trigger("submit.validation").trigger("validationLostFocus.validation");
$inputs.each(function (i, el) {
var $this = $(el),
$controlGroup = $this.parents(".control-group").first();
if (
$controlGroup.hasClass("warning")
) {
$controlGroup.removeClass("warning").addClass("error");
warningsFound++;
}
});
$inputs.trigger("validationLostFocus.validation");
if (warningsFound) {
if (settings.options.preventSubmit) {
e.preventDefault();
}
$form.addClass("error");
if ($.isFunction(settings.options.submitError)) {
settings.options.submitError($form, e, $inputs.jqBootstrapValidation("collectErrors", true));
}
} else {
$form.removeClass("error");
if ($.isFunction(settings.options.submitSuccess)) {
settings.options.submitSuccess($form, e);
}
}
});
return this.each(function(){
// Get references to everything we're interested in
var $this = $(this),
$controlGroup = $this.parents(".control-group").first(),
$helpBlock = $controlGroup.find(".help-block").first(),
$form = $this.parents("form").first(),
validatorNames = [];
// create message container if not exists
if (!$helpBlock.length && settings.options.autoAdd && settings.options.autoAdd.helpBlocks) {
$helpBlock = $('<div class="help-block" />');
$controlGroup.find('.controls').append($helpBlock);
createdElements.push($helpBlock[0]);
}
// =============================================================
// SNIFF HTML FOR VALIDATORS
// =============================================================
// *snort sniff snuffle*
if (settings.options.sniffHtml) {
var message = "";
// ---------------------------------------------------------
// PATTERN
// ---------------------------------------------------------
if ($this.attr("pattern") !== undefined) {
message = "Not in the expected format<!-- data-validation-pattern-message to override -->";
if ($this.data("validationPatternMessage")) {
message = $this.data("validationPatternMessage");
}
$this.data("validationPatternMessage", message);
$this.data("validationPatternRegex", $this.attr("pattern"));
}
// ---------------------------------------------------------
// MAX
// ---------------------------------------------------------
if ($this.attr("max") !== undefined || $this.attr("aria-valuemax") !== undefined) {
var max = ($this.attr("max") !== undefined ? $this.attr("max") : $this.attr("aria-valuemax"));
message = "Too high: Maximum of '" + max + "'<!-- data-validation-max-message to override -->";
if ($this.data("validationMaxMessage")) {
message = $this.data("validationMaxMessage");
}
$this.data("validationMaxMessage", message);
$this.data("validationMaxMax", max);
}
// ---------------------------------------------------------
// MIN
// ---------------------------------------------------------
if ($this.attr("min") !== undefined || $this.attr("aria-valuemin") !== undefined) {
var min = ($this.attr("min") !== undefined ? $this.attr("min") : $this.attr("aria-valuemin"));
message = "Too low: Minimum of '" + min + "'<!-- data-validation-min-message to override -->";
if ($this.data("validationMinMessage")) {
message = $this.data("validationMinMessage");
}
$this.data("validationMinMessage", message);
$this.data("validationMinMin", min);
}
// ---------------------------------------------------------
// MAXLENGTH
// ---------------------------------------------------------
if ($this.attr("maxlength") !== undefined) {
message = "Too long: Maximum of '" + $this.attr("maxlength") + "' characters<!-- data-validation-maxlength-message to override -->";
if ($this.data("validationMaxlengthMessage")) {
message = $this.data("validationMaxlengthMessage");
}
$this.data("validationMaxlengthMessage", message);
$this.data("validationMaxlengthMaxlength", $this.attr("maxlength"));
}
// ---------------------------------------------------------
// MINLENGTH
// ---------------------------------------------------------
if ($this.attr("minlength") !== undefined) {
message = "Too short: Minimum of '" + $this.attr("minlength") + "' characters<!-- data-validation-minlength-message to override -->";
if ($this.data("validationMinlengthMessage")) {
message = $this.data("validationMinlengthMessage");
}
$this.data("validationMinlengthMessage", message);
$this.data("validationMinlengthMinlength", $this.attr("minlength"));
}
// ---------------------------------------------------------
// REQUIRED
// ---------------------------------------------------------
if ($this.attr("required") !== undefined || $this.attr("aria-required") !== undefined) {
message = settings.builtInValidators.required.message;
if ($this.data("validationRequiredMessage")) {
message = $this.data("validationRequiredMessage");
}
$this.data("validationRequiredMessage", message);
}
// ---------------------------------------------------------
// NUMBER
// ---------------------------------------------------------
if ($this.attr("type") !== undefined && $this.attr("type").toLowerCase() === "number") {
message = settings.builtInValidators.number.message;
if ($this.data("validationNumberMessage")) {
message = $this.data("validationNumberMessage");
}
$this.data("validationNumberMessage", message);
}
// ---------------------------------------------------------
// EMAIL
// ---------------------------------------------------------
if ($this.attr("type") !== undefined && $this.attr("type").toLowerCase() === "email") {
message = "Not a valid email address<!-- data-validator-validemail-message to override -->";
if ($this.data("validationValidemailMessage")) {
message = $this.data("validationValidemailMessage");
} else if ($this.data("validationEmailMessage")) {
message = $this.data("validationEmailMessage");
}
$this.data("validationValidemailMessage", message);
}
// ---------------------------------------------------------
// MINCHECKED
// ---------------------------------------------------------
if ($this.attr("minchecked") !== undefined) {
message = "Not enough options checked; Minimum of '" + $this.attr("minchecked") + "' required<!-- data-validation-minchecked-message to override -->";
if ($this.data("validationMincheckedMessage")) {
message = $this.data("validationMincheckedMessage");
}
$this.data("validationMincheckedMessage", message);
$this.data("validationMincheckedMinchecked", $this.attr("minchecked"));
}
// ---------------------------------------------------------
// MAXCHECKED
// ---------------------------------------------------------
if ($this.attr("maxchecked") !== undefined) {
message = "Too many options checked; Maximum of '" + $this.attr("maxchecked") + "' required<!-- data-validation-maxchecked-message to override -->";
if ($this.data("validationMaxcheckedMessage")) {
message = $this.data("validationMaxcheckedMessage");
}
$this.data("validationMaxcheckedMessage", message);
$this.data("validationMaxcheckedMaxchecked", $this.attr("maxchecked"));
}
}
// =============================================================
// COLLECT VALIDATOR NAMES
// =============================================================
// Get named validators
if ($this.data("validation") !== undefined) {
validatorNames = $this.data("validation").split(",");
}
// Get extra ones defined on the element's data attributes
$.each($this.data(), function (i, el) {
var parts = i.replace(/([A-Z])/g, ",$1").split(",");
if (parts[0] === "validation" && parts[1]) {
validatorNames.push(parts[1]);
}
});
// =============================================================
// NORMALISE VALIDATOR NAMES
// =============================================================
var validatorNamesToInspect = validatorNames;
var newValidatorNamesToInspect = [];
do // repeatedly expand 'shortcut' validators into their real validators
{
// Uppercase only the first letter of each name
$.each(validatorNames, function (i, el) {
validatorNames[i] = formatValidatorName(el);
});
// Remove duplicate validator names
validatorNames = $.unique(validatorNames);
// Pull out the new validator names from each shortcut
newValidatorNamesToInspect = [];
$.each(validatorNamesToInspect, function(i, el) {
if ($this.data("validation" + el + "Shortcut") !== undefined) {
// Are these custom validators?
// Pull them out!
$.each($this.data("validation" + el + "Shortcut").split(","), function(i2, el2) {
newValidatorNamesToInspect.push(el2);
});
} else if (settings.builtInValidators[el.toLowerCase()]) {
// Is this a recognised built-in?
// Pull it out!
var validator = settings.builtInValidators[el.toLowerCase()];
if (validator.type.toLowerCase() === "shortcut") {
$.each(validator.shortcut.split(","), function (i, el) {
el = formatValidatorName(el);
newValidatorNamesToInspect.push(el);
validatorNames.push(el);
});
}
}
});
validatorNamesToInspect = newValidatorNamesToInspect;
} while (validatorNamesToInspect.length > 0)
// =============================================================
// SET UP VALIDATOR ARRAYS
// =============================================================
var validators = {};
$.each(validatorNames, function (i, el) {
// Set up the 'override' message
var message = $this.data("validation" + el + "Message");
var hasOverrideMessage = (message !== undefined);
var foundValidator = false;
message =
(
message
? message
: "'" + el + "' validation failed <!-- Add attribute 'data-validation-" + el.toLowerCase() + "-message' to input to change this message -->"
)
;
$.each(
settings.validatorTypes,
function (validatorType, validatorTemplate) {
if (validators[validatorType] === undefined) {
validators[validatorType] = [];
}
if (!foundValidator && $this.data("validation" + el + formatValidatorName(validatorTemplate.name)) !== undefined) {
validators[validatorType].push(
$.extend(
true,
{
name: formatValidatorName(validatorTemplate.name),
message: message
},
validatorTemplate.init($this, el)
)
);
foundValidator = true;
}
}
);
if (!foundValidator && settings.builtInValidators[el.toLowerCase()]) {
var validator = $.extend(true, {}, settings.builtInValidators[el.toLowerCase()]);
if (hasOverrideMessage) {
validator.message = message;
}
var validatorType = validator.type.toLowerCase();
if (validatorType === "shortcut") {
foundValidator = true;
} else {
$.each(
settings.validatorTypes,
function (validatorTemplateType, validatorTemplate) {
if (validators[validatorTemplateType] === undefined) {
validators[validatorTemplateType] = [];
}
if (!foundValidator && validatorType === validatorTemplateType.toLowerCase()) {
$this.data("validation" + el + formatValidatorName(validatorTemplate.name), validator[validatorTemplate.name.toLowerCase()]);
validators[validatorType].push(
$.extend(
validator,
validatorTemplate.init($this, el)
)
);
foundValidator = true;
}
}
);
}
}
if (! foundValidator) {
$.error("Cannot find validation info for '" + el + "'");
}
});
// =============================================================
// STORE FALLBACK VALUES
// =============================================================
$helpBlock.data(
"original-contents",
(
$helpBlock.data("original-contents")
? $helpBlock.data("original-contents")
: $helpBlock.html()
)
);
$helpBlock.data(
"original-role",
(
$helpBlock.data("original-role")
? $helpBlock.data("original-role")
: $helpBlock.attr("role")
)
);
$controlGroup.data(
"original-classes",
(
$controlGroup.data("original-clases")
? $controlGroup.data("original-classes")
: $controlGroup.attr("class")
)
);
$this.data(
"original-aria-invalid",
(
$this.data("original-aria-invalid")
? $this.data("original-aria-invalid")
: $this.attr("aria-invalid")
)
);
// =============================================================
// VALIDATION
// =============================================================
$this.bind(
"validation.validation",
function (event, params) {
var value = getValue($this);
// Get a list of the errors to apply
var errorsFound = [];
$.each(validators, function (validatorType, validatorTypeArray) {
if (value || value.length || (params && params.includeEmpty) || (!!settings.validatorTypes[validatorType].blockSubmit && params && !!params.submitting)) {
$.each(validatorTypeArray, function (i, validator) {
if (settings.validatorTypes[validatorType].validate($this, value, validator)) {
errorsFound.push(validator.message);
}
});
}
});
return errorsFound;
}
);
$this.bind(
"getValidators.validation",
function () {
return validators;
}
);
// =============================================================
// WATCH FOR CHANGES
// =============================================================
$this.bind(
"submit.validation",
function () {
return $this.triggerHandler("change.validation", {submitting: true});
}
);
$this.bind(
[
"keyup",
"focus",
"blur",
"click",
"keydown",
"keypress",
"change"
].join(".validation ") + ".validation",
function (e, params) {
var value = getValue($this);
var errorsFound = [];
$controlGroup.find("input,textarea,select").each(function (i, el) {
var oldCount = errorsFound.length;
$.each($(el).triggerHandler("validation.validation", params), function (j, message) {
errorsFound.push(message);
});
if (errorsFound.length > oldCount) {
$(el).attr("aria-invalid", "true");
} else {
var original = $this.data("original-aria-invalid");
$(el).attr("aria-invalid", (original !== undefined ? original : false));
}
});
$form.find("input,select,textarea").not($this).not("[name=\"" + $this.attr("name") + "\"]").trigger("validationLostFocus.validation");
errorsFound = $.unique(errorsFound.sort());
// Were there any errors?
if (errorsFound.length) {
// Better flag it up as a warning.
$controlGroup.removeClass("success error").addClass("warning");
// How many errors did we find?
if (settings.options.semanticallyStrict && errorsFound.length === 1) {
// Only one? Being strict? Just output it.
$helpBlock.html(errorsFound[0] +
( settings.options.prependExistingHelpBlock ? $helpBlock.data("original-contents") : "" ));
} else {
// Multiple? Being sloppy? Glue them together into an UL.
$helpBlock.html("<ul role=\"alert\"><li>" + errorsFound.join("</li><li>") + "</li></ul>" +
( settings.options.prependExistingHelpBlock ? $helpBlock.data("original-contents") : "" ));
}
} else {
$controlGroup.removeClass("warning error success");
if (value.length > 0) {
$controlGroup.addClass("success");
}
$helpBlock.html($helpBlock.data("original-contents"));
}
if (e.type === "blur") {
$controlGroup.removeClass("success");
}
}
);
$this.bind("validationLostFocus.validation", function () {
$controlGroup.removeClass("success");
});
});
},
destroy : function( ) {
return this.each(
function() {
var
$this = $(this),
$controlGroup = $this.parents(".control-group").first(),
$helpBlock = $controlGroup.find(".help-block").first();
// remove our events
$this.unbind('.validation'); // events are namespaced.
// reset help text
$helpBlock.html($helpBlock.data("original-contents"));
// reset classes
$controlGroup.attr("class", $controlGroup.data("original-classes"));
// reset aria
$this.attr("aria-invalid", $this.data("original-aria-invalid"));
// reset role
$helpBlock.attr("role", $this.data("original-role"));
// remove all elements we created
if (createdElements.indexOf($helpBlock[0]) > -1) {
$helpBlock.remove();
}
}
);
},
collectErrors : function(includeEmpty) {
var errorMessages = {};
this.each(function (i, el) {
var $el = $(el);
var name = $el.attr("name");
var errors = $el.triggerHandler("validation.validation", {includeEmpty: true});
errorMessages[name] = $.extend(true, errors, errorMessages[name]);
});
$.each(errorMessages, function (i, el) {
if (el.length === 0) {
delete errorMessages[i];
}
});
return errorMessages;
},
hasErrors: function() {
var errorMessages = [];
this.each(function (i, el) {
errorMessages = errorMessages.concat(
$(el).triggerHandler("getValidators.validation") ? $(el).triggerHandler("validation.validation", {submitting: true}) : []
);
});
return (errorMessages.length > 0);
},
override : function (newDefaults) {
defaults = $.extend(true, defaults, newDefaults);
}
},
validatorTypes: {
callback: {
name: "callback",
init: function ($this, name) {
return {
validatorName: name,
callback: $this.data("validation" + name + "Callback"),
lastValue: $this.val(),
lastValid: true,
lastFinished: true
};
},
validate: function ($this, value, validator) {
if (validator.lastValue === value && validator.lastFinished) {
return !validator.lastValid;
}
if (validator.lastFinished === true)
{
validator.lastValue = value;
validator.lastValid = true;
validator.lastFinished = false;
var rrjqbvValidator = validator;
var rrjqbvThis = $this;
executeFunctionByName(
validator.callback,
window,
$this,
value,
function (data) {
if (rrjqbvValidator.lastValue === data.value) {
rrjqbvValidator.lastValid = data.valid;
if (data.message) {
rrjqbvValidator.message = data.message;
}
rrjqbvValidator.lastFinished = true;
rrjqbvThis.data("validation" + rrjqbvValidator.validatorName + "Message", rrjqbvValidator.message);
// Timeout is set to avoid problems with the events being considered 'already fired'
setTimeout(function () {
rrjqbvThis.trigger("change.validation");
}, 1); // doesn't need a long timeout, just long enough for the event bubble to burst
}
}
);
}
return false;
}
},
ajax: {
name: "ajax",
init: function ($this, name) {
return {
validatorName: name,
url: $this.data("validation" + name + "Ajax"),
lastValue: $this.val(),
lastValid: true,
lastFinished: true
};
},
validate: function ($this, value, validator) {
if (""+validator.lastValue === ""+value && validator.lastFinished === true) {
return validator.lastValid === false;
}
if (validator.lastFinished === true)
{
validator.lastValue = value;
validator.lastValid = true;
validator.lastFinished = false;
$.ajax({
url: validator.url,
data: "value=" + value + "&field=" + $this.attr("name"),
dataType: "json",
success: function (data) {
if (""+validator.lastValue === ""+data.value) {
validator.lastValid = !!(data.valid);
if (data.message) {
validator.message = data.message;
}
validator.lastFinished = true;
$this.data("validation" + validator.validatorName + "Message", validator.message);
// Timeout is set to avoid problems with the events being considered 'already fired'
setTimeout(function () {
$this.trigger("change.validation");
}, 1); // doesn't need a long timeout, just long enough for the event bubble to burst
}
},
failure: function () {
validator.lastValid = true;
validator.message = "ajax call failed";
validator.lastFinished = true;
$this.data("validation" + validator.validatorName + "Message", validator.message);
// Timeout is set to avoid problems with the events being considered 'already fired'
setTimeout(function () {
$this.trigger("change.validation");
}, 1); // doesn't need a long timeout, just long enough for the event bubble to burst
}
});
}
return false;
}
},
regex: {
name: "regex",
init: function ($this, name) {
return {regex: regexFromString($this.data("validation" + name + "Regex"))};
},
validate: function ($this, value, validator) {
return (!validator.regex.test(value) && ! validator.negative)
|| (validator.regex.test(value) && validator.negative);
}
},
required: {
name: "required",
init: function ($this, name) {
return {};
},
validate: function ($this, value, validator) {
return !!(value.length === 0 && ! validator.negative)
|| !!(value.length > 0 && validator.negative);
},
blockSubmit: true
},
match: {
name: "match",
init: function ($this, name) {
var element = $this.parents("form").first().find("[name=\"" + $this.data("validation" + name + "Match") + "\"]").first();
element.bind("validation.validation", function () {
$this.trigger("change.validation", {submitting: true});
});
return {"element": element};
},
validate: function ($this, value, validator) {
return (value !== validator.element.val() && ! validator.negative)
|| (value === validator.element.val() && validator.negative);
},
blockSubmit: true
},
max: {
name: "max",
init: function ($this, name) {
return {max: $this.data("validation" + name + "Max")};
},
validate: function ($this, value, validator) {
return (parseFloat(value, 10) > parseFloat(validator.max, 10) && ! validator.negative)
|| (parseFloat(value, 10) <= parseFloat(validator.max, 10) && validator.negative);
}
},
min: {
name: "min",
init: function ($this, name) {
return {min: $this.data("validation" + name + "Min")};
},
validate: function ($this, value, validator) {
return (parseFloat(value) < parseFloat(validator.min) && ! validator.negative)
|| (parseFloat(value) >= parseFloat(validator.min) && validator.negative);
}
},
maxlength: {
name: "maxlength",
init: function ($this, name) {
return {maxlength: $this.data("validation" + name + "Maxlength")};
},
validate: function ($this, value, validator) {
return ((value.length > validator.maxlength) && ! validator.negative)
|| ((value.length <= validator.maxlength) && validator.negative);
}
},
minlength: {
name: "minlength",
init: function ($this, name) {
return {minlength: $this.data("validation" + name + "Minlength")};
},
validate: function ($this, value, validator) {
return ((value.length < validator.minlength) && ! validator.negative)
|| ((value.length >= validator.minlength) && validator.negative);
}
},
maxchecked: {
name: "maxchecked",
init: function ($this, name) {
var elements = $this.parents("form").first().find("[name=\"" + $this.attr("name") + "\"]");
elements.bind("click.validation", function () {
$this.trigger("change.validation", {includeEmpty: true});
});
return {maxchecked: $this.data("validation" + name + "Maxchecked"), elements: elements};
},
validate: function ($this, value, validator) {
return (validator.elements.filter(":checked").length > validator.maxchecked && ! validator.negative)
|| (validator.elements.filter(":checked").length <= validator.maxchecked && validator.negative);
},
blockSubmit: true
},
minchecked: {
name: "minchecked",
init: function ($this, name) {
var elements = $this.parents("form").first().find("[name=\"" + $this.attr("name") + "\"]");
elements.bind("click.validation", function () {
$this.trigger("change.validation", {includeEmpty: true});
});
return {minchecked: $this.data("validation" + name + "Minchecked"), elements: elements};
},
validate: function ($this, value, validator) {
return (validator.elements.filter(":checked").length < validator.minchecked && ! validator.negative)
|| (validator.elements.filter(":checked").length >= validator.minchecked && validator.negative);
},
blockSubmit: true
}
},
builtInValidators: {
email: {
name: "Email",
type: "shortcut",
shortcut: "validemail"
},
validemail: {
name: "Validemail",
type: "regex",
regex: "[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\\\.[A-Za-z]{2,4}",
message: "Not a valid email address<!-- data-validator-validemail-message to override -->"
},
passwordagain: {
name: "Passwordagain",
type: "match",
match: "password",
message: "Does not match the given password<!-- data-validator-paswordagain-message to override -->"
},
positive: {
name: "Positive",
type: "shortcut",
shortcut: "number,positivenumber"
},
negative: {
name: "Negative",
type: "shortcut",
shortcut: "number,negativenumber"
},
number: {
name: "Number",
type: "regex",
regex: "([+-]?\\\d+(\\\.\\\d*)?([eE][+-]?[0-9]+)?)?",
message: "Must be a number<!-- data-validator-number-message to override -->"
},
integer: {
name: "Integer",
type: "regex",
regex: "[+-]?\\\d+",
message: "No decimal places allowed<!-- data-validator-integer-message to override -->"
},
positivenumber: {
name: "Positivenumber",
type: "min",
min: 0,
message: "Must be a positive number<!-- data-validator-positivenumber-message to override -->"
},
negativenumber: {
name: "Negativenumber",
type: "max",
max: 0,
message: "Must be a negative number<!-- data-validator-negativenumber-message to override -->"
},
required: {
name: "Required",
type: "required",
message: "This is required<!-- data-validator-required-message to override -->"
},
checkone: {
name: "Checkone",
type: "minchecked",
minchecked: 1,
message: "Check at least one option<!-- data-validation-checkone-message to override -->"
}
}
};
var formatValidatorName = function (name) {
return name
.toLowerCase()
.replace(
/(^|\s)([a-z])/g ,
function(m,p1,p2) {
return p1+p2.toUpperCase();
}
)
;
};
var getValue = function ($this) {
// Extract the value we're talking about
var value = $this.val();
var type = $this.attr("type");
if (type === "checkbox") {
value = ($this.is(":checked") ? value : "");
}
if (type === "radio") {
value = ($('input[name="' + $this.attr("name") + '"]:checked').length > 0 ? value : "");
}
return value;
};
function regexFromString(inputstring) {
return new RegExp("^" + inputstring + "$");
}
/**
* Thanks to Jason Bunting via StackOverflow.com
*
* http://stackoverflow.com/questions/359788/how-to-execute-a-javascript-function-when-i-have-its-name-as-a-string#answer-359910
* Short link: http://tinyurl.com/executeFunctionByName
**/
function executeFunctionByName(functionName, context /*, args*/) {
var args = Array.prototype.slice.call(arguments).splice(2);
var namespaces = functionName.split(".");
var func = namespaces.pop();
for(var i = 0; i < namespaces.length; i++) {
context = context[namespaces[i]];
}
return context[func].apply(this, args);
}
$.fn.jqBootstrapValidation = function( method ) {
if ( defaults.methods[method] ) {
return defaults.methods[method].apply( this, Array.prototype.slice.call( arguments, 1 ));
} else if ( typeof method === 'object' || ! method ) {
return defaults.methods.init.apply( this, arguments );
} else {
$.error( 'Method ' + method + ' does not exist on jQuery.jqBootstrapValidation' );
return null;
}
};
$.jqBootstrapValidation = function (options) {
$(":input").not("[type=image],[type=submit]").jqBootstrapValidation.apply(this,arguments);
};
})( jQuery );
|
PypiClean
|
/jupyterhub-sdp-0.9.0.1.tar.gz/jupyterhub-sdp-0.9.0.1/share/jupyterhub/static/components/moment/locale/be.js
|
;(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined'
&& typeof require === 'function' ? factory(require('../moment')) :
typeof define === 'function' && define.amd ? define(['../moment'], factory) :
factory(global.moment)
}(this, (function (moment) { 'use strict';
function plural(word, num) {
var forms = word.split('_');
return num % 10 === 1 && num % 100 !== 11 ? forms[0] : (num % 10 >= 2 && num % 10 <= 4 && (num % 100 < 10 || num % 100 >= 20) ? forms[1] : forms[2]);
}
function relativeTimeWithPlural(number, withoutSuffix, key) {
var format = {
'ss': withoutSuffix ? 'секунда_секунды_секунд' : 'секунду_секунды_секунд',
'mm': withoutSuffix ? 'хвіліна_хвіліны_хвілін' : 'хвіліну_хвіліны_хвілін',
'hh': withoutSuffix ? 'гадзіна_гадзіны_гадзін' : 'гадзіну_гадзіны_гадзін',
'dd': 'дзень_дні_дзён',
'MM': 'месяц_месяцы_месяцаў',
'yy': 'год_гады_гадоў'
};
if (key === 'm') {
return withoutSuffix ? 'хвіліна' : 'хвіліну';
}
else if (key === 'h') {
return withoutSuffix ? 'гадзіна' : 'гадзіну';
}
else {
return number + ' ' + plural(format[key], +number);
}
}
var be = moment.defineLocale('be', {
months : {
format: 'студзеня_лютага_сакавіка_красавіка_траўня_чэрвеня_ліпеня_жніўня_верасня_кастрычніка_лістапада_снежня'.split('_'),
standalone: 'студзень_люты_сакавік_красавік_травень_чэрвень_ліпень_жнівень_верасень_кастрычнік_лістапад_снежань'.split('_')
},
monthsShort : 'студ_лют_сак_крас_трав_чэрв_ліп_жнів_вер_каст_ліст_снеж'.split('_'),
weekdays : {
format: 'нядзелю_панядзелак_аўторак_сераду_чацвер_пятніцу_суботу'.split('_'),
standalone: 'нядзеля_панядзелак_аўторак_серада_чацвер_пятніца_субота'.split('_'),
isFormat: /\[ ?[Вв] ?(?:мінулую|наступную)? ?\] ?dddd/
},
weekdaysShort : 'нд_пн_ат_ср_чц_пт_сб'.split('_'),
weekdaysMin : 'нд_пн_ат_ср_чц_пт_сб'.split('_'),
longDateFormat : {
LT : 'HH:mm',
LTS : 'HH:mm:ss',
L : 'DD.MM.YYYY',
LL : 'D MMMM YYYY г.',
LLL : 'D MMMM YYYY г., HH:mm',
LLLL : 'dddd, D MMMM YYYY г., HH:mm'
},
calendar : {
sameDay: '[Сёння ў] LT',
nextDay: '[Заўтра ў] LT',
lastDay: '[Учора ў] LT',
nextWeek: function () {
return '[У] dddd [ў] LT';
},
lastWeek: function () {
switch (this.day()) {
case 0:
case 3:
case 5:
case 6:
return '[У мінулую] dddd [ў] LT';
case 1:
case 2:
case 4:
return '[У мінулы] dddd [ў] LT';
}
},
sameElse: 'L'
},
relativeTime : {
future : 'праз %s',
past : '%s таму',
s : 'некалькі секунд',
m : relativeTimeWithPlural,
mm : relativeTimeWithPlural,
h : relativeTimeWithPlural,
hh : relativeTimeWithPlural,
d : 'дзень',
dd : relativeTimeWithPlural,
M : 'месяц',
MM : relativeTimeWithPlural,
y : 'год',
yy : relativeTimeWithPlural
},
meridiemParse: /ночы|раніцы|дня|вечара/,
isPM : function (input) {
return /^(дня|вечара)$/.test(input);
},
meridiem : function (hour, minute, isLower) {
if (hour < 4) {
return 'ночы';
} else if (hour < 12) {
return 'раніцы';
} else if (hour < 17) {
return 'дня';
} else {
return 'вечара';
}
},
dayOfMonthOrdinalParse: /\d{1,2}-(і|ы|га)/,
ordinal: function (number, period) {
switch (period) {
case 'M':
case 'd':
case 'DDD':
case 'w':
case 'W':
return (number % 10 === 2 || number % 10 === 3) && (number % 100 !== 12 && number % 100 !== 13) ? number + '-і' : number + '-ы';
case 'D':
return number + '-га';
default:
return number;
}
},
week : {
dow : 1, // Monday is the first day of the week.
doy : 7 // The week that contains Jan 1st is the first week of the year.
}
});
return be;
})));
|
PypiClean
|
/wj_analysis-0.8.2.tar.gz/wj_analysis-0.8.2/wj_analysis/facebook/polarity_distribution.py
|
import sys
from copy import deepcopy
import pandas as pd
from ..common import general_utils
from ..common.nlp_utils import CleanText, Features, Polarity # , Polarity2
ERR_SYS = "\nSystem error: "
def get_name(pid, dict_page_id_to_name):
try:
out_name = dict_page_id_to_name[pid]
except Exception:
out_name = "no_name"
return out_name
class PolarityDistributionFB:
"""
This class computes the polarity of the texts.
"""
def __init__(self, df_comments, df_pages, groups, r_group=False):
"""
        This method stores the input DataFrames and checks that they are not empty.
Parameters
----------
df_comments:
type: DataFrame
Information of the comments.
This Pandas DataFrame must have columns 'post_id', 'message' and 'page_id'.
df_pages:
type: DataFrame
Information of the pages.
This Pandas DataFrame must have columns 'page_id' and 'name'.
            It is used just to set the page name in the DataFrame 'df_comments'. That page name corresponds to the page of the post to which the comment is associated.
groups:
type: dict
Maps the groups (client, competition, archetype, trends) to the
corresponding page ids for each group.
r_group:
type: bool
            Variable that indicates whether the class returns the 'group' column.
"""
METHOD_NAME = "__init__"
df_comments_full = deepcopy(df_comments)
self.df_comments_full = df_comments_full
self.df_pages = df_pages
self.groups = groups
self.r_group = r_group
try:
if df_comments.empty:
print("Warning: input data DataFrame is empty.")
except Exception as e:
print(e)
error_1 = sys.exc_info()[0]
print(ERR_SYS + str(error_1))
print(f"Class: {self.__str__()}\nMethod: {METHOD_NAME}")
self.df_comments_full = pd.DataFrame(columns=[""])
try:
if df_pages.empty:
print("Warning: page names DataFrame is empty.")
except Exception as e:
print(e)
error_1 = sys.exc_info()[0]
print(ERR_SYS + str(error_1))
print(f"Class: {self.__str__()}\nMethod: {METHOD_NAME}")
self.df_pages = pd.DataFrame(columns=[""])
def get_polarity(self):
"""
        This method cleans the text in the comments and gets its polarity.
"""
METHOD_NAME = "get_polarity"
try:
# cleaning text:
if "processed_text" not in self.df_comments_full.keys():
self.df_comments_full["processed_text"] = self.df_comments_full[
"message"
].apply(
lambda msg: CleanText(msg).process_text(
mentions=True, hashtags=True, links=True, spec_chars=True
)
)
# drop empty comments
self.df_comments_full = self.df_comments_full.dropna(
subset=["processed_text"]
)
self.df_comments_full = self.df_comments_full.drop(
self.df_comments_full[
self.df_comments_full["processed_text"] == ""
].index
)
# getting the polarity of the clean text
if (
"polarity" not in self.df_comments_full.keys()
or None in self.df_comments_full.polarity.values
):
self.df_comments_full = Polarity().polarity(self.df_comments_full)
df_comments = self.df_comments_full
df_comments = df_comments.dropna(subset=["polarity"])
return df_comments
except Exception as e:
print(e)
error_1 = sys.exc_info()[0]
print(ERR_SYS + str(error_1))
print(f"Class: {self.__str__()}\nMethod: {METHOD_NAME}")
self.df_comments_full["processed_text"] = ""
self.df_comments_full["polarity"] = ""
def grouped_polarities(self, group_by="group"):
"""
        This method computes the texts' polarity using the get_polarity method, then groups the texts by their polarity. It returns a DataFrame with the number of texts in each polarity category for each group.
Parameters
----------
group_by:
type: string
            Takes one of the following values: 'post' to group the texts by individual post, 'account' to group them by individual Facebook account, 'group' to group them by the pre-defined group categories (from the 'groups' dict), 'all' to place all the analysed texts in a single group, and 'time-account' to group them by creation time, account and post. If an invalid value is passed, the default 'all' grouping is used.
        Returns
        ----------
        df_groupedpolarity:
            Pandas DataFrame with the number of texts in each polarity category for each group.
"""
METHOD_NAME = "grouped_polarities"
if (
"polarity" not in self.df_comments_full.keys()
or None in self.df_comments_full.polarity.values
):
self.get_polarity()
try:
if "post_id" in self.df_comments_full.keys():
self.df_comments_full["page_id"] = self.df_comments_full.post_id.apply(
lambda x: str(x).split("_")[0]
)
page_id_name_fb = {}
for idd, row in self.df_pages.iterrows():
page_id_name_fb[row.page_id] = row["name"]
self.df_comments_full[
"page_name"
] = self.df_comments_full.page_id.apply(
lambda pid: get_name(pid, page_id_name_fb)
)
self.df_comments_full["group"] = self.df_comments_full["page_id"].apply(
lambda pid: general_utils.get_group(pid, self.groups)
)
else:
page_id_name_fb = {}
for idd, row in self.df_pages.iterrows():
page_id_name_fb[row.page_id] = row["name"]
self.df_comments_full[
"page_name"
] = self.df_comments_full.page_id.apply(
lambda pid: get_name(pid, page_id_name_fb)
)
self.df_comments_full["group"] = self.df_comments_full["page_id"].apply(
lambda pid: general_utils.get_group(pid, self.groups)
)
self.df_comments_full["all"] = "all groups"
except Exception as e:
print(e)
error_1 = sys.exc_info()[0]
print(ERR_SYS + str(error_1))
print(f"Class: {self.__str__()}\nMethod: {METHOD_NAME}")
if "page_name" not in self.df_comments_full.keys():
self.df_comments_full["page_name"] = "no name"
if "group" not in self.df_comments_full.keys():
self.df_comments_full["group"] = "no group"
self.df_comments_full["all"] = "all groups"
self.df_comments_full = self.df_comments_full.dropna(subset=["polarity"])
# groups texts and returns data frame
try:
if group_by == "post":
df_groupedpolarity = (
self.df_comments_full.groupby(
["post_id", "page_id", "page_name", "group"]
)["polarity"]
.value_counts()
.unstack()
.fillna(0)
)
df_groupedpolarity["sentiment"] = df_groupedpolarity.to_dict(
orient="records"
)
df_groupedpolarity = df_groupedpolarity.reset_index().rename(
columns={"page_id": "_object_id", "page_name": "_object_name"}
)
return df_groupedpolarity[
["post_id", "_object_id", "_object_name", "group", "sentiment"]
]
elif group_by == "account":
df_groupedpolarity = (
self.df_comments_full.groupby(["page_id", "page_name", "group"])[
"polarity"
]
.value_counts()
.unstack()
.fillna(0)
)
df_groupedpolarity["sentiment"] = df_groupedpolarity.to_dict(
orient="records"
)
df_groupedpolarity = df_groupedpolarity.reset_index().rename(
columns={"page_id": "_object_id", "page_name": "_object_name"}
)
return df_groupedpolarity[
["_object_id", "_object_name", "group", "sentiment"]
]
elif group_by == "group":
df_groupedpolarity = (
self.df_comments_full.groupby("group")["polarity"]
.value_counts()
.unstack()
.fillna(0)
)
df_groupedpolarity["sentiment"] = df_groupedpolarity.to_dict(
orient="records"
)
return df_groupedpolarity.reset_index()[["group", "sentiment"]]
elif group_by == "all":
df_groupedpolarity = (
self.df_comments_full.groupby("all")["polarity"]
.value_counts()
.unstack()
.fillna(0)
)
df_groupedpolarity["sentiment"] = df_groupedpolarity.to_dict(
orient="records"
)
return df_groupedpolarity.reset_index()[["all", "sentiment"]]
elif group_by == "time-account":
df_groupedpolarity = (
self.df_comments_full.groupby(
["created_time", "page_id", "page_name", "post_id", "group"]
)["polarity"]
.value_counts()
.unstack()
.fillna(0)
)
df_groupedpolarity["sentiment"] = df_groupedpolarity.to_dict(
orient="records"
)
df_groupedpolarity = df_groupedpolarity.reset_index().rename(
columns={"page_id": "_object_id", "page_name": "_object_name"}
)
df_groupedpolarity = df_groupedpolarity.sort_values(by=["created_time"])
columns = [
"created_time",
"_object_id",
"_object_name",
"sentiment",
"post_id",
]
                if self.r_group:
columns.append("group")
return df_groupedpolarity[columns].reset_index()
else:
print(
f"Warning: {group_by} Invalid parameter value for parameter group_by, grouping by all"
)
df_groupedpolarity = (
self.df_comments_full.groupby("all")["polarity"]
.value_counts()
.unstack()
.fillna(0)
)
df_groupedpolarity["sentiment"] = df_groupedpolarity.to_dict(
orient="records"
)
return df_groupedpolarity.reset_index()[["all", "sentiment"]]
except Exception as e:
print(e)
error_1 = sys.exc_info()[0]
print(ERR_SYS + str(error_1))
print(f"Class: {self.__str__()}\nMethod: {METHOD_NAME}")
if group_by == "account":
df_groupedpolarity = pd.DataFrame(
columns=["_object_id", "_object_name", "group", "sentiment"]
)
elif group_by == "group":
df_groupedpolarity = pd.DataFrame(columns=["group", "sentiment"])
elif group_by == "all":
df_groupedpolarity = pd.DataFrame(columns=["all", "sentiment"])
else:
print(
f"Warning: {group_by} Invalid parameter value for parameter group_by, grouping by all"
)
df_groupedpolarity = pd.DataFrame(columns=["all", "sentiment"])
return df_groupedpolarity
|
PypiClean
|
/asocksimap-1.0.3.tar.gz/asocksimap-1.0.3/README.md
|
# Connect to IMAP through Socks using Python asyncio
## Dependencies
- aioimaplib 1.0.1+
- aiosocks 0.2.6+
## Installation
```bash
pip install asocksimap
```
or
```bash
pip install git+https://github.com/optinsoft/asocksimap.git
```
## Usage
```python
import asyncio
from asocksimap import AsyncSocksIMAP4_SSL
from functools import reduce
def checkResponse(res, func):
if res.result != 'OK':
msg = reduce(lambda s, i: (s + "\n " if i > 0 else "") + res.lines[i].decode('utf8'), range(len(res.lines)), "")
if not msg: msg = f"{func} failed"
raise Exception(msg)
async def aimap_test():
email_address = '[email protected]'
password = 'YOUR_PASSWORD'
imap_server = 'outlook.office365.com'
imap_port = 993
socks_addr = '127.0.0.1'
socks_port = 1080
socks_type = 'socks5'
aimap = AsyncSocksIMAP4_SSL(host=imap_server, port=imap_port, timeout=15,
proxy_addr=socks_addr, proxy_port=socks_port, proxy_type=socks_type)
await aimap.wait_hello_from_server()
res = await aimap.login(email_address, password)
checkResponse(res, "login")
res = await aimap.logout()
checkResponse(res, "logout")
loop = asyncio.get_event_loop()
loop.run_until_complete(aimap_test())
```
|
PypiClean
|
/tensorflow_tflex-1.13.1rc1-cp27-cp27mu-manylinux1_x86_64.whl/tensorflow_tflex-1.13.1rc1.data/purelib/tensorflow/contrib/testing/python/framework/fake_summary_writer.py
|
"""Fake summary writer for unit tests."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.core.framework import summary_pb2
from tensorflow.python.framework import test_util
from tensorflow.python.summary.writer import writer
from tensorflow.python.summary.writer import writer_cache
# TODO(ptucker): Replace with mock framework.
class FakeSummaryWriter(object):
"""Fake summary writer."""
_replaced_summary_writer = None
@classmethod
def install(cls):
if cls._replaced_summary_writer:
raise ValueError('FakeSummaryWriter already installed.')
cls._replaced_summary_writer = writer.FileWriter
writer.FileWriter = FakeSummaryWriter
writer_cache.FileWriter = FakeSummaryWriter
@classmethod
def uninstall(cls):
if not cls._replaced_summary_writer:
raise ValueError('FakeSummaryWriter not installed.')
writer.FileWriter = cls._replaced_summary_writer
writer_cache.FileWriter = cls._replaced_summary_writer
cls._replaced_summary_writer = None
def __init__(self, logdir, graph=None):
self._logdir = logdir
self._graph = graph
self._summaries = {}
self._added_graphs = []
self._added_meta_graphs = []
self._added_session_logs = []
self._added_run_metadata = {}
@property
def summaries(self):
return self._summaries
def assert_summaries(self,
test_case,
expected_logdir=None,
expected_graph=None,
expected_summaries=None,
expected_added_graphs=None,
expected_added_meta_graphs=None,
expected_session_logs=None):
"""Assert expected items have been added to summary writer."""
if expected_logdir is not None:
test_case.assertEqual(expected_logdir, self._logdir)
if expected_graph is not None:
test_case.assertTrue(expected_graph is self._graph)
expected_summaries = expected_summaries or {}
for step in expected_summaries:
test_case.assertTrue(
step in self._summaries,
msg='Missing step %s from %s.' % (step, self._summaries.keys()))
actual_simple_values = {}
for step_summary in self._summaries[step]:
for v in step_summary.value:
# Ignore global_step/sec since it's written by Supervisor in a
# separate thread, so it's non-deterministic how many get written.
if 'global_step/sec' != v.tag:
actual_simple_values[v.tag] = v.simple_value
test_case.assertEqual(expected_summaries[step], actual_simple_values)
if expected_added_graphs is not None:
test_case.assertEqual(expected_added_graphs, self._added_graphs)
if expected_added_meta_graphs is not None:
test_case.assertEqual(len(expected_added_meta_graphs),
len(self._added_meta_graphs))
for expected, actual in zip(expected_added_meta_graphs,
self._added_meta_graphs):
test_util.assert_meta_graph_protos_equal(test_case, expected, actual)
if expected_session_logs is not None:
test_case.assertEqual(expected_session_logs, self._added_session_logs)
def add_summary(self, summ, current_global_step):
"""Add summary."""
if isinstance(summ, bytes):
summary_proto = summary_pb2.Summary()
summary_proto.ParseFromString(summ)
summ = summary_proto
if current_global_step in self._summaries:
step_summaries = self._summaries[current_global_step]
else:
step_summaries = []
self._summaries[current_global_step] = step_summaries
step_summaries.append(summ)
# NOTE: Ignore global_step since its value is non-deterministic.
def add_graph(self, graph, global_step=None, graph_def=None):
"""Add graph."""
if (global_step is not None) and (global_step < 0):
raise ValueError('Invalid global_step %s.' % global_step)
if graph_def is not None:
raise ValueError('Unexpected graph_def %s.' % graph_def)
self._added_graphs.append(graph)
def add_meta_graph(self, meta_graph_def, global_step=None):
"""Add metagraph."""
if (global_step is not None) and (global_step < 0):
raise ValueError('Invalid global_step %s.' % global_step)
self._added_meta_graphs.append(meta_graph_def)
# NOTE: Ignore global_step since its value is non-deterministic.
def add_session_log(self, session_log, global_step=None):
# pylint: disable=unused-argument
self._added_session_logs.append(session_log)
def add_run_metadata(self, run_metadata, tag, global_step=None):
if (global_step is not None) and (global_step < 0):
raise ValueError('Invalid global_step %s.' % global_step)
self._added_run_metadata[tag] = run_metadata
def flush(self):
pass
def reopen(self):
pass
def close(self):
pass
|
PypiClean
|
/kani-0.2.0.tar.gz/kani-0.2.0/docs/engines.rst
|
Engines
=======
Engines are the means by which kani interacts with language models. As you've seen, kani comes with a few engines
included:
.. include:: shared/engine_table.rst
In this section, we'll discuss how to implement your own engine to use any language model or API you can think of.
.. tip::
Built an engine for a model kani doesn't support yet?
kani is OSS and |:heart:| PRs with engine implementations for the latest models - see :doc:`contributing`.
Implementing an Engine
----------------------
To create your own engine, all you have to do is subclass :class:`.BaseEngine`:
.. autoclass:: kani.engines.base.BaseEngine
:noindex:
:members:
A new engine must implement at least the two abstract methods and set the abstract attribute:
- :meth:`.BaseEngine.message_len` takes a single :class:`.ChatMessage` and returns the length of that message,
in tokens.
- :meth:`.BaseEngine.predict` takes a list of :class:`.ChatMessage` and :class:`.AIFunction` and returns a
new :class:`.BaseCompletion`.
- :attr:`.BaseEngine.max_context_size` specifies the model's token context size.
With just these three implementations, an engine will be fully functional!
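For orientation, here is a minimal sketch of such an engine: a toy implementation that simply
echoes the last message back. The ``Completion`` helper and the exact signatures used below are
assumptions based on the API reference, so double-check them against the current
:class:`.BaseEngine` documentation before building on this:

.. code-block:: python

    from kani import ChatMessage
    from kani.engines.base import BaseCompletion, BaseEngine, Completion

    class EchoEngine(BaseEngine):
        # required abstract attribute: the model's context window, in tokens
        max_context_size = 2048

        def message_len(self, message: ChatMessage) -> int:
            # naive token estimate: one "token" per whitespace-separated word
            return len(message.content.split()) if message.content else 1

        async def predict(self, messages, functions=None, **hyperparams) -> BaseCompletion:
            # a real engine would build a prompt and call the underlying model here;
            # this toy engine just echoes the most recent message back
            last = messages[-1].content if messages else ""
            return Completion(message=ChatMessage.assistant(last))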
kani comes with a couple additional bases and utilities to help you build engines for models on HuggingFace or with
an available HTTP API.
Optional Methods
^^^^^^^^^^^^^^^^
Engines also come with a set of optional methods/attributes to override that you can use to customize its behaviour
further. For example, engines often have to add a custom model-specific prompt in order to expose functions to
the underlying model, and kani needs to know about the extra tokens added by this prompt!
- :attr:`.BaseEngine.token_reserve`: if your engine needs to reserve tokens (e.g. for a one-time prompt template).
- :meth:`.BaseEngine.function_token_reserve`: specify how many tokens are needed to expose a set of functions to the
model.
- :meth:`.BaseEngine.close`: if your engine needs to clean up resources during shutdown.
HTTP Client
-----------
If your language model backend exposes an HTTP API, you can create a subclass of :class:`.BaseClient` to interface with
it. Your engine should then create an instance of the new HTTP client and call it to make predictions.
Minimally, to use the HTTP client, your subclass should set the ``SERVICE_BASE`` class variable.
.. seealso::
The source code of the :class:`.OpenAIClient`, which uses the HTTP client.
.. autoclass:: kani.engines.httpclient.BaseClient
:noindex:
:members:
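For example, a client for a hypothetical completion API might look roughly like this. The base
URL, route, and response shape below are invented for illustration, and it is assumed here that
the client's ``post`` helper returns the decoded JSON body of the response:

.. code-block:: python

    from kani.engines.httpclient import BaseClient

    class MyCompletionClient(BaseClient):
        # all requests are made relative to this (hypothetical) base URL
        SERVICE_BASE = "https://api.example.com/v1"

        async def create_completion(self, prompt: str, **hyperparams) -> str:
            # assumes post() returns the parsed JSON response as a dict
            data = await self.post("/complete", json={"prompt": prompt, **hyperparams})
            return data["text"]

An engine built on top of this client would then call the client inside its ``predict`` method
and wrap the result in a completion object.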
HuggingFace
-----------
If your language model backend is available on HuggingFace or is compatible with ``transformers``'
``AutoModelForCausalLM`` interface, kani includes a base engine that implements a prediction pipeline.
Instead of having to implement the prediction logic, all you have to do is subclass :class:`.HuggingEngine` and
implement :meth:`~.HuggingEngine.build_prompt` and :meth:`~.BaseEngine.message_len`.
.. seealso::
The source code of the :class:`.LlamaEngine`, which uses the HuggingEngine.
.. autoclass:: kani.engines.huggingface.base.HuggingEngine
:noindex:
.. automethod:: kani.engines.huggingface.base.HuggingEngine.build_prompt
:noindex:
.. automethod:: kani.engines.huggingface.base.HuggingEngine.message_len
:noindex:
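As a rough sketch of that pattern, the engine below builds a naive prompt and counts tokens with
the loaded tokenizer. The prompt format is a placeholder rather than any model's real chat
template, and access to ``self.tokenizer`` is assumed to be provided by the base class:

.. code-block:: python

    from kani import ChatMessage
    from kani.engines.huggingface.base import HuggingEngine

    class MyModelEngine(HuggingEngine):
        def build_prompt(self, messages, functions=None) -> str:
            # a real engine should follow the model's documented chat template here
            return "\n".join(f"{m.role.value}: {m.content or ''}" for m in messages) + "\nassistant:"

        def message_len(self, message: ChatMessage) -> int:
            # count tokens with the model's tokenizer, plus a small allowance
            # for the role prefix added in build_prompt
            return len(self.tokenizer.encode(message.content or "")) + 4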
.. _4b_quant:
4-bit Quantization (|:hugging:|)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
If you're running your model locally, you might run into issues because large language models are, well, *large*!
Unless you pay for a massive compute cluster (|:money_with_wings:|) or have access to one at your institution, you
might not be able to fit models with billions of params on your GPU. That's where model quantization comes into play.
Using FP4 quantization, you can expect to reduce the model size by up to 8x compared to its native full-precision
version.
In this section, we'll show how to load HuggingFace models in FP4.
.. seealso::
We're mostly going to follow the HuggingFace documentation found here:
https://huggingface.co/docs/transformers/perf_infer_gpu_one
**Install Dependencies**
First, you'll need to install kani with the ``huggingface`` extra (and any other extras necessary for your engine;
we'll use LLaMA v2 in this example, so you'll want ``pip install 'kani[huggingface,llama]'``\ .)
After that, you'll need to install ``bitsandbytes`` and ``accelerate``:
.. code-block:: console
$ pip install bitsandbytes>=0.39.0 accelerate
.. caution:: The ``bitsandbytes`` library is currently only UNIX-compatible.
**Set Load Arguments**
Then, you'll need to set the ``model_load_kwargs`` when initializing your model, and use the engine as normal! This
example shows the :class:`.LlamaEngine`, but the same arguments should apply to any subclass of the
:class:`.HuggingEngine`.
.. code-block:: python
:emphasize-lines: 4-7
engine = LlamaEngine(
use_auth_token=True,
strict=True,
model_load_kwargs={
"device_map": "auto",
"load_in_4bit": True,
},
)
**Memory Usage Comparison**
This table shows the effect of enabling fp4 quantization on GPU memory usage and inference speed on ``Llama-2-7b-chat``.
These numbers represent the average of three runs on a consumer RTX 4070ti (12GB memory) with greedy sampling.
+--------------+----------------------+----------------------------------------+
| fp4 Enabled? | Memory Usage | Inference Time (per token) |
+==============+======================+========================================+
| No | 26.6GB | 1215.6 ms |
+--------------+----------------------+----------------------------------------+
| Yes | 5.0GB (5.32x less) | 23.6 ms (51.5x speedup\ [#shared]_) |
+--------------+----------------------+----------------------------------------+
.. [#shared] Since the memory usage without fp4 enabled is larger than the VRAM size of my GPU, some weights were stored
in shared memory. This likely led to much slower inference compared to storing all weights on a GPU.
CTransformers
-------------
If your language model backend is available with GGML, kani includes a base engine that implements
a prediction pipeline.
Instead of having to implement the prediction logic, all you have to do is subclass :class:`.CTransformersEngine` and
implement :meth:`~.CTransformersEngine.build_prompt` and :meth:`~.BaseEngine.message_len`.
.. seealso::
The source code of the :class:`.LlamaCTransformersEngine`, which uses the CTransformersEngine.
.. autoclass:: kani.engines.ctransformers.base.CTransformersEngine
:noindex:
.. automethod:: kani.engines.ctransformers.base.CTransformersEngine.build_prompt
:noindex:
.. automethod:: kani.engines.ctransformers.base.CTransformersEngine.message_len
:noindex:
|
PypiClean
|
/rvtools_python-0.0.1-py3-none-any.whl/rvtools/rvtools.py
|
""" Main rvtools module """
import os
import ssl
import argparse
import requests
import urllib3
from pyVim import connect
from rvtools.corerv import *
from rvtools.vinfo.vinfo import *
# requests.packages.urllib3.disable_warnings()
urllib3.disable_warnings()
def get_args():
parser = argparse.ArgumentParser(description="RVTools Python parameters")
parser.add_argument('-s', '--host',
required=False,
action='store',
help='vCenter server to connect to')
parser.add_argument('-u', '--username',
required=False,
action='store',
help='vCenter username')
parser.add_argument('-p', '--password',
required=False,
action='store',
help='vCenter username password')
parser.add_argument('-d', '--directory',
required=False,
action='store',
help='Directory where will be saved all csv files. Should be empty')
parser.add_argument('-v', '--verbose',
required=False,
action='store',
help='Show additional info.')
args = parser.parse_args()
return args
def main():
""" Def responsible to start the vCenter connection and call all report modules """
args = get_args()
if (args.host is None or args.username is None or args.password is None or args.directory is None):
print("Reading Conf File")
obj = CoreCode()
conn = obj.read_conf_file()
if conn is None:
exit()
else:
server = conn._vcenter
username = conn._username
password = conn._password
directory = conn._directory
if server == '<fqdn>':
print("You are using default values. Please update the file")
print("~/.rvtools.conf or just pass all mandatory parameters.")
exit()
else:
print("Using flags")
server = args.host
username = args.username
password = args.password
directory = args.directory
if not os.path.isdir(directory):
print("You have to create the dir {}".format(directory))
exit()
ssl_context = ssl._create_unverified_context()
print("vcenter: {}\nuser: {}\n".format(server, username))
service_instance = connect.SmartConnect(host=server, user=username, \
pwd=password, port=443, sslContext=ssl_context)
# VM Information
# vinfo_collect(service_instance)
vinfo_collect(service_instance, directory)
# https://code.vmware.com/apis/358/vsphere
if __name__ == "__main__":
main()
|
PypiClean
|
/InvokeAI-3.1.0-py3-none-any.whl/invokeai/backend/model_management/models/stable_diffusion_onnx.py
|
from enum import Enum
from typing import Literal
from diffusers import OnnxRuntimeModel
from .base import (
ModelConfigBase,
BaseModelType,
ModelType,
ModelVariantType,
DiffusersModel,
SchedulerPredictionType,
classproperty,
IAIOnnxRuntimeModel,
)
class StableDiffusionOnnxModelFormat(str, Enum):
Olive = "olive"
Onnx = "onnx"
class ONNXStableDiffusion1Model(DiffusersModel):
class Config(ModelConfigBase):
model_format: Literal[StableDiffusionOnnxModelFormat.Onnx]
variant: ModelVariantType
def __init__(self, model_path: str, base_model: BaseModelType, model_type: ModelType):
assert base_model == BaseModelType.StableDiffusion1
assert model_type == ModelType.ONNX
super().__init__(
model_path=model_path,
base_model=BaseModelType.StableDiffusion1,
model_type=ModelType.ONNX,
)
for child_name, child_type in self.child_types.items():
if child_type is OnnxRuntimeModel:
self.child_types[child_name] = IAIOnnxRuntimeModel
# TODO: check that no optimum models provided
@classmethod
def probe_config(cls, path: str, **kwargs):
model_format = cls.detect_format(path)
in_channels = 4 # TODO:
if in_channels == 9:
variant = ModelVariantType.Inpaint
elif in_channels == 4:
variant = ModelVariantType.Normal
else:
raise Exception("Unkown stable diffusion 1.* model format")
return cls.create_config(
path=path,
model_format=model_format,
variant=variant,
)
@classproperty
def save_to_config(cls) -> bool:
return True
@classmethod
def detect_format(cls, model_path: str):
# TODO: Detect onnx vs olive
return StableDiffusionOnnxModelFormat.Onnx
@classmethod
def convert_if_required(
cls,
model_path: str,
output_path: str,
config: ModelConfigBase,
base_model: BaseModelType,
) -> str:
return model_path
class ONNXStableDiffusion2Model(DiffusersModel):
    # TODO: check that configs are overwritten properly
class Config(ModelConfigBase):
model_format: Literal[StableDiffusionOnnxModelFormat.Onnx]
variant: ModelVariantType
prediction_type: SchedulerPredictionType
upcast_attention: bool
def __init__(self, model_path: str, base_model: BaseModelType, model_type: ModelType):
assert base_model == BaseModelType.StableDiffusion2
assert model_type == ModelType.ONNX
super().__init__(
model_path=model_path,
base_model=BaseModelType.StableDiffusion2,
model_type=ModelType.ONNX,
)
for child_name, child_type in self.child_types.items():
if child_type is OnnxRuntimeModel:
self.child_types[child_name] = IAIOnnxRuntimeModel
# TODO: check that no optimum models provided
@classmethod
def probe_config(cls, path: str, **kwargs):
model_format = cls.detect_format(path)
in_channels = 4 # TODO:
if in_channels == 9:
variant = ModelVariantType.Inpaint
elif in_channels == 5:
variant = ModelVariantType.Depth
elif in_channels == 4:
variant = ModelVariantType.Normal
else:
raise Exception("Unkown stable diffusion 2.* model format")
if variant == ModelVariantType.Normal:
prediction_type = SchedulerPredictionType.VPrediction
upcast_attention = True
else:
prediction_type = SchedulerPredictionType.Epsilon
upcast_attention = False
return cls.create_config(
path=path,
model_format=model_format,
variant=variant,
prediction_type=prediction_type,
upcast_attention=upcast_attention,
)
@classproperty
def save_to_config(cls) -> bool:
return True
@classmethod
def detect_format(cls, model_path: str):
# TODO: Detect onnx vs olive
return StableDiffusionOnnxModelFormat.Onnx
@classmethod
def convert_if_required(
cls,
model_path: str,
output_path: str,
config: ModelConfigBase,
base_model: BaseModelType,
) -> str:
return model_path
|
PypiClean
|
/ensmallen_graph-0.6.0-cp37-cp37m-manylinux2010_x86_64.whl/ensmallen_graph/datasets/string/paracoccusyeei.py
|
from typing import Dict
from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph
from ...ensmallen_graph import EnsmallenGraph # pylint: disable=import-error
def ParacoccusYeei(
directed: bool = False,
verbose: int = 2,
cache_path: str = "graphs/string",
**additional_graph_kwargs: Dict
) -> EnsmallenGraph:
"""Return new instance of the Paracoccus yeei graph.
The graph is automatically retrieved from the STRING repository.
Parameters
-------------------
directed: bool = False,
        Whether to load the graph as directed or undirected.
By default false.
verbose: int = 2,
        Whether to show loading bars during the retrieval and building
of the graph.
cache_path: str = "graphs",
Where to store the downloaded graphs.
additional_graph_kwargs: Dict,
Additional graph kwargs.
Returns
-----------------------
    Instance of Paracoccus yeei graph.
Report
---------------------
At the time of rendering these methods (please see datetime below), the graph
had the following characteristics:
Datetime: 2021-02-03 23:03:05.647137
The undirected graph Paracoccus yeei has 3996 nodes and 292503 weighted
edges, of which none are self-loops. The graph is dense as it has a density
of 0.03665 and has 5 connected components, where the component with most
nodes has 3988 nodes and the component with the least nodes has 2 nodes.
The graph median node degree is 109, the mean node degree is 146.40, and
the node degree mode is 1. The top 5 most central nodes are 1446473.JHWH01000001_gene1328
(degree 1518), 1446473.JHWH01000007_gene413 (degree 1166), 1446473.JHWH01000026_gene2027
(degree 1049), 1446473.JHWH01000013_gene2404 (degree 1012) and 1446473.JHWH01000007_gene411
(degree 958).
References
---------------------
Please cite the following if you use the data:
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
Usage example
----------------------
The usage of this graph is relatively straightforward:
.. code:: python
# First import the function to retrieve the graph from the datasets
from ensmallen_graph.datasets.string import ParacoccusYeei
# Then load the graph
graph = ParacoccusYeei()
# Finally, you can do anything with it, for instance, compute its report:
print(graph)
# If you need to run a link prediction task with validation,
# you can split the graph using a connected holdout as follows:
train_graph, validation_graph = graph.connected_holdout(
            # You can use an 80/20 split for the holdout, for example.
train_size=0.8,
# The random state is used to reproduce the holdout.
random_state=42,
            # Whether to show a loading bar.
verbose=True
)
# Remember that, if you need, you can enable the memory-time trade-offs:
train_graph.enable(
vector_sources=True,
vector_destinations=True,
vector_outbounds=True
)
# Consider using the methods made available in the Embiggen package
# to run graph embedding or link prediction tasks.
"""
return AutomaticallyRetrievedGraph(
graph_name="ParacoccusYeei",
dataset="string",
directed=directed,
verbose=verbose,
cache_path=cache_path,
additional_graph_kwargs=additional_graph_kwargs
)()
|
PypiClean
|
/nni_yds-0.3.7-py3-none-any.whl/nni_yds-0.3.7.data/data/nni/node_modules/readable-stream/lib/_stream_readable.js
|
'use strict';
module.exports = Readable;
/*<replacement>*/
var processNextTick = require('process-nextick-args');
/*</replacement>*/
/*<replacement>*/
var isArray = require('isarray');
/*</replacement>*/
/*<replacement>*/
var Buffer = require('buffer').Buffer;
/*</replacement>*/
Readable.ReadableState = ReadableState;
var EE = require('events');
/*<replacement>*/
var EElistenerCount = function (emitter, type) {
return emitter.listeners(type).length;
};
/*</replacement>*/
/*<replacement>*/
var Stream;
(function () {
try {
Stream = require('st' + 'ream');
} catch (_) {} finally {
if (!Stream) Stream = require('events').EventEmitter;
}
})();
/*</replacement>*/
var Buffer = require('buffer').Buffer;
/*<replacement>*/
var util = require('core-util-is');
util.inherits = require('inherits');
/*</replacement>*/
/*<replacement>*/
var debugUtil = require('util');
var debug = undefined;
if (debugUtil && debugUtil.debuglog) {
debug = debugUtil.debuglog('stream');
} else {
debug = function () {};
}
/*</replacement>*/
var StringDecoder;
util.inherits(Readable, Stream);
var Duplex;
function ReadableState(options, stream) {
Duplex = Duplex || require('./_stream_duplex');
options = options || {};
// object stream flag. Used to make read(n) ignore n and to
// make all the buffer merging and length checks go away
this.objectMode = !!options.objectMode;
if (stream instanceof Duplex) this.objectMode = this.objectMode || !!options.readableObjectMode;
// the point at which it stops calling _read() to fill the buffer
// Note: 0 is a valid value, means "don't call _read preemptively ever"
var hwm = options.highWaterMark;
var defaultHwm = this.objectMode ? 16 : 16 * 1024;
this.highWaterMark = hwm || hwm === 0 ? hwm : defaultHwm;
// cast to ints.
this.highWaterMark = ~ ~this.highWaterMark;
this.buffer = [];
this.length = 0;
this.pipes = null;
this.pipesCount = 0;
this.flowing = null;
this.ended = false;
this.endEmitted = false;
this.reading = false;
// a flag to be able to tell if the onwrite cb is called immediately,
// or on a later tick. We set this to true at first, because any
// actions that shouldn't happen until "later" should generally also
// not happen before the first write call.
this.sync = true;
// whenever we return null, then we set a flag to say
// that we're awaiting a 'readable' event emission.
this.needReadable = false;
this.emittedReadable = false;
this.readableListening = false;
this.resumeScheduled = false;
// Crypto is kind of old and crusty. Historically, its default string
// encoding is 'binary' so we have to make this configurable.
// Everything else in the universe uses 'utf8', though.
this.defaultEncoding = options.defaultEncoding || 'utf8';
// when piping, we only care about 'readable' events that happen
// after read()ing all the bytes and not getting any pushback.
this.ranOut = false;
// the number of writers that are awaiting a drain event in .pipe()s
this.awaitDrain = 0;
// if true, a maybeReadMore has been scheduled
this.readingMore = false;
this.decoder = null;
this.encoding = null;
if (options.encoding) {
if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;
this.decoder = new StringDecoder(options.encoding);
this.encoding = options.encoding;
}
}
var Duplex;
function Readable(options) {
Duplex = Duplex || require('./_stream_duplex');
if (!(this instanceof Readable)) return new Readable(options);
this._readableState = new ReadableState(options, this);
// legacy
this.readable = true;
if (options && typeof options.read === 'function') this._read = options.read;
Stream.call(this);
}
// Manually shove something into the read() buffer.
// This returns true if the highWaterMark has not been hit yet,
// similar to how Writable.write() returns true if you should
// write() some more.
Readable.prototype.push = function (chunk, encoding) {
var state = this._readableState;
if (!state.objectMode && typeof chunk === 'string') {
encoding = encoding || state.defaultEncoding;
if (encoding !== state.encoding) {
chunk = new Buffer(chunk, encoding);
encoding = '';
}
}
return readableAddChunk(this, state, chunk, encoding, false);
};
// Unshift should *always* be something directly out of read()
Readable.prototype.unshift = function (chunk) {
var state = this._readableState;
return readableAddChunk(this, state, chunk, '', true);
};
Readable.prototype.isPaused = function () {
return this._readableState.flowing === false;
};
function readableAddChunk(stream, state, chunk, encoding, addToFront) {
var er = chunkInvalid(state, chunk);
if (er) {
stream.emit('error', er);
} else if (chunk === null) {
state.reading = false;
onEofChunk(stream, state);
} else if (state.objectMode || chunk && chunk.length > 0) {
if (state.ended && !addToFront) {
var e = new Error('stream.push() after EOF');
stream.emit('error', e);
} else if (state.endEmitted && addToFront) {
var e = new Error('stream.unshift() after end event');
stream.emit('error', e);
} else {
var skipAdd;
if (state.decoder && !addToFront && !encoding) {
chunk = state.decoder.write(chunk);
skipAdd = !state.objectMode && chunk.length === 0;
}
if (!addToFront) state.reading = false;
// Don't add to the buffer if we've decoded to an empty string chunk and
// we're not in object mode
if (!skipAdd) {
// if we want the data now, just emit it.
if (state.flowing && state.length === 0 && !state.sync) {
stream.emit('data', chunk);
stream.read(0);
} else {
// update the buffer info.
state.length += state.objectMode ? 1 : chunk.length;
if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk);
if (state.needReadable) emitReadable(stream);
}
}
maybeReadMore(stream, state);
}
} else if (!addToFront) {
state.reading = false;
}
return needMoreData(state);
}
// if it's past the high water mark, we can push in some more.
// Also, if we have no data yet, we can stand some
// more bytes. This is to work around cases where hwm=0,
// such as the repl. Also, if the push() triggered a
// readable event, and the user called read(largeNumber) such that
// needReadable was set, then we ought to push more, so that another
// 'readable' event will be triggered.
function needMoreData(state) {
return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0);
}
// backwards compatibility.
Readable.prototype.setEncoding = function (enc) {
if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;
this._readableState.decoder = new StringDecoder(enc);
this._readableState.encoding = enc;
return this;
};
// Don't raise the hwm > 8MB
var MAX_HWM = 0x800000;
function computeNewHighWaterMark(n) {
if (n >= MAX_HWM) {
n = MAX_HWM;
} else {
// Get the next highest power of 2
n--;
n |= n >>> 1;
n |= n >>> 2;
n |= n >>> 4;
n |= n >>> 8;
n |= n >>> 16;
n++;
}
return n;
}
function howMuchToRead(n, state) {
if (state.length === 0 && state.ended) return 0;
if (state.objectMode) return n === 0 ? 0 : 1;
if (n === null || isNaN(n)) {
// only flow one buffer at a time
if (state.flowing && state.buffer.length) return state.buffer[0].length;else return state.length;
}
if (n <= 0) return 0;
// If we're asking for more than the target buffer level,
// then raise the water mark. Bump up to the next highest
// power of 2, to prevent increasing it excessively in tiny
// amounts.
if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n);
// don't have that much. return null, unless we've ended.
if (n > state.length) {
if (!state.ended) {
state.needReadable = true;
return 0;
} else {
return state.length;
}
}
return n;
}
// you can override either this method, or the async _read(n) below.
Readable.prototype.read = function (n) {
debug('read', n);
var state = this._readableState;
var nOrig = n;
if (typeof n !== 'number' || n > 0) state.emittedReadable = false;
// if we're doing read(0) to trigger a readable event, but we
// already have a bunch of data in the buffer, then just trigger
// the 'readable' event and move on.
if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) {
debug('read: emitReadable', state.length, state.ended);
if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this);
return null;
}
n = howMuchToRead(n, state);
// if we've ended, and we're now clear, then finish it up.
if (n === 0 && state.ended) {
if (state.length === 0) endReadable(this);
return null;
}
// All the actual chunk generation logic needs to be
// *below* the call to _read. The reason is that in certain
// synthetic stream cases, such as passthrough streams, _read
// may be a completely synchronous operation which may change
// the state of the read buffer, providing enough data when
// before there was *not* enough.
//
// So, the steps are:
// 1. Figure out what the state of things will be after we do
// a read from the buffer.
//
// 2. If that resulting state will trigger a _read, then call _read.
// Note that this may be asynchronous, or synchronous. Yes, it is
// deeply ugly to write APIs this way, but that still doesn't mean
// that the Readable class should behave improperly, as streams are
// designed to be sync/async agnostic.
// Take note if the _read call is sync or async (ie, if the read call
// has returned yet), so that we know whether or not it's safe to emit
// 'readable' etc.
//
// 3. Actually pull the requested chunks out of the buffer and return.
// if we need a readable event, then we need to do some reading.
var doRead = state.needReadable;
debug('need readable', doRead);
// if we currently have less than the highWaterMark, then also read some
if (state.length === 0 || state.length - n < state.highWaterMark) {
doRead = true;
debug('length less than watermark', doRead);
}
// however, if we've ended, then there's no point, and if we're already
// reading, then it's unnecessary.
if (state.ended || state.reading) {
doRead = false;
debug('reading or ended', doRead);
}
if (doRead) {
debug('do read');
state.reading = true;
state.sync = true;
// if the length is currently zero, then we *need* a readable event.
if (state.length === 0) state.needReadable = true;
// call internal read method
this._read(state.highWaterMark);
state.sync = false;
}
// If _read pushed data synchronously, then `reading` will be false,
// and we need to re-evaluate how much data we can return to the user.
if (doRead && !state.reading) n = howMuchToRead(nOrig, state);
var ret;
if (n > 0) ret = fromList(n, state);else ret = null;
if (ret === null) {
state.needReadable = true;
n = 0;
}
state.length -= n;
// If we have nothing in the buffer, then we want to know
// as soon as we *do* get something into the buffer.
if (state.length === 0 && !state.ended) state.needReadable = true;
// If we tried to read() past the EOF, then emit end on the next tick.
if (nOrig !== n && state.ended && state.length === 0) endReadable(this);
if (ret !== null) this.emit('data', ret);
return ret;
};
function chunkInvalid(state, chunk) {
var er = null;
if (!Buffer.isBuffer(chunk) && typeof chunk !== 'string' && chunk !== null && chunk !== undefined && !state.objectMode) {
er = new TypeError('Invalid non-string/buffer chunk');
}
return er;
}
function onEofChunk(stream, state) {
if (state.ended) return;
if (state.decoder) {
var chunk = state.decoder.end();
if (chunk && chunk.length) {
state.buffer.push(chunk);
state.length += state.objectMode ? 1 : chunk.length;
}
}
state.ended = true;
// emit 'readable' now to make sure it gets picked up.
emitReadable(stream);
}
// Don't emit readable right away in sync mode, because this can trigger
// another read() call => stack overflow. This way, it might trigger
// a nextTick recursion warning, but that's not so bad.
function emitReadable(stream) {
var state = stream._readableState;
state.needReadable = false;
if (!state.emittedReadable) {
debug('emitReadable', state.flowing);
state.emittedReadable = true;
if (state.sync) processNextTick(emitReadable_, stream);else emitReadable_(stream);
}
}
function emitReadable_(stream) {
debug('emit readable');
stream.emit('readable');
flow(stream);
}
// at this point, the user has presumably seen the 'readable' event,
// and called read() to consume some data. that may have triggered
// in turn another _read(n) call, in which case reading = true if
// it's in progress.
// However, if we're not ended, or reading, and the length < hwm,
// then go ahead and try to read some more preemptively.
function maybeReadMore(stream, state) {
if (!state.readingMore) {
state.readingMore = true;
processNextTick(maybeReadMore_, stream, state);
}
}
function maybeReadMore_(stream, state) {
var len = state.length;
while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) {
debug('maybeReadMore read 0');
stream.read(0);
if (len === state.length)
// didn't get any data, stop spinning.
break;else len = state.length;
}
state.readingMore = false;
}
// abstract method. to be overridden in specific implementation classes.
// call cb(er, data) where data is <= n in length.
// for virtual (non-string, non-buffer) streams, "length" is somewhat
// arbitrary, and perhaps not very meaningful.
Readable.prototype._read = function (n) {
this.emit('error', new Error('not implemented'));
};
Readable.prototype.pipe = function (dest, pipeOpts) {
var src = this;
var state = this._readableState;
switch (state.pipesCount) {
case 0:
state.pipes = dest;
break;
case 1:
state.pipes = [state.pipes, dest];
break;
default:
state.pipes.push(dest);
break;
}
state.pipesCount += 1;
debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts);
var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr;
var endFn = doEnd ? onend : cleanup;
if (state.endEmitted) processNextTick(endFn);else src.once('end', endFn);
dest.on('unpipe', onunpipe);
function onunpipe(readable) {
debug('onunpipe');
if (readable === src) {
cleanup();
}
}
function onend() {
debug('onend');
dest.end();
}
// when the dest drains, it reduces the awaitDrain counter
// on the source. This would be more elegant with a .once()
// handler in flow(), but adding and removing repeatedly is
// too slow.
var ondrain = pipeOnDrain(src);
dest.on('drain', ondrain);
var cleanedUp = false;
function cleanup() {
debug('cleanup');
// cleanup event handlers once the pipe is broken
dest.removeListener('close', onclose);
dest.removeListener('finish', onfinish);
dest.removeListener('drain', ondrain);
dest.removeListener('error', onerror);
dest.removeListener('unpipe', onunpipe);
src.removeListener('end', onend);
src.removeListener('end', cleanup);
src.removeListener('data', ondata);
cleanedUp = true;
// if the reader is waiting for a drain event from this
// specific writer, then it would cause it to never start
// flowing again.
// So, if this is awaiting a drain, then we just call it now.
// If we don't know, then assume that we are waiting for one.
if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain();
}
src.on('data', ondata);
function ondata(chunk) {
debug('ondata');
var ret = dest.write(chunk);
if (false === ret) {
// If the user unpiped during `dest.write()`, it is possible
// to get stuck in a permanently paused state if that write
// also returned false.
if (state.pipesCount === 1 && state.pipes[0] === dest && src.listenerCount('data') === 1 && !cleanedUp) {
debug('false write response, pause', src._readableState.awaitDrain);
src._readableState.awaitDrain++;
}
src.pause();
}
}
// if the dest has an error, then stop piping into it.
// however, don't suppress the throwing behavior for this.
function onerror(er) {
debug('onerror', er);
unpipe();
dest.removeListener('error', onerror);
if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er);
}
// This is a brutally ugly hack to make sure that our error handler
// is attached before any userland ones. NEVER DO THIS.
if (!dest._events || !dest._events.error) dest.on('error', onerror);else if (isArray(dest._events.error)) dest._events.error.unshift(onerror);else dest._events.error = [onerror, dest._events.error];
// Both close and finish should trigger unpipe, but only once.
function onclose() {
dest.removeListener('finish', onfinish);
unpipe();
}
dest.once('close', onclose);
function onfinish() {
debug('onfinish');
dest.removeListener('close', onclose);
unpipe();
}
dest.once('finish', onfinish);
function unpipe() {
debug('unpipe');
src.unpipe(dest);
}
// tell the dest that it's being piped to
dest.emit('pipe', src);
// start the flow if it hasn't been started already.
if (!state.flowing) {
debug('pipe resume');
src.resume();
}
return dest;
};
function pipeOnDrain(src) {
return function () {
var state = src._readableState;
debug('pipeOnDrain', state.awaitDrain);
if (state.awaitDrain) state.awaitDrain--;
if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) {
state.flowing = true;
flow(src);
}
};
}
Readable.prototype.unpipe = function (dest) {
var state = this._readableState;
// if we're not piping anywhere, then do nothing.
if (state.pipesCount === 0) return this;
// just one destination. most common case.
if (state.pipesCount === 1) {
// passed in one, but it's not the right one.
if (dest && dest !== state.pipes) return this;
if (!dest) dest = state.pipes;
// got a match.
state.pipes = null;
state.pipesCount = 0;
state.flowing = false;
if (dest) dest.emit('unpipe', this);
return this;
}
// slow case. multiple pipe destinations.
if (!dest) {
// remove all.
var dests = state.pipes;
var len = state.pipesCount;
state.pipes = null;
state.pipesCount = 0;
state.flowing = false;
for (var _i = 0; _i < len; _i++) {
dests[_i].emit('unpipe', this);
}return this;
}
// try to find the right one.
var i = indexOf(state.pipes, dest);
if (i === -1) return this;
state.pipes.splice(i, 1);
state.pipesCount -= 1;
if (state.pipesCount === 1) state.pipes = state.pipes[0];
dest.emit('unpipe', this);
return this;
};
// set up data events if they are asked for
// Ensure readable listeners eventually get something
Readable.prototype.on = function (ev, fn) {
var res = Stream.prototype.on.call(this, ev, fn);
// If listening to data, and it has not explicitly been paused,
// then call resume to start the flow of data on the next tick.
if (ev === 'data' && false !== this._readableState.flowing) {
this.resume();
}
if (ev === 'readable' && !this._readableState.endEmitted) {
var state = this._readableState;
if (!state.readableListening) {
state.readableListening = true;
state.emittedReadable = false;
state.needReadable = true;
if (!state.reading) {
processNextTick(nReadingNextTick, this);
} else if (state.length) {
emitReadable(this, state);
}
}
}
return res;
};
Readable.prototype.addListener = Readable.prototype.on;
function nReadingNextTick(self) {
debug('readable nexttick read 0');
self.read(0);
}
// pause() and resume() are remnants of the legacy readable stream API
// If the user uses them, then switch into old mode.
Readable.prototype.resume = function () {
var state = this._readableState;
if (!state.flowing) {
debug('resume');
state.flowing = true;
resume(this, state);
}
return this;
};
function resume(stream, state) {
if (!state.resumeScheduled) {
state.resumeScheduled = true;
processNextTick(resume_, stream, state);
}
}
function resume_(stream, state) {
if (!state.reading) {
debug('resume read 0');
stream.read(0);
}
state.resumeScheduled = false;
stream.emit('resume');
flow(stream);
if (state.flowing && !state.reading) stream.read(0);
}
Readable.prototype.pause = function () {
debug('call pause flowing=%j', this._readableState.flowing);
if (false !== this._readableState.flowing) {
debug('pause');
this._readableState.flowing = false;
this.emit('pause');
}
return this;
};
function flow(stream) {
var state = stream._readableState;
debug('flow', state.flowing);
if (state.flowing) {
do {
var chunk = stream.read();
} while (null !== chunk && state.flowing);
}
}
// wrap an old-style stream as the async data source.
// This is *not* part of the readable stream interface.
// It is an ugly unfortunate mess of history.
Readable.prototype.wrap = function (stream) {
var state = this._readableState;
var paused = false;
var self = this;
stream.on('end', function () {
debug('wrapped end');
if (state.decoder && !state.ended) {
var chunk = state.decoder.end();
if (chunk && chunk.length) self.push(chunk);
}
self.push(null);
});
stream.on('data', function (chunk) {
debug('wrapped data');
if (state.decoder) chunk = state.decoder.write(chunk);
// don't skip over falsy values in objectMode
    if (state.objectMode && (chunk === null || chunk === undefined)) return;
    else if (!state.objectMode && (!chunk || !chunk.length)) return;
var ret = self.push(chunk);
if (!ret) {
paused = true;
stream.pause();
}
});
// proxy all the other methods.
// important when wrapping filters and duplexes.
for (var i in stream) {
if (this[i] === undefined && typeof stream[i] === 'function') {
this[i] = function (method) {
return function () {
return stream[method].apply(stream, arguments);
};
}(i);
}
}
// proxy certain important events.
var events = ['error', 'close', 'destroy', 'pause', 'resume'];
forEach(events, function (ev) {
stream.on(ev, self.emit.bind(self, ev));
});
// when we try to consume some more bytes, simply unpause the
// underlying stream.
self._read = function (n) {
debug('wrapped _read', n);
if (paused) {
paused = false;
stream.resume();
}
};
return self;
};
// exposed for testing purposes only.
Readable._fromList = fromList;
// Pluck off n bytes from an array of buffers.
// Length is the combined lengths of all the buffers in the list.
function fromList(n, state) {
var list = state.buffer;
var length = state.length;
var stringMode = !!state.decoder;
var objectMode = !!state.objectMode;
var ret;
// nothing in the list, definitely empty.
if (list.length === 0) return null;
  if (length === 0) ret = null;
  else if (objectMode) ret = list.shift();
  else if (!n || n >= length) {
    // read it all, truncate the array.
    if (stringMode) ret = list.join('');
    else if (list.length === 1) ret = list[0];
    else ret = Buffer.concat(list, length);
    list.length = 0;
} else {
// read just some of it.
if (n < list[0].length) {
// just take a part of the first list item.
// slice is the same for buffers and strings.
var buf = list[0];
ret = buf.slice(0, n);
list[0] = buf.slice(n);
} else if (n === list[0].length) {
// first list is a perfect match
ret = list.shift();
} else {
// complex case.
// we have enough to cover it, but it spans past the first buffer.
      if (stringMode) ret = '';
      else ret = new Buffer(n);
var c = 0;
for (var i = 0, l = list.length; i < l && c < n; i++) {
var buf = list[0];
var cpy = Math.min(n - c, buf.length);
        if (stringMode) ret += buf.slice(0, cpy);
        else buf.copy(ret, c, 0, cpy);
        if (cpy < buf.length) list[0] = buf.slice(cpy);
        else list.shift();
c += cpy;
}
}
}
return ret;
}
function endReadable(stream) {
var state = stream._readableState;
// If we get here before consuming all the bytes, then that is a
// bug in node. Should never happen.
if (state.length > 0) throw new Error('endReadable called on non-empty stream');
if (!state.endEmitted) {
state.ended = true;
processNextTick(endReadableNT, state, stream);
}
}
function endReadableNT(state, stream) {
// Check that we didn't get one last unshift.
if (!state.endEmitted && state.length === 0) {
state.endEmitted = true;
stream.readable = false;
stream.emit('end');
}
}
function forEach(xs, f) {
for (var i = 0, l = xs.length; i < l; i++) {
f(xs[i], i);
}
}
function indexOf(xs, x) {
for (var i = 0, l = xs.length; i < l; i++) {
if (xs[i] === x) return i;
}
return -1;
}
/NuPlone-2.2.0.tar.gz/NuPlone-2.2.0/plonetheme/nuplone/skin/tools.py
from AccessControl import getSecurityManager
from Acquisition import aq_inner
from plone import api
from plone.memoize.view import memoize
from plonetheme.nuplone import utils
from Products.Five import BrowserView
class Tools(BrowserView):
"""Basic view to expose utilties to templates."""
@property
@memoize
def user(self):
return getSecurityManager().getUser()
@property
@memoize
def anonymous(self):
return self.user is None or self.user.getUserName() == "Anonymous User"
@property
@memoize
def portal(self):
return utils.getPortal(self.context)
@property
@memoize
def portal_url(self):
return self.portal.absolute_url()
@property
@memoize
def navroot(self):
return utils.getNavigationRoot(self.context)
@property
@memoize
def navroot_url(self):
return self.navroot.absolute_url()
@property
@memoize
def context_url(self):
return aq_inner(self.context).absolute_url()
def render(self):
"""Little trick to make it easier to access this via from a TALES
expression."""
return self
def view_type(self):
return utils.viewType(self.context, self.request)
def site_title(self):
return api.portal.get_registry_record("plone.site_title")
def formatDate(self, date, length="long"):
return utils.formatDate(self.request, date, length)
def formatTime(self, time, length=None):
return utils.formatTime(self.request, time, length)
def formatDatetime(self, timestamp, length="long"):
return utils.formatDateTime(self.request, timestamp, length)
def formatDecimal(self, value, length=None):
return self.request.locale.numbers.getFormatter("decimal", length).format(value)
def formatPercentage(self, value, length=None):
return self.request.locale.numbers.getFormatter("percent", length).format(value)
def countryName(self, code):
return self.request.locale.displayNames.territories.get(code.upper())
def languageName(self, code, default=None):
code = code.lower()
names = self.request.locale.displayNames.languages
return names.get(code, default)
def checkPermission(self, permission):
return utils.checkPermission(self.context, permission)
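
# Usage sketch (hypothetical): page templates normally reach this view through
# its traversal name; the "@@tools" name below is an assumption, not taken from
# this file. From trusted code the calls would look roughly like:
#
#   tools = context.restrictedTraverse("@@tools")
#   tools.formatDate(some_date)          # locale-aware date string
#   tools.checkPermission("View")        # True/False for the current user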
/monk_gluon_cuda101-0.0.1.tar.gz/monk_gluon_cuda101-0.0.1/monk/pytorch/optimizers/optimizers.py
from monk.pytorch.optimizers.imports import *
from monk.system.imports import *
@accepts(dict, [int, float], momentum=[int, float], momentum_dampening_rate=[int, float], weight_decay=[int, float],
clipnorm=[int, float], clipvalue=[int, float], post_trace=False)
#@TraceFunction(trace_args=False, trace_rv=False)
def sgd(system_dict, learning_rate, momentum=0, weight_decay=0, momentum_dampening_rate=0, clipnorm=0.0, clipvalue=0.0):
'''
Select stochastic gradient descent optimizer
Args:
system_dict (dict): System dictionary storing experiment state and set variables
learning_rate (float): Initial base learning rate
momentum (float): Momentum value for driving the weights towards minima
weight_decay (float): Value for regularizing weights post every update
momentum_dampening_rate (float): Reduction rate for momentum
clipnorm (float): Gradient clipping factor
clipvalue (float): Value for clipping
Returns:
dict: updated system dict
'''
system_dict["local"]["optimizer"] = "sgd";
system_dict["hyper-parameters"]["learning_rate"] = learning_rate;
system_dict["hyper-parameters"]["optimizer"]["name"] = "sgd";
system_dict["hyper-parameters"]["optimizer"]["params"]["lr"] = learning_rate;
system_dict["hyper-parameters"]["optimizer"]["params"]["momentum"] = momentum;
system_dict["hyper-parameters"]["optimizer"]["params"]["weight_decay"] = weight_decay;
system_dict["hyper-parameters"]["optimizer"]["params"]["momentum_dampening_rate"] = momentum_dampening_rate;
system_dict["hyper-parameters"]["optimizer"]["params"]["clipnorm"] = clipnorm;
system_dict["hyper-parameters"]["optimizer"]["params"]["clipvalue"] = clipvalue;
return system_dict;
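
# Usage sketch (hypothetical): in Monk the system dictionary is built by the
# prototype classes, so the skeleton below only mirrors the keys this setter
# touches and is not the full structure.
#
#   demo = {"local": {}, "hyper-parameters": {"optimizer": {"params": {}}}}
#   demo = sgd(demo, 0.01, momentum=0.9, weight_decay=1e-4)
#   demo["hyper-parameters"]["optimizer"]["params"]["lr"]   # -> 0.01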
@accepts(dict, [int, float], momentum=[int, float], momentum_dampening_rate=[int, float], weight_decay=[int, float],
clipnorm=[int, float], clipvalue=[int, float], post_trace=False)
#@TraceFunction(trace_args=False, trace_rv=False)
def nesterov_sgd(system_dict, learning_rate, momentum=0, weight_decay=0, momentum_dampening_rate=0, clipnorm=0.0, clipvalue=0.0):
'''
Select stochastic gradient descent optimizer with nesterov acceleration
Args:
system_dict (dict): System dictionary storing experiment state and set variables
learning_rate (float): Initial base learning rate
momentum (float): Momentum value for driving the weights towards minima
weight_decay (float): Value for regularizing weights post every update
momentum_dampening_rate (float): Reduction rate for momentum
clipnorm (float): Gradient clipping factor
clipvalue (float): Value for clipping
Returns:
dict: updated system dict
'''
system_dict["local"]["optimizer"] = "nesterov_sgd";
system_dict["hyper-parameters"]["learning_rate"] = learning_rate;
system_dict["hyper-parameters"]["optimizer"]["name"] = "nesterov_sgd";
system_dict["hyper-parameters"]["optimizer"]["params"]["lr"] = learning_rate;
system_dict["hyper-parameters"]["optimizer"]["params"]["momentum"] = momentum;
system_dict["hyper-parameters"]["optimizer"]["params"]["weight_decay"] = weight_decay;
system_dict["hyper-parameters"]["optimizer"]["params"]["momentum_dampening_rate"] = momentum_dampening_rate;
system_dict["hyper-parameters"]["optimizer"]["params"]["clipnorm"] = clipnorm;
system_dict["hyper-parameters"]["optimizer"]["params"]["clipvalue"] = clipvalue;
return system_dict;
@accepts(dict, [int, float], decay_rate=[int, float], epsilon=[int, float], weight_decay=[int, float],
clipnorm=[int, float], clipvalue=[int, float], post_trace=False)
#@TraceFunction(trace_args=False, trace_rv=False)
def rmsprop(system_dict, learning_rate, decay_rate=0.99, epsilon=1e-08, weight_decay=0,
clipnorm=0.0, clipvalue=0.0):
'''
    Select root mean square propagation (RMSprop) optimizer
Args:
system_dict (dict): System dictionary storing experiment state and set variables
learning_rate (float): Initial base learning rate
        decay_rate (float): Decay factor for the moving average of past squared gradients
epsilon (float): A value to avoid division by zero
weight_decay (float): Value for regularizing weights post every update
clipnorm (float): Gradient clipping factor
clipvalue (float): Value for clipping
Returns:
dict: updated system dict
'''
system_dict["local"]["optimizer"] = "rmsprop";
system_dict["hyper-parameters"]["learning_rate"] = learning_rate;
system_dict["hyper-parameters"]["optimizer"]["name"] = "rmsprop";
system_dict["hyper-parameters"]["optimizer"]["params"]["lr"] = learning_rate;
system_dict["hyper-parameters"]["optimizer"]["params"]["epsilon"] = epsilon;
system_dict["hyper-parameters"]["optimizer"]["params"]["decay_rate"] = decay_rate;
system_dict["hyper-parameters"]["optimizer"]["params"]["weight_decay"] = weight_decay;
system_dict["hyper-parameters"]["optimizer"]["params"]["clipnorm"] = clipnorm;
system_dict["hyper-parameters"]["optimizer"]["params"]["clipvalue"] = clipvalue;
return system_dict;
@accepts(dict, [int, float], decay_rate=[int, float], epsilon=[int, float], weight_decay=[int, float],
momentum=[int, float], post_trace=False)
#@TraceFunction(trace_args=False, trace_rv=False)
def momentum_rmsprop(system_dict, learning_rate, decay_rate=0.99, epsilon=1e-08, weight_decay=0,
momentum=0.9):
'''
    Select root mean square propagation (RMSprop) optimizer with momentum
Args:
system_dict (dict): System dictionary storing experiment state and set variables
learning_rate (float): Initial base learning rate
        decay_rate (float): Decay factor for the moving average of past squared gradients
epsilon (float): A value to avoid division by zero
weight_decay (float): Value for regularizing weights post every update
momentum (float): Momentum value for driving the weights towards minima
Returns:
dict: updated system dict
'''
system_dict["local"]["optimizer"] = "rmsprop";
system_dict["hyper-parameters"]["learning_rate"] = learning_rate;
system_dict["hyper-parameters"]["optimizer"]["name"] = "rmsprop";
system_dict["hyper-parameters"]["optimizer"]["params"]["lr"] = learning_rate;
system_dict["hyper-parameters"]["optimizer"]["params"]["epsilon"] = epsilon;
system_dict["hyper-parameters"]["optimizer"]["params"]["decay_rate"] = decay_rate;
system_dict["hyper-parameters"]["optimizer"]["params"]["weight_decay"] = weight_decay;
system_dict["hyper-parameters"]["optimizer"]["params"]["momentum"] = momentum;
return system_dict;
@accepts(dict, [int, float], beta1=[int, float], beta2=[int, float], epsilon=[int, float], weight_decay=[int, float], amsgrad=bool,
clipnorm=[int, float], clipvalue=[int, float], post_trace=False)
#@TraceFunction(trace_args=False, trace_rv=False)
def adam(system_dict, learning_rate, beta1=0.9, beta2=0.999, epsilon=1e-08, weight_decay=0, amsgrad=False, clipnorm=0.0, clipvalue=0.0):
'''
Select ADAM optimizer
Args:
system_dict (dict): System dictionary storing experiment state and set variables
learning_rate (float): Initial base learning rate
        beta1 (float): Exponential decay rate for the first moment estimates
        beta2 (float): Exponential decay rate for the second moment estimates
weight_decay (float): Value for regularizing weights post every update
amsgrad (bool): If True, AMSGrad variant of this algorithm is used
epsilon (float): A value to avoid division by zero
clipnorm (float): Gradient clipping factor
clipvalue (float): Value for clipping
Returns:
dict: updated system dict
'''
system_dict["local"]["optimizer"] = "adam";
system_dict["hyper-parameters"]["learning_rate"] = learning_rate;
system_dict["hyper-parameters"]["optimizer"]["name"] = "adam";
system_dict["hyper-parameters"]["optimizer"]["params"]["lr"] = learning_rate;
system_dict["hyper-parameters"]["optimizer"]["params"]["beta1"] = beta1;
system_dict["hyper-parameters"]["optimizer"]["params"]["beta2"] = beta2;
system_dict["hyper-parameters"]["optimizer"]["params"]["epsilon"] = epsilon;
system_dict["hyper-parameters"]["optimizer"]["params"]["weight_decay"] = weight_decay;
system_dict["hyper-parameters"]["optimizer"]["params"]["amsgrad"] = amsgrad;
system_dict["hyper-parameters"]["optimizer"]["params"]["clipnorm"] = clipnorm;
system_dict["hyper-parameters"]["optimizer"]["params"]["clipvalue"] = clipvalue;
return system_dict;
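
# As in the sgd sketch above (hypothetical skeleton dict), switching to the
# AMSGrad variant only requires flipping the flag:
#
#   demo = adam(demo, 0.001, beta1=0.9, beta2=0.999, amsgrad=True)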
@accepts(dict, [int, float], beta1=[int, float], beta2=[int, float], epsilon=[int, float], weight_decay=[int, float],
clipnorm=[int, float], clipvalue=[int, float], post_trace=False)
#@TraceFunction(trace_args=False, trace_rv=False)
def adamax(system_dict, learning_rate, beta1=0.9, beta2=0.999, epsilon=1e-08, weight_decay=0, clipnorm=0.0, clipvalue=0.0):
'''
Select Adamax optimizer
Args:
system_dict (dict): System dictionary storing experiment state and set variables
learning_rate (float): Initial base learning rate
        beta1 (float): Exponential decay rate for the first moment estimates
        beta2 (float): Exponential decay rate for the second moment estimates
weight_decay (float): Value for regularizing weights post every update
epsilon (float): A value to avoid division by zero
clipnorm (float): Gradient clipping factor
clipvalue (float): Value for clipping
Returns:
dict: updated system dict
'''
system_dict["local"]["optimizer"] = "adamax";
system_dict["hyper-parameters"]["learning_rate"] = learning_rate;
system_dict["hyper-parameters"]["optimizer"]["name"] = "adamax";
system_dict["hyper-parameters"]["optimizer"]["params"]["lr"] = learning_rate;
system_dict["hyper-parameters"]["optimizer"]["params"]["beta1"] = beta1;
system_dict["hyper-parameters"]["optimizer"]["params"]["beta2"] = beta2;
system_dict["hyper-parameters"]["optimizer"]["params"]["epsilon"] = epsilon;
system_dict["hyper-parameters"]["optimizer"]["params"]["weight_decay"] = weight_decay;
system_dict["hyper-parameters"]["optimizer"]["params"]["clipnorm"] = clipnorm;
system_dict["hyper-parameters"]["optimizer"]["params"]["clipvalue"] = clipvalue;
return system_dict;
@accepts(dict, [int, float], beta1=[int, float], beta2=[int, float], epsilon=[int, float], weight_decay=[int, float], amsgrad=bool, post_trace=False)
#@TraceFunction(trace_args=False, trace_rv=False)
def adamw(system_dict, learning_rate, beta1=0.9, beta2=0.999, epsilon=1e-08, weight_decay=0, amsgrad=False):
'''
Select ADAMW variant of ADAM optimizer
Args:
system_dict (dict): System dictionary storing experiment state and set variables
learning_rate (float): Initial base learning rate
        beta1 (float): Exponential decay rate for the first moment estimates
        beta2 (float): Exponential decay rate for the second moment estimates
weight_decay (float): Value for regularizing weights post every update
amsgrad (bool): If True, AMSGrad variant of this algorithm is used
epsilon (float): A value to avoid division by zero
Returns:
dict: updated system dict
'''
system_dict["local"]["optimizer"] = "adamw";
system_dict["hyper-parameters"]["learning_rate"] = learning_rate;
system_dict["hyper-parameters"]["optimizer"]["name"] = "adamw";
system_dict["hyper-parameters"]["optimizer"]["params"]["lr"] = learning_rate;
system_dict["hyper-parameters"]["optimizer"]["params"]["beta1"] = beta1;
system_dict["hyper-parameters"]["optimizer"]["params"]["beta2"] = beta2;
system_dict["hyper-parameters"]["optimizer"]["params"]["epsilon"] = epsilon;
system_dict["hyper-parameters"]["optimizer"]["params"]["weight_decay"] = weight_decay;
system_dict["hyper-parameters"]["optimizer"]["params"]["amsgrad"] = amsgrad;
return system_dict;
@accepts(dict, [int, float], rho=[int, float], epsilon=[int, float], weight_decay=[int, float],
clipnorm=[int, float], clipvalue=[int, float], post_trace=False)
#@TraceFunction(trace_args=False, trace_rv=False)
def adadelta(system_dict, learning_rate, rho=0.9, epsilon=1e-06, weight_decay=0, clipnorm=0.0, clipvalue=0.0):
'''
Select Adadelta optimizer
Args:
system_dict (dict): System dictionary storing experiment state and set variables
learning_rate (float): Initial base learning rate
        rho (float): Exponential decay rate for the running average of squared gradients
weight_decay (float): Value for regularizing weights post every update
epsilon (float): A value to avoid division by zero
clipnorm (float): Gradient clipping factor
clipvalue (float): Value for clipping
Returns:
dict: updated system dict
'''
system_dict["local"]["optimizer"] = "adadelta";
system_dict["hyper-parameters"]["learning_rate"] = learning_rate;
system_dict["hyper-parameters"]["optimizer"]["name"] = "adadelta";
system_dict["hyper-parameters"]["optimizer"]["params"]["lr"] = learning_rate;
system_dict["hyper-parameters"]["optimizer"]["params"]["rho"] = rho;
system_dict["hyper-parameters"]["optimizer"]["params"]["epsilon"] = epsilon;
system_dict["hyper-parameters"]["optimizer"]["params"]["weight_decay"] = weight_decay;
system_dict["hyper-parameters"]["optimizer"]["params"]["clipnorm"] = clipnorm;
system_dict["hyper-parameters"]["optimizer"]["params"]["clipvalue"] = clipvalue;
return system_dict;
@accepts(dict, [int, float], learning_rate_decay=[int, float], weight_decay=[int, float], epsilon=[int, float],
clipnorm=[int, float], clipvalue=[int, float], post_trace=False)
#@TraceFunction(trace_args=False, trace_rv=False)
def adagrad(system_dict, learning_rate, learning_rate_decay=0, weight_decay=0, epsilon=0, clipnorm=0.0, clipvalue=0.0):
'''
Select Adagrad optimizer
Args:
system_dict (dict): System dictionary storing experiment state and set variables
learning_rate (float): Initial base learning rate
learning_rate_decay (float): Learning rate decay factor
weight_decay (float): Value for regularizing weights post every update
epsilon (float): A value to avoid division by zero
clipnorm (float): Gradient clipping factor
clipvalue (float): Value for clipping
Returns:
dict: updated system dict
'''
system_dict["local"]["optimizer"] = "adagrad";
system_dict["hyper-parameters"]["learning_rate"] = learning_rate;
system_dict["hyper-parameters"]["optimizer"]["name"] = "adagrad";
system_dict["hyper-parameters"]["optimizer"]["params"]["lr"] = learning_rate;
system_dict["hyper-parameters"]["optimizer"]["params"]["lr_decay"] = learning_rate_decay;
system_dict["hyper-parameters"]["optimizer"]["params"]["epsilon"] = epsilon;
system_dict["hyper-parameters"]["optimizer"]["params"]["weight_decay"] = weight_decay;
system_dict["hyper-parameters"]["optimizer"]["params"]["clipnorm"] = clipnorm;
system_dict["hyper-parameters"]["optimizer"]["params"]["clipvalue"] = clipvalue;
return system_dict;
/raven_framework-2.3-cp39-cp39-win_amd64.whl/ravenframework/OutStreams/PlotInterfaces/SyntheticCloud.py
import matplotlib.pyplot as plt
from .PlotInterface import PlotInterface
from ...utils import InputData, InputTypes
class SyntheticCloud(PlotInterface):
"""
Plots the training data along with a cloud of sampled data for synthetic histories.
"""
@classmethod
def getInputSpecification(cls):
"""
Method to get a reference to a class that specifies the input data for class "cls".
@ In, cls, the class for which we are retrieving the specification
@ Out, inputSpecification, InputData.ParameterInput, class to use for specifying the input of cls.
"""
spec = super().getInputSpecification()
spec.addSub(InputData.parameterInputFactory('training', contentType=InputTypes.StringType,
descr=r"""The name of the RAVEN DataObject from which the training (or original) data should
be taken for this plotter.
This should be the data used to train the surrogate."""))
spec.addSub(InputData.parameterInputFactory('samples', contentType=InputTypes.StringType,
descr=r"""The name of the RAVEN DataObject from which the sampled synthetic histories should
be taken for this plotter."""))
spec.addSub(InputData.parameterInputFactory('macroParam', contentType=InputTypes.StringType,
descr=r"""Name of the macro variable (e.g. Year)."""))
spec.addSub(InputData.parameterInputFactory('microParam', contentType=InputTypes.StringType,
descr=r"""Name of the micro variable or pivot parameter (e.g. Time)."""))
spec.addSub(InputData.parameterInputFactory('variables', contentType=InputTypes.StringListType,
descr=r"""Name of the signal variables to plot."""))
return spec
def __init__(self):
"""
Init of Base class
@ In, None
@ Out, None
"""
super().__init__()
    self.printTag = 'SyntheticCloud Plot'
self.training = None # DataObject with training data
self.trainingName = None # name of training D.O.
self.samples = None # DataObject with sample data
self.samplesName = None # name of samples D.O.
self.macroName = None # name of macro parameter (e.g. Year)
self.microName = None # name of micro parameter (e.g. Time)
self.variables = None # variable names to plot
self.clusterName = '_ROM_Cluster' # TODO magic name
def handleInput(self, spec):
"""
Loads the input specs for this object.
@ In, spec, InputData.ParameterInput, input specifications
@ Out, None
"""
super().handleInput(spec)
self.trainingName = spec.findFirst('training').value
self.samplesName = spec.findFirst('samples').value
self.macroName = spec.findFirst('macroParam').value
self.microName = spec.findFirst('microParam').value
self.variables = spec.findFirst('variables').value
    # checker; this should be superseded by "required" in input params
if self.trainingName is None:
self.raiseAnError(IOError, "Missing <training> node!")
if self.samplesName is None:
self.raiseAnError(IOError, "Missing <samples> node!")
def initialize(self, stepEntities):
"""
Function to initialize the OutStream. It basically looks for the "data"
object and links it to the system.
      @ In, stepEntities, dict, contains all the Objects that are going to be used in the
        current step. The sources are searched within this dictionary.
@ Out, None
"""
train = self.findSource(self.trainingName, stepEntities)
if train is None:
self.raiseAnError(IOError, f'No input named "{self.trainingName}" was found in the Step for Plotter "{self.name}"!')
if train.isEmpty:
self.raiseAnError(IOError, f'Data object "{self.trainingName}" is empty!')
self.training = train
sample = self.findSource(self.samplesName, stepEntities)
if sample is None:
self.raiseAnError(IOError, f'No input named "{self.samplesName}" was found in the Step for Plotter "{self.name}"!')
if sample.isEmpty:
self.raiseAnError(IOError, f'Data object "{self.samplesName}" is empty!')
if self.clusterName in sample.getVars():
self.raiseAnError(IOError, f'Data object "{self.samplesName}" uses clusters! For this plotting, please take full samples.')
self.samples = sample
def run(self):
"""
Main run method.
@ In, None
@ Out, None
"""
tTag = self.training.sampleTag
sTag = self.samples.sampleTag
training = self.training.asDataset()
samples = self.samples.asDataset()
alpha = max(0.05, .5/len(samples))
varNames = self.variables
numVars = len(varNames)
# use the len of macro, cluster from samples to build plots
macro = samples[self.macroName]
figCounter = 0
for m, mac in enumerate(macro):
figCounter += 1
fig, axes = plt.subplots(numVars, 1, sharex=True)
if numVars == 1:
axes = [axes]
axes[-1].set_xlabel(self.microName)
mSamples = samples.sel({self.macroName: mac}, drop=True)
mTraining = None
if self.macroName in training:
if int(mac) in training[self.macroName]:
if self.macroName in training.dims:
mTraining = training.drop_sel({self.macroName: mac})
else:
mTraining = training.where(training[self.macroName]==mac, drop=True).squeeze()
for v, var in enumerate(varNames):
ax = axes[v]
# plot cloud of sample data
for s in mSamples[sTag].values:
samp = mSamples[{sTag: s}]
ax.plot(samp[self.microName].values, samp[var].values, 'b-.', alpha=alpha)
ax.set_title(f'{var}, {self.macroName} {int(mac)}')
ax.set_ylabel(var)
if mTraining is not None:
ax.plot(mTraining[self.microName].values, mTraining[var].values, 'k-.')
filename = f'{self.name}_{m}.png'
plt.savefig(filename)
self.raiseAMessage(f'Wrote "{filename}".')
/eko-0.13.5-py3-none-any.whl/ekore/anomalous_dimensions/unpolarized/space_like/as4/gnsp.py
r"""The unpolarized, space-like anomalous dimension :math:`\gamma_{ns,+}^{(3)}`."""
import numba as nb
from .....harmonics import cache as c
from .....harmonics.log_functions import lm11m1, lm12m1, lm13m1
from .gnsm import gamma_ns_nf3
@nb.njit(cache=True)
def gamma_nsp_nf2(n, cache):
r"""Return the parametrized singlet-like non-singlet part proportional to :math:`nf^2`.
Parameters
----------
n : complex
Mellin moment
cache: numpy.ndarray
Harmonic sum cache
Returns
-------
complex
|N3LO| singlet-like non-singlet anomalous dimension :math:`\gamma_{ns,+}^{(3)}|_{nf^2}`
"""
S1 = c.get(c.S1, cache, n)
S2 = c.get(c.S2, cache, n)
S3 = c.get(c.S3, cache, n)
Lm11m1 = lm11m1(n, S1)
Lm12m1 = lm12m1(n, S1, S2)
Lm13m1 = lm13m1(n, S1, S2, S3)
return (
-193.862483821757
- 18.962962962962962 / n**5
+ 99.1604938271605 / n**4
- 226.44075306899038 / n**3
+ 395.60497732877303 / n**2
+ 278.2205375565073 / n
+ 59.46630017646719 / (1.0 + n) ** 3
- 152.70402416764668 / (1.0 + n) ** 2
- 94.57207315818547 / (2.0 + n)
+ 195.5772257829161 * S1
- (517.9354004395117 * S1) / n**2
+ (26.68861454046639 * S1) / n
+ 1.5006487633206929 * Lm11m1
+ 113.48340560825889 * Lm12m1
+ 13.865450025251006 * Lm13m1
)
@nb.njit(cache=True)
def gamma_nsp_nf1(n, cache):
r"""Return the parametrized singlet-like non-singlet part proportional to :math:`nf^1`.
Parameters
----------
n : complex
Mellin moment
cache: numpy.ndarray
Harmonic sum cache
Returns
-------
complex
|N3LO| singlet-like non-singlet anomalous dimension :math:`\gamma_{ns,+}^{(3)}|_{nf^1}`
"""
S1 = c.get(c.S1, cache, n)
S2 = c.get(c.S2, cache, n)
S3 = c.get(c.S3, cache, n)
Lm11m1 = lm11m1(n, S1)
Lm12m1 = lm12m1(n, S1, S2)
Lm13m1 = lm13m1(n, S1, S2, S3)
return (
5550.285178175209
- 126.41975308641975 / n**6
+ 752.1975308641976 / n**5
- 2253.1105700880144 / n**4
+ 5247.1769880520205 / n**3
- 8769.153217295072 / n**2
- 5834.355552528428 / n
+ 537.8609133198307 / (1.0 + n) ** 3
- 718.3874592628895 / (1.0 + n) ** 2
+ 2487.96294221855 / (2.0 + n)
- 5171.916129085788 * S1
+ (12894.65275887218 * S1) / n**2
- (2741.830025124657 * S1) / n
- 849.8232086542307 * Lm11m1
- 3106.3285877376907 * Lm12m1
- 399.22204467960154 * Lm13m1
)
@nb.njit(cache=True)
def gamma_nsp_nf0(n, cache):
r"""Return the parametrized singlet-like non-singlet part proportional to :math:`nf^0`.
Parameters
----------
n : complex
Mellin moment
cache: numpy.ndarray
Harmonic sum cache
Returns
-------
complex
|N3LO| singlet-like non-singlet anomalous dimension :math:`\gamma_{ns,+}^{(3)}|_{nf^0}`
"""
S1 = c.get(c.S1, cache, n)
S2 = c.get(c.S2, cache, n)
S3 = c.get(c.S3, cache, n)
Lm11m1 = lm11m1(n, S1)
Lm12m1 = lm12m1(n, S1, S2)
Lm13m1 = lm13m1(n, S1, S2, S3)
return (
-23391.315223909038
- 252.8395061728395 / n**7
+ 1580.2469135802469 / n**6
- 5806.800104704373 / n**5
+ 14899.91711929902 / n**4
- 28546.38768506619 / n**3
+ 50759.65541232588 / n**2
+ 21477.757730073346 / n
+ 47399.00434062458 / (1.0 + n) ** 3
- 15176.296853013831 / (1.0 + n) ** 2
- 11103.411980157494 / (2.0 + n)
+ 20702.353028966703 * S1
- (73498.98594171858 * S1) / n**2
+ (16950.937339235086 * S1) / n
- 43731.12143482942 * Lm11m1
- 2518.9090401926924 * Lm12m1
- 973.3270027901576 * Lm13m1
)
@nb.njit(cache=True)
def gamma_nsp(n, nf, cache):
r"""Compute the |N3LO| singlet-like non-singlet anomalous dimension.
Parameters
----------
n : complex
Mellin moment
nf : int
Number of active flavors
cache: numpy.ndarray
Harmonic sum cache
Returns
-------
complex
|N3LO| singlet-like non-singlet anomalous dimension
:math:`\gamma_{ns,+}^{(3)}(N)`
"""
return (
gamma_nsp_nf0(n, cache)
+ nf * gamma_nsp_nf1(n, cache)
+ nf**2 * gamma_nsp_nf2(n, cache)
+ nf**3 * gamma_ns_nf3(n, cache)
)
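
# For reference, gamma_nsp above assembles the full N3LO result as a polynomial
# in the number of active flavors nf:
#
#   gamma_{ns,+}^{(3)}(N) = gamma_nsp_nf0(N)
#                           + nf    * gamma_nsp_nf1(N)
#                           + nf**2 * gamma_nsp_nf2(N)
#                           + nf**3 * gamma_ns_nf3(N)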
/vertica_python-1.3.4-py3-none-any.whl/vertica_python/vertica/messages/frontend_messages/__init__.py
# Copyright (c) 2013-2017 Uber Technologies, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import print_function, division, absolute_import
from .bind import Bind
from .cancel_request import CancelRequest
from .close import Close
from .copy_data import CopyData
from .copy_done import CopyDone
from .copy_error import CopyError
from .copy_fail import CopyFail
from .describe import Describe
from .end_of_batch_request import EndOfBatchRequest
from .execute import Execute
from .flush import Flush
from .load_balance_request import LoadBalanceRequest
from .parse import Parse
from .password import Password
from .query import Query
from .ssl_request import SslRequest
from .startup import Startup
from .sync import Sync
from .terminate import Terminate
from .verified_files import VerifiedFiles
__all__ = ['Bind', 'CancelRequest', 'Close', 'CopyData', 'CopyDone', 'CopyError',
'CopyFail', 'Describe', 'EndOfBatchRequest', 'Execute', 'Flush',
'LoadBalanceRequest', 'Parse', 'Password', 'Query', 'SslRequest', 'Startup',
'Sync', 'Terminate', 'VerifiedFiles']
/rexsio_agent-0.0.8-py3-none-any.whl/agent/commands/service_status.py
import docker
from agent.commands.utils import prepare_message_to_send
from agent.constants import SERVICE_DOWN, SERVICE_UP, SERVICES_STATUS
from agent.services.utils import get_services_id_list
def check_service_statuses():
    """Check the Docker containers behind every managed service.

    Returns a SERVICES_STATUS message ready to send to the backend together
    with the ids of services whose containers are not all running.
    """
service_id_list = get_services_id_list()
client = docker.from_env()
service_status_list = _get_services_status_list(service_id_list, client.containers)
services_to_restart = _get_services_id_to_restart(service_status_list)
service_status_body = {"statuses":service_status_list}
service_status_message = prepare_message_to_send(message_type=SERVICES_STATUS, body=service_status_body)
return service_status_message, services_to_restart
def _get_services_status_list(id_list, containers):
statuses_list = [_get_service_status(containers, service_id) for service_id in id_list]
services_statuses = [dict(serviceId=service_id, status=status) for service_id, status in
list(zip(id_list, statuses_list))]
return _remove_services_with_none_status(services_statuses)
def _remove_services_with_none_status(services_statuses):
services_without_none_status = []
for service in services_statuses:
if service['status'] is not None:
services_without_none_status.append(service)
return services_without_none_status
def _get_service_status(containers, service_id):
service_filter = service_id.lower()
service_containers = containers.list(all=True, filters=dict(name=service_filter))
if not service_containers:
service_containers = containers.list(all=True, filters=dict(name=service_filter.replace('-', '')))
if not service_containers:
return None
service_containers_status = [container.status for container in service_containers]
return SERVICE_UP if _are_all_containers_running(service_containers_status) else SERVICE_DOWN
def _are_all_containers_running(service_containers_status):
return all(container_status == 'running' for container_status in service_containers_status)
def _get_services_id_to_restart(service_status_list):
return [service['serviceId'] for service in service_status_list if service['status'] == SERVICE_DOWN]
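
# Minimal usage sketch: assumes a reachable Docker daemon and that the agent's
# service registry consulted by get_services_id_list() is populated.
if __name__ == "__main__":
    message, to_restart = check_service_statuses()
    print(message)                          # serialized SERVICES_STATUS payload
    for service_id in to_restart:
        print("service needs restart:", service_id)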
/pyphi-1.2.0.tar.gz/pyphi-1.2.0/LICENSE.md
GNU General Public License
==========================
Version 3, 29 June 2007
Copyright © 2007 Free Software Foundation, Inc. <<http://fsf.org/>>
Everyone is permitted to copy and distribute verbatim copies of this license
document, but changing it is not allowed.
## Preamble
The GNU General Public License is a free, copyleft license for software and other
kinds of works.
The licenses for most software and other practical works are designed to take away
your freedom to share and change the works. By contrast, the GNU General Public
License is intended to guarantee your freedom to share and change all versions of a
program--to make sure it remains free software for all its users. We, the Free
Software Foundation, use the GNU General Public License for most of our software; it
applies also to any other work released this way by its authors. You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not price. Our General
Public Licenses are designed to make sure that you have the freedom to distribute
copies of free software (and charge for them if you wish), that you receive source
code or can get it if you want it, that you can change the software or use pieces of
it in new free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you these rights or
asking you to surrender the rights. Therefore, you have certain responsibilities if
you distribute copies of the software, or if you modify it: responsibilities to
respect the freedom of others.
For example, if you distribute copies of such a program, whether gratis or for a fee,
you must pass on to the recipients the same freedoms that you received. You must make
sure that they, too, receive or can get the source code. And you must show them these
terms so they know their rights.
Developers that use the GNU GPL protect your rights with two steps: (1) assert
copyright on the software, and (2) offer you this License giving you legal permission
to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains that there is
no warranty for this free software. For both users' and authors' sake, the GPL
requires that modified versions be marked as changed, so that their problems will not
be attributed erroneously to authors of previous versions.
Some devices are designed to deny users access to install or run modified versions of
the software inside them, although the manufacturer can do so. This is fundamentally
incompatible with the aim of protecting users' freedom to change the software. The
systematic pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we have designed
this version of the GPL to prohibit the practice for those products. If such problems
arise substantially in other domains, we stand ready to extend this provision to
those domains in future versions of the GPL, as needed to protect the freedom of
users.
Finally, every program is threatened constantly by software patents. States should
not allow patents to restrict development and use of software on general-purpose
computers, but in those that do, we wish to avoid the special danger that patents
applied to a free program could make it effectively proprietary. To prevent this, the
GPL assures that patents cannot be used to render the program non-free.
The precise terms and conditions for copying, distribution and modification follow.
## TERMS AND CONDITIONS
### 0. Definitions.
“This License” refers to version 3 of the GNU General Public License.
“Copyright” also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
“The Program” refers to any copyrightable work licensed under this
License. Each licensee is addressed as “you”. “Licensees” and
“recipients” may be individuals or organizations.
To “modify” a work means to copy from or adapt all or part of the work in
a fashion requiring copyright permission, other than the making of an exact copy. The
resulting work is called a “modified version” of the earlier work or a
work “based on” the earlier work.
A “covered work” means either the unmodified Program or a work based on
the Program.
To “propagate” a work means to do anything with it that, without
permission, would make you directly or secondarily liable for infringement under
applicable copyright law, except executing it on a computer or modifying a private
copy. Propagation includes copying, distribution (with or without modification),
making available to the public, and in some countries other activities as well.
To “convey” a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through a computer
network, with no transfer of a copy, is not conveying.
An interactive user interface displays “Appropriate Legal Notices” to the
extent that it includes a convenient and prominently visible feature that (1)
displays an appropriate copyright notice, and (2) tells the user that there is no
warranty for the work (except to the extent that warranties are provided), that
licensees may convey the work under this License, and how to view a copy of this
License. If the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
### 1. Source Code.
The “source code” for a work means the preferred form of the work for
making modifications to it. “Object code” means any non-source form of a
work.
A “Standard Interface” means an interface that either is an official
standard defined by a recognized standards body, or, in the case of interfaces
specified for a particular programming language, one that is widely used among
developers working in that language.
The “System Libraries” of an executable work include anything, other than
the work as a whole, that (a) is included in the normal form of packaging a Major
Component, but which is not part of that Major Component, and (b) serves only to
enable use of the work with that Major Component, or to implement a Standard
Interface for which an implementation is available to the public in source code form.
A “Major Component”, in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system (if any) on which
the executable work runs, or a compiler used to produce the work, or an object code
interpreter used to run it.
The “Corresponding Source” for a work in object code form means all the
source code needed to generate, install, and (for an executable work) run the object
code and to modify the work, including scripts to control those activities. However,
it does not include the work's System Libraries, or general-purpose tools or
generally available free programs which are used unmodified in performing those
activities but which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for the work, and
the source code for shared libraries and dynamically linked subprograms that the work
is specifically designed to require, such as by intimate data communication or
control flow between those subprograms and other parts of the work.
The Corresponding Source need not include anything that users can regenerate
automatically from other parts of the Corresponding Source.
The Corresponding Source for a work in source code form is that same work.
### 2. Basic Permissions.
All rights granted under this License are granted for the term of copyright on the
Program, and are irrevocable provided the stated conditions are met. This License
explicitly affirms your unlimited permission to run the unmodified Program. The
output from running a covered work is covered by this License only if the output,
given its content, constitutes a covered work. This License acknowledges your rights
of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not convey, without
conditions so long as your license otherwise remains in force. You may convey covered
works to others for the sole purpose of having them make modifications exclusively
for you, or provide you with facilities for running those works, provided that you
comply with the terms of this License in conveying all material for which you do not
control copyright. Those thus making or running the covered works for you must do so
exclusively on your behalf, under your direction and control, on terms that prohibit
them from making any copies of your copyrighted material outside their relationship
with you.
Conveying under any other circumstances is permitted solely under the conditions
stated below. Sublicensing is not allowed; section 10 makes it unnecessary.
### 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological measure under any
applicable law fulfilling obligations under article 11 of the WIPO copyright treaty
adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention
of such measures.
When you convey a covered work, you waive any legal power to forbid circumvention of
technological measures to the extent such circumvention is effected by exercising
rights under this License with respect to the covered work, and you disclaim any
intention to limit operation or modification of the work as a means of enforcing,
against the work's users, your or third parties' legal rights to forbid circumvention
of technological measures.
### 4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you receive it, in any
medium, provided that you conspicuously and appropriately publish on each copy an
appropriate copyright notice; keep intact all notices stating that this License and
any non-permissive terms added in accord with section 7 apply to the code; keep
intact all notices of the absence of any warranty; and give all recipients a copy of
this License along with the Program.
You may charge any price or no price for each copy that you convey, and you may offer
support or warranty protection for a fee.
### 5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to produce it from
the Program, in the form of source code under the terms of section 4, provided that
you also meet all of these conditions:
* a) The work must carry prominent notices stating that you modified it, and giving a
relevant date.
* b) The work must carry prominent notices stating that it is released under this
License and any conditions added under section 7. This requirement modifies the
requirement in section 4 to “keep intact all notices”.
* c) You must license the entire work, as a whole, under this License to anyone who
comes into possession of a copy. This License will therefore apply, along with any
applicable section 7 additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no permission to license the
work in any other way, but it does not invalidate such permission if you have
separately received it.
* d) If the work has interactive user interfaces, each must display Appropriate Legal
Notices; however, if the Program has interactive interfaces that do not display
Appropriate Legal Notices, your work need not make them do so.
A compilation of a covered work with other separate and independent works, which are
not by their nature extensions of the covered work, and which are not combined with
it such as to form a larger program, in or on a volume of a storage or distribution
medium, is called an “aggregate” if the compilation and its resulting
copyright are not used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work in an aggregate
does not cause this License to apply to the other parts of the aggregate.
### 6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms of sections 4 and
5, provided that you also convey the machine-readable Corresponding Source under the
terms of this License, in one of these ways:
* a) Convey the object code in, or embodied in, a physical product (including a
physical distribution medium), accompanied by the Corresponding Source fixed on a
durable physical medium customarily used for software interchange.
* b) Convey the object code in, or embodied in, a physical product (including a
physical distribution medium), accompanied by a written offer, valid for at least
three years and valid for as long as you offer spare parts or customer support for
that product model, to give anyone who possesses the object code either (1) a copy of
the Corresponding Source for all the software in the product that is covered by this
License, on a durable physical medium customarily used for software interchange, for
a price no more than your reasonable cost of physically performing this conveying of
source, or (2) access to copy the Corresponding Source from a network server at no
charge.
* c) Convey individual copies of the object code with a copy of the written offer to
provide the Corresponding Source. This alternative is allowed only occasionally and
noncommercially, and only if you received the object code with such an offer, in
accord with subsection 6b.
* d) Convey the object code by offering access from a designated place (gratis or for
a charge), and offer equivalent access to the Corresponding Source in the same way
through the same place at no further charge. You need not require recipients to copy
the Corresponding Source along with the object code. If the place to copy the object
code is a network server, the Corresponding Source may be on a different server
(operated by you or a third party) that supports equivalent copying facilities,
provided you maintain clear directions next to the object code saying where to find
the Corresponding Source. Regardless of what server hosts the Corresponding Source,
you remain obligated to ensure that it is available for as long as needed to satisfy
these requirements.
* e) Convey the object code using peer-to-peer transmission, provided you inform
other peers where the object code and Corresponding Source of the work are being
offered to the general public at no charge under subsection 6d.
A separable portion of the object code, whose source code is excluded from the
Corresponding Source as a System Library, need not be included in conveying the
object code work.
A “User Product” is either (1) a “consumer product”, which
means any tangible personal property which is normally used for personal, family, or
household purposes, or (2) anything designed or sold for incorporation into a
dwelling. In determining whether a product is a consumer product, doubtful cases
shall be resolved in favor of coverage. For a particular product received by a
particular user, “normally used” refers to a typical or common use of
that class of product, regardless of the status of the particular user or of the way
in which the particular user actually uses, or expects or is expected to use, the
product. A product is a consumer product regardless of whether the product has
substantial commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
“Installation Information” for a User Product means any methods,
procedures, authorization keys, or other information required to install and execute
modified versions of a covered work in that User Product from a modified version of
its Corresponding Source. The information must suffice to ensure that the continued
functioning of the modified object code is in no case prevented or interfered with
solely because modification has been made.
If you convey an object code work under this section in, or with, or specifically for
use in, a User Product, and the conveying occurs as part of a transaction in which
the right of possession and use of the User Product is transferred to the recipient
in perpetuity or for a fixed term (regardless of how the transaction is
characterized), the Corresponding Source conveyed under this section must be
accompanied by the Installation Information. But this requirement does not apply if
neither you nor any third party retains the ability to install modified object code
on the User Product (for example, the work has been installed in ROM).
The requirement to provide Installation Information does not include a requirement to
continue to provide support service, warranty, or updates for a work that has been
modified or installed by the recipient, or for the User Product in which it has been
modified or installed. Access to a network may be denied when the modification itself
materially and adversely affects the operation of the network or violates the rules
and protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided, in accord with
this section must be in a format that is publicly documented (and with an
implementation available to the public in source code form), and must require no
special password or key for unpacking, reading or copying.
### 7. Additional Terms.
“Additional permissions” are terms that supplement the terms of this
License by making exceptions from one or more of its conditions. Additional
permissions that are applicable to the entire Program shall be treated as though they
were included in this License, to the extent that they are valid under applicable
law. If additional permissions apply only to part of the Program, that part may be
used separately under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option remove any
additional permissions from that copy, or from any part of it. (Additional
permissions may be written to require their own removal in certain cases when you
modify the work.) You may place additional permissions on material, added by you to a
covered work, for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you add to a
covered work, you may (if authorized by the copyright holders of that material)
supplement the terms of this License with terms:
* a) Disclaiming warranty or limiting liability differently from the terms of
sections 15 and 16 of this License; or
* b) Requiring preservation of specified reasonable legal notices or author
attributions in that material or in the Appropriate Legal Notices displayed by works
containing it; or
* c) Prohibiting misrepresentation of the origin of that material, or requiring that
modified versions of such material be marked in reasonable ways as different from the
original version; or
* d) Limiting the use for publicity purposes of names of licensors or authors of the
material; or
* e) Declining to grant rights under trademark law for use of some trade names,
trademarks, or service marks; or
* f) Requiring indemnification of licensors and authors of that material by anyone
who conveys the material (or modified versions of it) with contractual assumptions of
liability to the recipient, for any liability that these contractual assumptions
directly impose on those licensors and authors.
All other non-permissive additional terms are considered “further
restrictions” within the meaning of section 10. If the Program as you received
it, or any part of it, contains a notice stating that it is governed by this License
along with a term that is a further restriction, you may remove that term. If a
license document contains a further restriction but permits relicensing or conveying
under this License, you may add to a covered work material governed by the terms of
that license document, provided that the further restriction does not survive such
relicensing or conveying.
If you add terms to a covered work in accord with this section, you must place, in
the relevant source files, a statement of the additional terms that apply to those
files, or a notice indicating where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the form of a
separately written license, or stated as exceptions; the above requirements apply
either way.
### 8. Termination.
You may not propagate or modify a covered work except as expressly provided under
this License. Any attempt otherwise to propagate or modify it is void, and will
automatically terminate your rights under this License (including any patent licenses
granted under the third paragraph of section 11).
However, if you cease all violation of this License, then your license from a
particular copyright holder is reinstated (a) provisionally, unless and until the
copyright holder explicitly and finally terminates your license, and (b) permanently,
if the copyright holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is reinstated permanently
if the copyright holder notifies you of the violation by some reasonable means, this
is the first time you have received notice of violation of this License (for any
work) from that copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the licenses of
parties who have received copies or rights from you under this License. If your
rights have been terminated and not permanently reinstated, you do not qualify to
receive new licenses for the same material under section 10.
### 9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or run a copy of the
Program. Ancillary propagation of a covered work occurring solely as a consequence of
using peer-to-peer transmission to receive a copy likewise does not require
acceptance. However, nothing other than this License grants you permission to
propagate or modify any covered work. These actions infringe copyright if you do not
accept this License. Therefore, by modifying or propagating a covered work, you
indicate your acceptance of this License to do so.
### 10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically receives a license
from the original licensors, to run, modify and propagate that work, subject to this
License. You are not responsible for enforcing compliance by third parties with this
License.
An “entity transaction” is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an organization, or
merging organizations. If propagation of a covered work results from an entity
transaction, each party to that transaction who receives a copy of the work also
receives whatever licenses to the work the party's predecessor in interest had or
could give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if the predecessor
has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the rights granted or
affirmed under this License. For example, you may not impose a license fee, royalty,
or other charge for exercise of rights granted under this License, and you may not
initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging
that any patent claim is infringed by making, using, selling, offering for sale, or
importing the Program or any portion of it.
### 11. Patents.
A “contributor” is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The work thus
licensed is called the contributor's “contributor version”.
A contributor's “essential patent claims” are all patent claims owned or
controlled by the contributor, whether already acquired or hereafter acquired, that
would be infringed by some manner, permitted by this License, of making, using, or
selling its contributor version, but do not include claims that would be infringed
only as a consequence of further modification of the contributor version. For
purposes of this definition, “control” includes the right to grant patent
sublicenses in a manner consistent with the requirements of this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free patent license
under the contributor's essential patent claims, to make, use, sell, offer for sale,
import and otherwise run, modify and propagate the contents of its contributor
version.
In the following three paragraphs, a “patent license” is any express
agreement or commitment, however denominated, not to enforce a patent (such as an
express permission to practice a patent or covenant not to sue for patent
infringement). To “grant” such a patent license to a party means to make
such an agreement or commitment not to enforce a patent against the party.
If you convey a covered work, knowingly relying on a patent license, and the
Corresponding Source of the work is not available for anyone to copy, free of charge
and under the terms of this License, through a publicly available network server or
other readily accessible means, then you must either (1) cause the Corresponding
Source to be so available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner consistent with
the requirements of this License, to extend the patent license to downstream
recipients. “Knowingly relying” means you have actual knowledge that, but
for the patent license, your conveying the covered work in a country, or your
recipient's use of the covered work in a country, would infringe one or more
identifiable patents in that country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or arrangement, you
convey, or propagate by procuring conveyance of, a covered work, and grant a patent
license to some of the parties receiving the covered work authorizing them to use,
propagate, modify or convey a specific copy of the covered work, then the patent
license you grant is automatically extended to all recipients of the covered work and
works based on it.
A patent license is “discriminatory” if it does not include within the
scope of its coverage, prohibits the exercise of, or is conditioned on the
non-exercise of one or more of the rights that are specifically granted under this
License. You may not convey a covered work if you are a party to an arrangement with
a third party that is in the business of distributing software, under which you make
payment to the third party based on the extent of your activity of conveying the
work, and under which the third party grants, to any of the parties who would receive
the covered work from you, a discriminatory patent license (a) in connection with
copies of the covered work conveyed by you (or copies made from those copies), or (b)
primarily for and in connection with specific products or compilations that contain
the covered work, unless you entered into that arrangement, or that patent license
was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting any implied
license or other defenses to infringement that may otherwise be available to you
under applicable patent law.
### 12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or otherwise)
that contradict the conditions of this License, they do not excuse you from the
conditions of this License. If you cannot convey a covered work so as to satisfy
simultaneously your obligations under this License and any other pertinent
obligations, then as a consequence you may not convey it at all. For example, if you
agree to terms that obligate you to collect a royalty for further conveying from
those to whom you convey the Program, the only way you could satisfy both those terms
and this License would be to refrain entirely from conveying the Program.
### 13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have permission to link or
combine any covered work with a work licensed under version 3 of the GNU Affero
General Public License into a single combined work, and to convey the resulting work.
The terms of this License will continue to apply to the part which is the covered
work, but the special requirements of the GNU Affero General Public License, section
13, concerning interaction through a network will apply to the combination as such.
### 14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of the GNU
General Public License from time to time. Such new versions will be similar in spirit
to the present version, but may differ in detail to address new problems or concerns.
Each version is given a distinguishing version number. If the Program specifies that
a certain numbered version of the GNU General Public License “or any later
version” applies to it, you have the option of following the terms and
conditions either of that numbered version or of any later version published by the
Free Software Foundation. If the Program does not specify a version number of the GNU
General Public License, you may choose any version ever published by the Free
Software Foundation.
If the Program specifies that a proxy can decide which future versions of the GNU
General Public License can be used, that proxy's public statement of acceptance of a
version permanently authorizes you to choose that version for the Program.
Later license versions may give you additional or different permissions. However, no
additional obligations are imposed on any author or copyright holder as a result of
your choosing to follow a later version.
### 15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
PROVIDE THE PROGRAM “AS IS” WITHOUT WARRANTY OF ANY KIND, EITHER
EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE
QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE
DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
### 16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY
COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS
PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL,
INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE
OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE
WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
POSSIBILITY OF SUCH DAMAGES.
### 17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided above cannot be
given local legal effect according to their terms, reviewing courts shall apply local
law that most closely approximates an absolute waiver of all civil liability in
connection with the Program, unless a warranty or assumption of liability accompanies
a copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
## How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest possible use to
the public, the best way to achieve this is to make it free software which everyone
can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest to attach them
to the start of each source file to most effectively state the exclusion of warranty;
and each file should have at least the “copyright” line and a pointer to
where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short notice like this
when it starts in an interactive mode:
<program> Copyright (C) <year> <name of author>
This program comes with ABSOLUTELY NO WARRANTY; for details type 'show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type 'show c' for details.
The hypothetical commands `show w` and `show c` should show the appropriate parts of
the General Public License. Of course, your program's commands might be different;
for a GUI interface, you would use an “about box”.
You should also get your employer (if you work as a programmer) or school, if any, to
sign a “copyright disclaimer” for the program, if necessary. For more
information on this, and how to apply and follow the GNU GPL, see
<http://www.gnu.org/licenses/>.
The GNU General Public License does not permit incorporating your program into
proprietary programs. If your program is a subroutine library, you may consider it
more useful to permit linking proprietary applications with the library. If this is
what you want to do, use the GNU Lesser General Public License instead of this
License. But first, please read
<http://www.gnu.org/philosophy/why-not-lgpl.html>.
|
PypiClean
|
/horovod-adasum-0.18.2a0.tar.gz/horovod-adasum-0.18.2a0/third_party/HTTPRequest/README.md
|
# HTTPRequest
HTTPRequest is a single-header C++ library for making HTTP requests. Just include it in your project and use it. HTTPRequest has been tested on macOS, Windows, and Linux, but it should work on most Linux-based platforms. It supports both IPv4 and IPv6.
## Usage
```cpp
#include "HTTPRequest.hpp"
try
{
// you can pass http::InternetProtocol::V6 to Request to make an IPv6 request
http::Request request("http://test.com/test");
// send a get request
http::Response response = request.send("GET");
std::cout << std::string(response.body.begin(), response.body.end()) << std::endl; // print the result
// send a post request
response = request.send("POST", "foo=1&bar=baz", {
"Content-Type: application/x-www-form-urlencoded"
});
std::cout << std::string(response.body.begin(), response.body.end()) << std::endl; // print the result
// pass parameters as a map
std::map<std::string, std::string> parameters = {{"foo", "1"}, {"bar", "baz"}};
response = request.send("POST", parameters, {
"Content-Type: application/x-www-form-urlencoded"
});
std::cout << std::string(response.body.begin(), response.body.end()) << std::endl; // print the result
}
catch (const std::exception& e)
{
std::cerr << "Request failed, error: " << e.what() << std::endl;
}
```
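For reference, the sketch below is a complete, self-contained program that makes an IPv6 request. It is an illustrative sketch based only on the calls shown above, and it assumes, as the comment in the usage example suggests, that `http::InternetProtocol::V6` can be passed to the `Request` constructor alongside the URL.
```cpp
#include <exception>
#include <iostream>
#include <string>

#include "HTTPRequest.hpp"

int main()
{
    try
    {
        // Assumption: the protocol enum is accepted next to the URL, as hinted above
        http::Request request("http://test.com/test", http::InternetProtocol::V6);

        // send a plain GET request over IPv6 and print the body
        const http::Response response = request.send("GET");
        std::cout << std::string(response.body.begin(), response.body.end()) << std::endl;
    }
    catch (const std::exception& e)
    {
        std::cerr << "Request failed, error: " << e.what() << std::endl;
        return 1;
    }
    return 0;
}
```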
## License
The HTTPRequest codebase is licensed under the BSD license. Please refer to the LICENSE file for detailed information.
|
PypiClean
|
/django-folderless-0.5.0.tar.gz/django-folderless-0.5.0/folderless/static/folderless/js/vendor/jquery.iframe-transport.js
|
(function (factory) {
'use strict';
if (typeof define === 'function' && define.amd) {
// Register as an anonymous AMD module:
define(['jquery'], factory);
} else {
// Browser globals:
factory(window.jQuery);
}
}(function ($) {
'use strict';
// Helper variable to create unique names for the transport iframes:
var counter = 0;
// The iframe transport accepts four additional options:
// options.fileInput: a jQuery collection of file input fields
// options.paramName: the parameter name for the file form data,
// overrides the name property of the file input field(s),
// can be a string or an array of strings.
// options.formData: an array of objects with name and value properties,
// equivalent to the return data of .serializeArray(), e.g.:
// [{name: 'a', value: 1}, {name: 'b', value: 2}]
// options.initialIframeSrc: the URL of the initial iframe src,
// by default set to "javascript:false;"
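// For illustration (a sketch, not part of the original plugin): once this
// transport is registered, a hidden-iframe file upload can be issued through
// jQuery's regular ajax API by requesting the "iframe" dataType; the converters
// at the bottom of this file then turn the iframe document into text/JSON/HTML/XML.
// The URL and field names below are placeholders:
//
// $.ajax({
//     url: '/upload',                       // hypothetical endpoint
//     type: 'POST',
//     dataType: 'iframe json',              // iframe transport + iframe->json converter
//     fileInput: $('input[type="file"]'),   // file input(s) to submit
//     paramName: 'files[]',                 // overrides the inputs' name attribute
//     formData: [{name: 'csrf_token', value: 'abc123'}]
// }).done(function (result) {
//     console.log(result);
// });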
$.ajaxTransport('iframe', function (options) {
if (options.async) {
// javascript:false as initial iframe src
// prevents warning popups on HTTPS in IE6:
/*jshint scripturl: true */
var initialIframeSrc = options.initialIframeSrc || 'javascript:false;',
/*jshint scripturl: false */
form,
iframe,
addParamChar;
return {
send: function (_, completeCallback) {
form = $('<form style="display:none;"></form>');
form.attr('accept-charset', options.formAcceptCharset);
addParamChar = /\?/.test(options.url) ? '&' : '?';
// XDomainRequest only supports GET and POST:
if (options.type === 'DELETE') {
options.url = options.url + addParamChar + '_method=DELETE';
options.type = 'POST';
} else if (options.type === 'PUT') {
options.url = options.url + addParamChar + '_method=PUT';
options.type = 'POST';
} else if (options.type === 'PATCH') {
options.url = options.url + addParamChar + '_method=PATCH';
options.type = 'POST';
}
// IE versions below IE8 cannot set the name property of
// elements that have already been added to the DOM,
// so we set the name along with the iframe HTML markup:
counter += 1;
iframe = $(
'<iframe src="' + initialIframeSrc +
'" name="iframe-transport-' + counter + '"></iframe>'
).bind('load', function () {
var fileInputClones,
paramNames = $.isArray(options.paramName) ?
options.paramName : [options.paramName];
iframe
.unbind('load')
.bind('load', function () {
var response;
// Wrap in a try/catch block to catch exceptions thrown
// when trying to access cross-domain iframe contents:
try {
response = iframe.contents();
// Google Chrome and Firefox do not throw an
// exception when calling iframe.contents() on
// cross-domain requests, so we unify the response:
if (!response.length || !response[0].firstChild) {
throw new Error();
}
} catch (e) {
response = undefined;
}
// The complete callback returns the
// iframe content document as response object:
completeCallback(
200,
'success',
{'iframe': response}
);
// Fix for IE endless progress bar activity bug
// (happens on form submits to iframe targets):
$('<iframe src="' + initialIframeSrc + '"></iframe>')
.appendTo(form);
window.setTimeout(function () {
// Removing the form in a setTimeout call
// allows Chrome's developer tools to display
// the response result
form.remove();
}, 0);
});
form
.prop('target', iframe.prop('name'))
.prop('action', options.url)
.prop('method', options.type);
if (options.formData) {
$.each(options.formData, function (index, field) {
$('<input type="hidden"/>')
.prop('name', field.name)
.val(field.value)
.appendTo(form);
});
}
if (options.fileInput && options.fileInput.length &&
options.type === 'POST') {
fileInputClones = options.fileInput.clone();
// Insert a clone for each file input field:
options.fileInput.after(function (index) {
return fileInputClones[index];
});
if (options.paramName) {
options.fileInput.each(function (index) {
$(this).prop(
'name',
paramNames[index] || options.paramName
);
});
}
// Appending the file input fields to the hidden form
// removes them from their original location:
form
.append(options.fileInput)
.prop('enctype', 'multipart/form-data')
// enctype must be set as encoding for IE:
.prop('encoding', 'multipart/form-data');
// Remove the HTML5 form attribute from the input(s):
options.fileInput.removeAttr('form');
}
form.submit();
// Insert the file input fields at their original location
// by replacing the clones with the originals:
if (fileInputClones && fileInputClones.length) {
options.fileInput.each(function (index, input) {
var clone = $(fileInputClones[index]);
// Restore the original name and form properties:
$(input)
.prop('name', clone.prop('name'))
.attr('form', clone.attr('form'));
clone.replaceWith(input);
});
}
});
form.append(iframe).appendTo(document.body);
},
abort: function () {
if (iframe) {
// javascript:false as iframe src aborts the request
// and prevents warning popups on HTTPS in IE6.
// concat is used to avoid the "Script URL" JSLint error:
iframe
.unbind('load')
.prop('src', initialIframeSrc);
}
if (form) {
form.remove();
}
}
};
}
});
// The iframe transport returns the iframe content document as response.
// The following adds converters from iframe to text, json, html, xml
// and script.
// Please note that the Content-Type for JSON responses has to be text/plain
// or text/html, if the browser doesn't include application/json in the
// Accept header, else IE will show a download dialog.
// The Content-Type for XML responses on the other hand has to be always
// application/xml or text/xml, so IE properly parses the XML response.
// See also
// https://github.com/blueimp/jQuery-File-Upload/wiki/Setup#content-type-negotiation
$.ajaxSetup({
converters: {
'iframe text': function (iframe) {
return iframe && $(iframe[0].body).text();
},
'iframe json': function (iframe) {
return iframe && $.parseJSON($(iframe[0].body).text());
},
'iframe html': function (iframe) {
return iframe && $(iframe[0].body).html();
},
'iframe xml': function (iframe) {
var xmlDoc = iframe && iframe[0];
return xmlDoc && $.isXMLDoc(xmlDoc) ? xmlDoc :
$.parseXML((xmlDoc.XMLDocument && xmlDoc.XMLDocument.xml) ||
$(xmlDoc.body).html());
},
'iframe script': function (iframe) {
return iframe && $.globalEval($(iframe[0].body).text());
}
}
});
}));
|
PypiClean
|
/pulumi_alicloud-3.44.0a1693632188.tar.gz/pulumi_alicloud-3.44.0a1693632188/pulumi_alicloud/cms/get_group_metric_rules.py
|
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
__all__ = [
'GetGroupMetricRulesResult',
'AwaitableGetGroupMetricRulesResult',
'get_group_metric_rules',
'get_group_metric_rules_output',
]
@pulumi.output_type
class GetGroupMetricRulesResult:
"""
A collection of values returned by getGroupMetricRules.
"""
def __init__(__self__, dimensions=None, enable_state=None, group_id=None, group_metric_rule_name=None, id=None, ids=None, metric_name=None, name_regex=None, names=None, namespace=None, output_file=None, rules=None, status=None):
if dimensions and not isinstance(dimensions, str):
raise TypeError("Expected argument 'dimensions' to be a str")
pulumi.set(__self__, "dimensions", dimensions)
if enable_state and not isinstance(enable_state, bool):
raise TypeError("Expected argument 'enable_state' to be a bool")
pulumi.set(__self__, "enable_state", enable_state)
if group_id and not isinstance(group_id, str):
raise TypeError("Expected argument 'group_id' to be a str")
pulumi.set(__self__, "group_id", group_id)
if group_metric_rule_name and not isinstance(group_metric_rule_name, str):
raise TypeError("Expected argument 'group_metric_rule_name' to be a str")
pulumi.set(__self__, "group_metric_rule_name", group_metric_rule_name)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if ids and not isinstance(ids, list):
raise TypeError("Expected argument 'ids' to be a list")
pulumi.set(__self__, "ids", ids)
if metric_name and not isinstance(metric_name, str):
raise TypeError("Expected argument 'metric_name' to be a str")
pulumi.set(__self__, "metric_name", metric_name)
if name_regex and not isinstance(name_regex, str):
raise TypeError("Expected argument 'name_regex' to be a str")
pulumi.set(__self__, "name_regex", name_regex)
if names and not isinstance(names, list):
raise TypeError("Expected argument 'names' to be a list")
pulumi.set(__self__, "names", names)
if namespace and not isinstance(namespace, str):
raise TypeError("Expected argument 'namespace' to be a str")
pulumi.set(__self__, "namespace", namespace)
if output_file and not isinstance(output_file, str):
raise TypeError("Expected argument 'output_file' to be a str")
pulumi.set(__self__, "output_file", output_file)
if rules and not isinstance(rules, list):
raise TypeError("Expected argument 'rules' to be a list")
pulumi.set(__self__, "rules", rules)
if status and not isinstance(status, str):
raise TypeError("Expected argument 'status' to be a str")
pulumi.set(__self__, "status", status)
@property
@pulumi.getter
def dimensions(self) -> Optional[str]:
return pulumi.get(self, "dimensions")
@property
@pulumi.getter(name="enableState")
def enable_state(self) -> Optional[bool]:
return pulumi.get(self, "enable_state")
@property
@pulumi.getter(name="groupId")
def group_id(self) -> Optional[str]:
return pulumi.get(self, "group_id")
@property
@pulumi.getter(name="groupMetricRuleName")
def group_metric_rule_name(self) -> Optional[str]:
return pulumi.get(self, "group_metric_rule_name")
@property
@pulumi.getter
def id(self) -> str:
"""
The provider-assigned unique ID for this managed resource.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def ids(self) -> Sequence[str]:
return pulumi.get(self, "ids")
@property
@pulumi.getter(name="metricName")
def metric_name(self) -> Optional[str]:
return pulumi.get(self, "metric_name")
@property
@pulumi.getter(name="nameRegex")
def name_regex(self) -> Optional[str]:
return pulumi.get(self, "name_regex")
@property
@pulumi.getter
def names(self) -> Sequence[str]:
return pulumi.get(self, "names")
@property
@pulumi.getter
def namespace(self) -> Optional[str]:
return pulumi.get(self, "namespace")
@property
@pulumi.getter(name="outputFile")
def output_file(self) -> Optional[str]:
return pulumi.get(self, "output_file")
@property
@pulumi.getter
def rules(self) -> Sequence['outputs.GetGroupMetricRulesRuleResult']:
return pulumi.get(self, "rules")
@property
@pulumi.getter
def status(self) -> Optional[str]:
return pulumi.get(self, "status")
class AwaitableGetGroupMetricRulesResult(GetGroupMetricRulesResult):
# pylint: disable=using-constant-test
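    # The unreachable `yield` below makes __await__ a generator, so this result
    # object can also be consumed with `await` in async code; awaiting it simply
    # returns the already-computed field values.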
def __await__(self):
if False:
yield self
return GetGroupMetricRulesResult(
dimensions=self.dimensions,
enable_state=self.enable_state,
group_id=self.group_id,
group_metric_rule_name=self.group_metric_rule_name,
id=self.id,
ids=self.ids,
metric_name=self.metric_name,
name_regex=self.name_regex,
names=self.names,
namespace=self.namespace,
output_file=self.output_file,
rules=self.rules,
status=self.status)
def get_group_metric_rules(dimensions: Optional[str] = None,
enable_state: Optional[bool] = None,
group_id: Optional[str] = None,
group_metric_rule_name: Optional[str] = None,
ids: Optional[Sequence[str]] = None,
metric_name: Optional[str] = None,
name_regex: Optional[str] = None,
namespace: Optional[str] = None,
output_file: Optional[str] = None,
status: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetGroupMetricRulesResult:
"""
This data source provides the Cms Group Metric Rules of the current Alibaba Cloud user.
> **NOTE:** Available in v1.104.0+.
## Example Usage
Basic Usage
```python
import pulumi
import pulumi_alicloud as alicloud
example = alicloud.cms.get_group_metric_rules(ids=["4a9a8978-a9cc-55ca-aa7c-530ccd91ae57"],
name_regex="the_resource_name")
pulumi.export("firstCmsGroupMetricRuleId", example.rules[0].id)
```
:param str dimensions: The dimensions that specify the resources to be associated with the alert rule.
:param bool enable_state: Indicates whether the alert rule is enabled.
:param str group_id: The ID of the application group.
:param str group_metric_rule_name: The name of the alert rule.
:param Sequence[str] ids: A list of Group Metric Rule IDs.
:param str metric_name: The name of the metric.
:param str name_regex: A regex string to filter results by Group Metric Rule name.
:param str namespace: The namespace of the service.
    :param str output_file: The name of the file in which to save data source results (after running `pulumi preview`).
    :param str status: The status of the Group Metric Rule.
"""
__args__ = dict()
__args__['dimensions'] = dimensions
__args__['enableState'] = enable_state
__args__['groupId'] = group_id
__args__['groupMetricRuleName'] = group_metric_rule_name
__args__['ids'] = ids
__args__['metricName'] = metric_name
__args__['nameRegex'] = name_regex
__args__['namespace'] = namespace
__args__['outputFile'] = output_file
__args__['status'] = status
opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
__ret__ = pulumi.runtime.invoke('alicloud:cms/getGroupMetricRules:getGroupMetricRules', __args__, opts=opts, typ=GetGroupMetricRulesResult).value
return AwaitableGetGroupMetricRulesResult(
dimensions=pulumi.get(__ret__, 'dimensions'),
enable_state=pulumi.get(__ret__, 'enable_state'),
group_id=pulumi.get(__ret__, 'group_id'),
group_metric_rule_name=pulumi.get(__ret__, 'group_metric_rule_name'),
id=pulumi.get(__ret__, 'id'),
ids=pulumi.get(__ret__, 'ids'),
metric_name=pulumi.get(__ret__, 'metric_name'),
name_regex=pulumi.get(__ret__, 'name_regex'),
names=pulumi.get(__ret__, 'names'),
namespace=pulumi.get(__ret__, 'namespace'),
output_file=pulumi.get(__ret__, 'output_file'),
rules=pulumi.get(__ret__, 'rules'),
status=pulumi.get(__ret__, 'status'))
@_utilities.lift_output_func(get_group_metric_rules)
def get_group_metric_rules_output(dimensions: Optional[pulumi.Input[Optional[str]]] = None,
enable_state: Optional[pulumi.Input[Optional[bool]]] = None,
group_id: Optional[pulumi.Input[Optional[str]]] = None,
group_metric_rule_name: Optional[pulumi.Input[Optional[str]]] = None,
ids: Optional[pulumi.Input[Optional[Sequence[str]]]] = None,
metric_name: Optional[pulumi.Input[Optional[str]]] = None,
name_regex: Optional[pulumi.Input[Optional[str]]] = None,
namespace: Optional[pulumi.Input[Optional[str]]] = None,
output_file: Optional[pulumi.Input[Optional[str]]] = None,
status: Optional[pulumi.Input[Optional[str]]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetGroupMetricRulesResult]:
"""
This data source provides the Cms Group Metric Rules of the current Alibaba Cloud user.
> **NOTE:** Available in v1.104.0+.
## Example Usage
Basic Usage
```python
import pulumi
import pulumi_alicloud as alicloud
example = alicloud.cms.get_group_metric_rules(ids=["4a9a8978-a9cc-55ca-aa7c-530ccd91ae57"],
name_regex="the_resource_name")
pulumi.export("firstCmsGroupMetricRuleId", example.rules[0].id)
```
:param str dimensions: The dimensions that specify the resources to be associated with the alert rule.
:param bool enable_state: Indicates whether the alert rule is enabled.
:param str group_id: The ID of the application group.
:param str group_metric_rule_name: The name of the alert rule.
:param Sequence[str] ids: A list of Group Metric Rule IDs.
:param str metric_name: The name of the metric.
:param str name_regex: A regex string to filter results by Group Metric Rule name.
:param str namespace: The namespace of the service.
    :param str output_file: The name of the file in which to save data source results (after running `pulumi preview`).
    :param str status: The status of the Group Metric Rule.
"""
...
|
PypiClean
|
/django-canjs-0.1.2.tar.gz/django-canjs-0.1.2/canjs/static/canjs/1.1.1/can.mootools.min.js
|
(function(n,l){var c=n.can||{};if("undefined"===typeof GLOBALCAN||!1!==GLOBALCAN)n.can=c;c.isDeferred=function(a){var b=this.isFunction;return a&&b(a.then)&&b(a.pipe)};c.addEvent=function(a,b){this.__bindEvents||(this.__bindEvents={});var d=a.split(".")[0];this.__bindEvents[d]||(this.__bindEvents[d]=[]);this.__bindEvents[d].push({handler:b,name:a});return this};c.removeEvent=function(a,b){if(this.__bindEvents){for(var d=0,c=this.__bindEvents[a.split(".")[0]],f;d<c.length;)f=c[d],b&&f.handler===b||
!b&&f.name===a?c.splice(d,1):d++;return this}};c.dispatch=function(a){if(this.__bindEvents){var b=(this.__bindEvents[a.type.split(".")[0]]||[]).slice(0),d=this,e=[a].concat(a.data||[]);c.each(b,function(b){a.data=e.slice(1);b.handler.apply(d,e)})}};var Ma=/^\s*<(\w+)[^>]*>/,Na=function(a,b){b===l&&(b=Ma.test(a)&&RegExp.$1);a&&c.isFunction(a.replace)&&(a=a.replace(/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/gi,"<$1></$2>"));var d=document.createElement("div"),e=document.createElement("div");
"tbody"===b||"tfoot"===b||"thead"===b?(e.innerHTML="<table>"+a+"</table>",d=3===e.firstChild.nodeType?e.lastChild:e.firstChild):"tr"===b?(e.innerHTML="<table><tbody>"+a+"</tbody></table>",d=3===e.firstChild.nodeType?e.lastChild:e.firstChild.firstChild):"td"===b||"th"===b?(e.innerHTML="<table><tbody><tr>"+a+"</tr></tbody></table>",d=3===e.firstChild.nodeType?e.lastChild:e.firstChild.firstChild.firstChild):"option"===b?(e.innerHTML="<select>"+a+"</select>",d=3===e.firstChild.nodeType?e.lastChild:e.firstChild):
d.innerHTML=""+a;e={};d=d.childNodes;e.length=d.length;for(var f=0;f<d.length;f++)e[f]=d[f];return[].slice.call(e)};c.buildFragment=function(a){var b=Na(a),a=a.toString().match(/@@!!@@/g),d=document.createDocumentFragment(),a=null===a?0:a.length;c.each(b,function(a){d.appendChild(a)});0<a&&(d.lastChild&&3===d.lastChild.nodeType&&d.lastChild.textContent&&0<=d.lastChild.textContent.indexOf("@@!!@@"))&&(d.lastChild.textContent=d.lastChild.textContent.substring(0,d.lastChild.textContent.length-6*d.lastChild.textContent.length));
return d};var m=function(a,b){for(var d in b)b.hasOwnProperty(d)&&(a[d]=b[d])},z=function(a){if(!(this instanceof z))return new z;this._doneFuncs=[];this._failFuncs=[];this._resultArgs=null;this._status="";a&&a.call(this,this)};c.Deferred=z;c.when=z.when=function(){var a=c.makeArray(arguments);if(2>a.length){var b=a[0];return b&&c.isFunction(b.isResolved)&&c.isFunction(b.isRejected)?b:z().resolve(b)}var d=z(),e=0,f=[];c.each(a,function(b,c){b.done(function(){f[c]=2>arguments.length?arguments[0]:arguments;
++e==a.length&&d.resolve.apply(d,f)}).fail(function(){d.reject(arguments)})});return d};var ka=function(a,b){return function(d){var c=this._resultArgs=1<arguments.length?arguments[1]:[];return this.exec(d,this[a],c,b)}},la=function(a,b){return function(){var d=this;c.each(Array.prototype.slice.call(arguments),function(c,f,h){c&&(c.constructor===Array?h.callee.apply(d,c):(d._status===b&&c.apply(d,d._resultArgs||[]),d[a].push(c)))});return this}};m(z.prototype,{pipe:function(a,b){var d=c.Deferred();
this.done(function(){d.resolve(a.apply(this,arguments))});this.fail(function(){b?d.reject(b.apply(this,arguments)):d.reject.apply(d,arguments)});return d},resolveWith:ka("_doneFuncs","rs"),rejectWith:ka("_failFuncs","rj"),done:la("_doneFuncs","rs"),fail:la("_failFuncs","rj"),always:function(){var a=c.makeArray(arguments);a.length&&a[0]&&this.done(a[0]).fail(a[0]);return this},then:function(){var a=c.makeArray(arguments);1<a.length&&a[1]&&this.fail(a[1]);a.length&&a[0]&&this.done(a[0]);return this},
state:function(){switch(this._status){case "rs":return"resolved";case "rj":return"rejected";default:return"pending"}},isResolved:function(){return"rs"===this._status},isRejected:function(){return"rj"===this._status},reject:function(){return this.rejectWith(this,arguments)},resolve:function(){return this.resolveWith(this,arguments)},exec:function(a,b,d,e){if(""!==this._status)return this;this._status=e;c.each(b,function(b){b.apply(a,d)});return this}});c.each=function(a,b,d){var c=0,f;if(a)if("number"===
typeof a.length&&a.pop){a.attr&&a.attr("length");for(f=a.length;c<f&&!1!==b.call(d||a[c],a[c],c,a);c++);}else if(a.hasOwnProperty)for(f in a)if(a.hasOwnProperty(f)&&!1===b.call(d||a[f],a[f],f,a))break;return a};var P=Object.prototype.hasOwnProperty;c.isPlainObject=function(a){if(!a||"object"!==typeof a||a.nodeType||null!=a&&a==a.window)return!1;try{if(a.constructor&&!P.call(a,"constructor")&&!P.call(a.constructor.prototype,"isPrototypeOf"))return!1}catch(b){return!1}for(var d in a);return d===l||
P.call(a,d)};m=function(){var a,b,d,e,f,h=arguments[0]||{},g=1,i=arguments.length,s=!1;"boolean"===typeof h&&(s=h,h=arguments[1]||{},g=2);"object"!==typeof h&&!c.isFunction(h)&&(h={});i===g&&(h=this,--g);for(;g<i;g++)if(null!=(a=arguments[g]))for(b in a)d=h[b],e=a[b],h!==e&&(s&&e&&(c.isPlainObject(e)||(f=c.isArray(e)))?(f?(f=!1,d=d&&c.isArray(d)?d:[]):d=d&&c.isPlainObject(d)?d:{},h[b]=c.extend(s,d,e)):e!==l&&(h[b]=e));return h};c.extend=m;c.trim=function(a){return a&&a.trim()};c.makeArray=function(a){if(null==
a)return[];try{return Type.isEnumerable(a)&&"string"!=typeof a?Array.prototype.slice.call(a):[a]}catch(b){var d=[],c;for(c=0;c<a.length;++c)d.push(a[c]);return d}};c.isArray=function(a){return"array"===typeOf(a)};c.inArray=function(a,b){return!b?-1:Array.prototype.indexOf.call(b,a)};c.map=function(a,b){return Array.from(a||[]).map(b)};c.param=function(a){return Object.toQueryString(a)};c.isEmptyObject=function(a){return 0===Object.keys(a).length};c.proxy=function(a){var b=c.makeArray(arguments),a=
b.shift();return a.bind.apply(a,b)};c.isFunction=function(a){return"function"==typeOf(a)};c.bind=function(a,b){this.bind&&this.bind!==c.bind?this.bind(a,b):this.addEvent?this.addEvent(a,b):c.addEvent.call(this,a,b);return this};c.unbind=function(a,b){this.unbind&&this.unbind!==c.unbind?this.unbind(a,b):this.removeEvent?this.removeEvent(a,b):c.removeEvent.call(this,a,b);return this};c.trigger=function(a,b,d,e){e=e===l?!0:e;d=d||[];if(a.fireEvent)for(a=a[0]||a;a;){b.type||(b={type:b,target:a});var f=
a!==n?c.$(a).retrieve("events")[0]:a.retrieve("events");f&&f[b.type]&&f[b.type].keys.each(function(a){a.apply(this,[b].concat(d))},this);a=e&&a.parentNode?a.parentNode:null}else"string"===typeof b&&(b={type:b}),b.target=b.target||a,b.data=d,c.dispatch.call(a,b)};c.delegate=function(a,b,d){this.delegate?this.delegate(a,b,d):this.addEvent&&this.addEvent(b+":relay("+a+")",d);return this};c.undelegate=function(a,b,d){this.undelegate?this.undelegate(a,b,d):this.removeEvent&&this.removeEvent(b+":relay("+
a+")",d);return this};var ma={type:"method",success:l,error:l},Q=function(a,b){for(var d in a)b[d]="function"==typeof b[d]?function(){a[d].apply(a,arguments)}:d[a]};c.ajax=function(a){var b=c.Deferred(),d=c.extend({},a),e;for(e in ma)d[e]!==l&&(d[ma[e]]=d[e],delete d[e]);d.method=d.method||"get";var f=a.success,h=a.error;d.onSuccess=function(d){"json"===a.dataType&&(d=eval("("+d+")"));Q(g.xhr,b);b.resolve(d,"success",g.xhr);f&&f(d,"success",g.xhr)};d.onError=function(){Q(g.xhr,b);b.reject(g.xhr,"error");
h(g.xhr,"error")};var g=new Request(d);g.send();Q(g.xhr,b);return b};c.$=function(a){return a===n?n:$$(a)};var Oa=document.id;document.id=function(a){return a&&11===a.nodeType?a:Oa.apply(document,arguments)};c.append=function(a,b){"string"===typeof b&&(b=c.buildFragment(b));return a.grab(b)};c.filter=function(a,b){return a.filter(b)};c.data=function(a,b,d){return d===l?a[0].retrieve(b):a.store(b,d)};c.addClass=function(a,b){return a.addClass(b)};c.remove=function(a){a=a.filter(function(a){if(1!==
a.nodeType)a.parentNode.removeChild(a);else return!0});a.destroy();return a};var Pa=Element.prototype.destroy;Element.implement({destroy:function(){c.trigger(this,"destroyed",[],!1);for(var a=this.getElementsByTagName("*"),b=0,d;(d=a[b])!==l;b++)c.trigger(d,"destroyed",[],!1);Pa.apply(this,arguments)}});c.get=function(a,b){return a[b]};var Qa=Slick.uidOf;Slick.uidOf=function(a){return 1===a.nodeType||a===n?Qa(a):Math.random()};var Ra=/\=\=/,Sa=/([A-Z]+)([A-Z][a-z])/g,Ta=/([a-z\d])([A-Z])/g,Ua=/([a-z\d])([A-Z])/g,
na=/\{([^\}]+)\}/g,x=/"/g,Va=/'/g;c.extend(c,{esc:function(a){return(""+(null===a||a===l||isNaN(a)&&"NaN"===""+a?"":a)).replace(/&/g,"&").replace(/</g,"<").replace(/>/g,">").replace(x,""").replace(Va,"'")},getObject:function(a,b,d){var a=a?a.split("."):[],e=a.length,f,h=0,g,i,b=c.isArray(b)?b:[b||n];if(!e)return b[0];for(;b[h];){f=b[h];for(i=0;i<e-1&&/^f|^o/.test(typeof f);i++)f=a[i]in f?f[a[i]]:d&&(f[a[i]]={});if(/^f|^o/.test(typeof f)&&(g=a[i]in f?f[a[i]]:d&&(f[a[i]]={}),g!==l))return!1===
d&&delete f[a[i]],g;h++}},capitalize:function(a){return a.charAt(0).toUpperCase()+a.slice(1)},underscore:function(a){return a.replace(Ra,"/").replace(Sa,"$1_$2").replace(Ta,"$1_$2").replace(Ua,"_").toLowerCase()},sub:function(a,b,d){var e=[];e.push(a.replace(na,function(a,h){var g=c.getObject(h,b,d===l?d:!d);return/^f|^o/.test(typeof g)?(e.push(g),""):""+g}));return 1>=e.length?e[0]:e},replacer:na,undHash:/_|-/});var R=0;c.Construct=function(){if(arguments.length)return c.Construct.extend.apply(c.Construct,
arguments)};c.extend(c.Construct,{newInstance:function(){var a=this.instance(),b;a.setup&&(b=a.setup.apply(a,arguments));a.init&&a.init.apply(a,b||arguments);return a},_inherit:function(a,b,d){c.extend(d||a,a||{})},_overwrite:function(a,b,d,c){a[d]=c},setup:function(a){this.defaults=c.extend(!0,{},a.defaults,this.defaults)},instance:function(){R=1;var a=new this;R=0;return a},extend:function(a,b,d){function e(){if(!R)return this.constructor!==e&&arguments.length?arguments.callee.extend.apply(arguments.callee,
arguments):this.constructor.newInstance.apply(this.constructor,arguments)}"string"!=typeof a&&(d=b,b=a,a=null);d||(d=b,b=null);var d=d||{},f=this.prototype,h,g,i,s;s=this.instance();c.Construct._inherit(d,f,s);for(h in this)this.hasOwnProperty(h)&&(e[h]=this[h]);c.Construct._inherit(b,this,e);if(a){i=a.split(".");g=i.pop();i=f=c.getObject(i.join("."),n,!0);var l=c.underscore(a.replace(/\./g,"_")),t=c.underscore(g);f[g]=e}c.extend(e,{constructor:e,prototype:s,namespace:i,shortName:g,_shortName:t,fullName:a,
_fullName:l});e.prototype.constructor=e;g=[this].concat(c.makeArray(arguments));s=e.setup.apply(e,g);e.init&&e.init.apply(e,s||g);return e}});var p=function(a){return a&&(c.isArray(a)||c.isPlainObject(a)||a instanceof c.Observe)},K=function(a,b){return c.each(a,function(a){a&&a.unbind&&a.unbind("change"+b)})},S=function(a,b,d,e,f){e=e||j;f=f||j.List;a instanceof j?K([a],d._cid):a=c.isArray(a)?new f(a):new e(a);a.bind("change"+d._cid,function(){var e=c.makeArray(arguments),f=e.shift();e[0]=("*"===
b?[d.indexOf(a),e[0]]:[b,e[0]]).join(".");f.triggeredNS=f.triggeredNS||{};f.triggeredNS[d._cid]||(f.triggeredNS[d._cid]=!0,c.trigger(d,f,e))});return a},L=function(a,b,d){a.each(function(a,f){d[f]=p(a)&&c.isFunction(a[b])?a[b]():a});return d},oa=function(a){return function(){return c[a].apply(this,arguments)}},E=oa("addEvent"),pa=oa("removeEvent"),T=function(a){return c.isArray(a)?a:(""+a).split(".")},qa=1,F=0,U=[],V=[],Wa=0;c.cid=function(a,b){return a._cid?a._cid:a._cid=(b||"")+ ++Wa};var j=c.Observe=
c.Construct({bind:E,unbind:pa,id:"id",canMakeObserve:p,startBatch:function(a){F++;a&&V.push(a)},stopBatch:function(a,b){a?F=0:F--;if(0==F){var d=U.slice(0),e=V.slice(0);U=[];V=[];qa++;b&&this.startBatch();c.each(d,function(a){c.trigger.apply(c,a)});c.each(e,function(a){a})}},triggerBatch:function(a,b,d){if(!a._init){if(0==F)return c.trigger(a,b,d);U.push([a,{type:b,batchNum:qa},d])}},keys:function(a){var b=[];j.__reading&&j.__reading(a,"__keys");for(var d in a._data)b.push(d);return b}},{setup:function(a){this._data=
{};c.cid(this,".observe");this._init=1;this.attr(a);this.bind("change"+this._cid,c.proxy(this._changes,this));delete this._init},_changes:function(a,b,d,c,f){j.triggerBatch(this,{type:b,batchNum:a.batchNum},[c,f])},attr:function(a,b){var d=typeof a;if("string"!==d&&"number"!==d)return this._attrs(a,b);if(b===l)return j.__reading&&j.__reading(this,a),this._get(a);this._set(a,b);return this},each:function(){j.__reading&&j.__reading(this,"__keys");return c.each.apply(l,[this.__get()].concat(c.makeArray(arguments)))},
removeAttr:function(a){var a=T(a),b=a.shift(),d=this._data[b];if(a.length)return d.removeAttr(a);b in this._data&&(delete this._data[b],b in this.constructor.prototype||delete this[b],j.triggerBatch(this,"__keys",l),j.triggerBatch(this,"change",[b,"remove",l,d]),j.triggerBatch(this,b,[l,d]));return d},_get:function(a){var a=T(a),b=this.__get(a.shift());return a.length?b?b._get(a):l:b},__get:function(a){return a?this._data[a]:this._data},_set:function(a,b){var d=T(a),c=d.shift(),f=this.__get(c);if(p(f)&&
d.length)f._set(d,b);else{if(d.length)throw"can.Observe: Object does not exist";this.__convert&&(b=this.__convert(c,b));f||j.triggerBatch(this,"__keys",l);this.__set(c,b,f)}},__set:function(a,b,d){if(b!==d){var c=this.__get().hasOwnProperty(a)?"set":"add";this.___set(a,p(b)?S(b,a,this):b);j.triggerBatch(this,"change",[a,c,b,d]);d&&K([d],this._cid)}},___set:function(a,b){this._data[a]=b;a in this.constructor.prototype||(this[a]=b)},bind:E,unbind:pa,serialize:function(){return L(this,"serialize",{})},
_attrs:function(a,b){if(a===l)return L(this,"attr",{});var a=c.extend(!0,{},a),d,e=this,f;j.startBatch();this.each(function(d,g,i){f=a[g];f===l?b&&e.removeAttr(g):(e.__convert&&(f=e.__convert(g,f)),d!==f&&(d instanceof c.Observe&&f instanceof c.Observe&&K([d],e._cid),f instanceof c.Observe?e._set(g,f):p(d)&&p(f)?d.attr(f,i):d!=f&&e._set(g,f)),delete a[g])});for(d in a)f=a[d],this._set(d,f);j.stopBatch();return this}}),Xa=[].splice,M=j({setup:function(a,b){this.length=0;c.cid(this,".observe");this._init=
1;this.push.apply(this,c.makeArray(a||[]));this.bind("change"+this._cid,c.proxy(this._changes,this));c.extend(this,b);delete this._init},_changes:function(a,b,d,c,f){~b.indexOf(".")||("add"===d?(j.triggerBatch(this,d,[c,+b]),j.triggerBatch(this,"length",[this.length])):"remove"===d?(j.triggerBatch(this,d,[f,+b]),j.triggerBatch(this,"length",[this.length])):j.triggerBatch(this,d,[c,+b]));j.prototype._changes.apply(this,arguments)},__get:function(a){return a?this[a]:this},___set:function(a,b){this[a]=
b;+a>=this.length&&(this.length=+a+1)},serialize:function(){return L(this,"serialize",[])},splice:function(a,b){var d=c.makeArray(arguments),e;for(e=2;e<d.length;e++){var f=d[e];p(f)&&(d[e]=S(f,"*",this))}b===l&&(b=d[1]=this.length-a);e=Xa.apply(this,d);0<b&&(j.triggerBatch(this,"change",[""+a,"remove",l,e]),K(e,this._cid));2<d.length&&j.triggerBatch(this,"change",[""+a,"add",d.slice(2),e]);return e},_attrs:function(a,b){if(a===l)return L(this,"attr",[]);a=c.makeArray(a);j.startBatch();this._updateAttrs(a,
b);j.stopBatch()},_updateAttrs:function(a,b){for(var d=Math.min(a.length,this.length),c=0;c<d;c++){var f=this[c],h=a[c];p(f)&&p(h)?f.attr(h,b):f!=h&&this._set(c,h)}a.length>this.length?this.push.apply(this,a.slice(this.length)):a.length<this.length&&b&&this.splice(a.length)}});c.each({push:"length",unshift:0},function(a,b){M.prototype[b]=function(){for(var d=arguments[0]&&c.isArray(arguments[0])?arguments[0]:c.makeArray(arguments),e=a?this.length:0,f=0;f<d.length;f++){var h=d[f];p(h)&&(d[f]=S(h,"*",
this,this.constructor.Observe,this.constructor))}f=[][b].apply(this,d);(!this.comparator||!d.length)&&j.triggerBatch(this,"change",[""+e,"add",d,l]);return f}});c.each({pop:"length",shift:0},function(a,b){M.prototype[b]=function(){var d=arguments[0]&&c.isArray(arguments[0])?arguments[0]:c.makeArray(arguments),e=a&&this.length?this.length-1:0,d=[][b].apply(this,d);j.triggerBatch(this,"change",[""+e,"remove",l,[d]]);d&&d.unbind&&d.unbind("change"+this._cid);return d}});c.extend(M.prototype,{indexOf:function(a){this.attr("length");
return c.inArray(a,this)},join:[].join,slice:function(){return new this.constructor(Array.prototype.slice.apply(this,arguments))},concat:function(){var a=[];c.each(c.makeArray(arguments),function(b,d){a[d]=b instanceof c.Observe.List?b.serialize():b});return new this.constructor(Array.prototype.concat.apply(this.serialize(),a))},forEach:function(a,b){c.each(this,a,b||this)}});j.List=M;j.setup=function(){c.Construct.setup.apply(this,arguments);this.List=j.List({Observe:this},{})};var Ya=function(a,
b,d){var e=new c.Deferred;a.then(function(){var a=c.makeArray(arguments);a[0]=b[d](a[0]);e.resolveWith(e,a)},function(){e.rejectWith(this,arguments)});"function"===typeof a.abort&&(e.abort=function(){return a.abort()});return e},Za=0,ra=/change.observe\d+/,G=function(a){c.Observe.__reading&&c.Observe.__reading(a,a.constructor.id);return a.__get(a.constructor.id)},sa=function(a,b,d,c,f){var h;h=[a.serialize()];var g=a.constructor,i;"destroy"==b&&h.shift();"create"!==b&&h.unshift(G(a));i=g[b].apply(g,
h);h=i.pipe(function(d){a[f||b+"d"](d,i);return a});i.abort&&(h.abort=function(){i.abort()});h.then(d,c);return h},$a={create:{url:"_shortName",type:"post"},update:{data:function(a,b){var b=b||{},d=this.id;b[d]&&b[d]!==a&&(b["new"+c.capitalize(a)]=b[d],delete b[d]);b[d]=a;return b},type:"put"},destroy:{type:"delete",data:function(a){var b={};b.id=b[this.id]=a;return b}},findAll:{url:"_shortName"},findOne:{}},ta=function(a,b){return function(d){var d=a.data?a.data.apply(this,arguments):d,e=b||this[a.url||
"_url"],f=d,h=a.type||"get",g={};"string"==typeof e?(e=e.split(/\s/),g.url=e.pop(),e.length&&(g.type=e.pop())):c.extend(g,e);g.data="object"==typeof f&&!c.isArray(f)?c.extend(g.data||{},f):f;g.url=c.sub(g.url,g.data,!0);return c.ajax(c.extend({type:h||"post",dataType:"json",success:void 0,error:void 0},g))}};c.Model=c.Observe({fullName:"can.Model",setup:function(a){this.store={};c.Observe.setup.apply(this,arguments);if(c.Model){this.List=W({Observe:this},{});var b=this,d=c.proxy(this._clean,b);c.each($a,
function(e,f){c.isFunction(b[f])||(b[f]=ta(e,b[f]));if(b["make"+c.capitalize(f)]){var h=b["make"+c.capitalize(f)](b[f]);c.Construct._overwrite(b,a,f,function(){this._reqs++;var a=h.apply(this,arguments),b=a.then(d,d);b.abort=a.abort;return b})}});if("can.Model"==b.fullName||!b.fullName)b.fullName="Model"+ ++Za;this._reqs=0;this._url=this._shortName+"/{"+this.id+"}"}},_ajax:ta,_clean:function(a){this._reqs--;if(!this._reqs)for(var b in this.store)this.store[b]._bindings||delete this.store[b];return a},
models:function(a,b){if(a){if(a instanceof this.List)return a;var d=this,e=[],f=b instanceof c.Observe.List?b:new (d.List||W),h=c.isArray(a),g=a instanceof W,g=h?a:g?a.serialize():a.data;0<f.length&&f.splice(0);c.each(g,function(a){e.push(d.model(a))});f.push.apply(f,e);h||c.each(a,function(a,b){"data"!==b&&f.attr(b,a)});return f}},model:function(a){if(a){a instanceof this&&(a=a.serialize());var b=a[this.id],b=b&&this.store[b]?this.store[b].attr(a):new this(a);this._reqs&&(this.store[a[this.id]]=
b);return b}}},{isNew:function(){var a=G(this);return!(a||0===a)},save:function(a,b){return sa(this,this.isNew()?"create":"update",a,b)},destroy:function(a,b){return sa(this,"destroy",a,b,"destroyed")},bind:function(a){ra.test(a)||(this._bindings||(this.constructor.store[G(this)]=this,this._bindings=0),this._bindings++);return c.Observe.prototype.bind.apply(this,arguments)},unbind:function(a){ra.test(a)||(this._bindings--,this._bindings||delete this.constructor.store[G(this)]);return c.Observe.prototype.unbind.apply(this,
arguments)},___set:function(a,b){c.Observe.prototype.___set.call(this,a,b);a===this.constructor.id&&this._bindings&&(this.constructor.store[G(this)]=this)}});c.each({makeFindAll:"models",makeFindOne:"model"},function(a,b){c.Model[b]=function(b){return function(c,f,h){c=Ya(b.call(this,c),this,a);c.then(f,h);return c}}});c.each(["created","updated","destroyed"],function(a){c.Model.prototype[a]=function(b){var d=this.constructor;b&&"object"==typeof b&&this.attr(b.attr?b.attr():b);c.trigger(this,a);c.trigger(this,
"change",a);c.trigger(d,a,this)}});var W=c.Model.List=c.Observe.List({setup:function(){c.Observe.List.prototype.setup.apply(this,arguments);var a=this;this.bind("change",function(b,d){if(/\w+\.destroyed/.test(d)){var c=a.indexOf(b.target);-1!=c&&a.splice(c,1)}})}}),ab=/^\d+$/,bb=/([^\[\]]+)|(\[\])/g,cb=/([^?#]*)(#.*)?$/,ua=function(a){return decodeURIComponent(a.replace(/\+/g," "))};c.extend(c,{deparam:function(a){var b={},d;a&&cb.test(a)&&(a=a.split("&"),c.each(a,function(a){for(var a=a.split("="),
c=ua(a.shift()),h=ua(a.join("=")),g=b,a=c.match(bb),c=0,i=a.length-1;c<i;c++)g[a[c]]||(g[a[c]]=ab.test(a[c+1])||"[]"==a[c+1]?[]:{}),g=g[a[c]];d=a.pop();"[]"==d?g.push(h):g[d]=h}));return b}});var va=/\:([\w\.]+)/g,db=function(a){var b=[];c.each(a,function(a,e){b.push(("className"===e?"class":e)+'="'+("href"===e?a:c.esc(a))+'"')});return b.join(" ")},wa=function(a,b){var d=0,c=0,f={},h;for(h in a.defaults)a.defaults[h]===b[h]&&(f[h]=1,d++);for(;c<a.names.length;c++){if(!b.hasOwnProperty(a.names[c]))return-1;
f[a.names[c]]||d++}return d},xa=!0,X=n.location,y=c.each,m=c.extend;c.route=function(a,b){var b=b||{},d=[],e=a.replace(va,function(e,h,g){d.push(h);return"([^\\"+(a.substr(g+e.length,1)||c.route._querySeparator)+"]"+(b[h]?"*":"+")+")"});c.route.routes[a]={test:RegExp("^"+e+"($|"+(c.route._querySeparator+"").replace(/([.?*+\^$\[\]\\(){}|\-])/g,"\\$1")+")"),route:a,names:d,defaults:b,length:a.split("/").length};return c.route};m(c.route,{_querySeparator:"&",_paramsMatcher:/^(?:&[^=]+=[^&]*)+/,param:function(a,
b){var d,e=0,f,h=a.route,g=0;delete a.route;y(a,function(){g++});y(c.route.routes,function(b){f=wa(b,a);f>e&&(d=b,e=f);if(f>=g)return!1});c.route.routes[h]&&wa(c.route.routes[h],a)===e&&(d=c.route.routes[h]);if(d){var i=m({},a),h=d.route.replace(va,function(b,c){delete i[c];return a[c]===d.defaults[c]?"":encodeURIComponent(a[c])}),s;y(d.defaults,function(a,b){i[b]===a&&delete i[b]});s=c.param(i);b&&c.route.attr("route",d.route);return h+(s?c.route._querySeparator+s:"")}return c.isEmptyObject(a)?"":
c.route._querySeparator+c.param(a)},deparam:function(a){var b={length:-1};y(c.route.routes,function(c){c.test.test(a)&&c.length>b.length&&(b=c)});if(-1<b.length){var d=a.match(b.test),e=d.shift(),f=(e=a.substr(e.length-(d[d.length-1]===c.route._querySeparator?1:0)))&&c.route._paramsMatcher.test(e)?c.deparam(e.slice(1)):{},f=m(!0,{},b.defaults,f);y(d,function(a,d){a&&a!==c.route._querySeparator&&(f[b.names[d]]=decodeURIComponent(a))});f.route=b.route;return f}a.charAt(0)!==c.route._querySeparator&&
(a=c.route._querySeparator+a);return c.route._paramsMatcher.test(a)?c.deparam(a.slice(1)):{}},data:new c.Observe({}),routes:{},ready:function(a){!1===a&&(xa=a);if(!0===a||!0===xa)c.route._setup(),ya();return c.route},url:function(a,b){b&&(a=m({},Y,a));return"#!"+c.route.param(a)},link:function(a,b,d,e){return"<a "+db(m({href:c.route.url(b,e)},d))+">"+a+"</a>"},current:function(a){return X.hash=="#!"+c.route.param(a)},_setup:function(){c.bind.call(n,"hashchange",ya)},_getHash:function(){return X.href.split(/#!?/)[1]||
""},_setHash:function(a){a=c.route.param(a,!0);X.hash="#!"+a;return a}});y("bind unbind delegate undelegate attr removeAttr".split(" "),function(a){c.route[a]=function(){return c.route.data[a].apply(c.route.data,arguments)}});var za,Y,ya=c.route.setState=function(){var a=c.route._getHash();Y=c.route.deparam(a);(!Z||a!==Aa)&&c.route.attr(Y,!0)},Aa,Z;c.route.bind("change",function(){Z=1;clearTimeout(za);za=setTimeout(function(){Z=0;var a=c.route.data.serialize();Aa=c.route._setHash(a)},1)});c.bind.call(document,
"ready",c.route.ready);c.route.constructor.canMakeObserve=c.Observe.canMakeObserve;var E=function(a,b,d){c.bind.call(a,b,d);return function(){c.unbind.call(a,b,d)}},C=c.isFunction,m=c.extend,y=c.each,eb=[].slice,Ba=/\{([^\}]+)\}/g,fb=c.getObject("$.event.special")||{},Ca=function(a,b,d,e){c.delegate.call(a,b,d,e);return function(){c.undelegate.call(a,b,d,e)}},$;c.Control=c.Construct({setup:function(){c.Construct.setup.apply(this,arguments);if(c.Control){var a;this.actions={};for(a in this.prototype)this._isAction(a)&&
(this.actions[a]=this._action(a))}},_shifter:function(a,b){var d="string"==typeof b?a[b]:b;C(d)||(d=a[d]);return function(){a.called=b;return d.apply(a,[this.nodeName?c.$(this):this].concat(eb.call(arguments,0)))}},_isAction:function(a){var b=this.prototype[a],c=typeof b;return"constructor"!==a&&("function"==c||"string"==c&&C(this.prototype[b]))&&!(!fb[a]&&!aa[a]&&!/[^\w]/.test(a))},_action:function(a,b){Ba.lastIndex=0;if(b||!Ba.test(a)){var d=b?c.sub(a,[b,n]):a,e=c.isArray(d),f=e?d[1]:d,h=f.split(/\s+/g),
g=h.pop();return{processor:aa[g]||$,parts:[f,h.join(" "),g],delegate:e?d[0]:l}}},processors:{},defaults:{}},{setup:function(a,b){var d=this.constructor,e=d.pluginName||d._fullName;this.element=c.$(a);e&&"can_control"!==e&&this.element.addClass(e);(e=c.data(this.element,"controls"))||c.data(this.element,"controls",e=[]);e.push(this);this.options=m({},d.defaults,b);this.on();return[this.element,this.options]},on:function(a,b,d,e){if(!a){this.off();var a=this.constructor,b=this._bindings,d=a.actions,
e=this.element,f=c.Control._shifter(this,"destroy"),h,g;for(h in d)d.hasOwnProperty(h)&&(g=d[h]||a._action(h,this.options),b.push(g.processor(g.delegate||e,g.parts[2],g.parts[1],h,this)));c.bind.call(e,"destroyed",f);b.push(function(a){c.unbind.call(a,"destroyed",f)});return b.length}"string"==typeof a&&(e=d,d=b,b=a,a=this.element);e===l&&(e=d,d=b,b=null);"string"==typeof e&&(e=c.Control._shifter(this,e));this._bindings.push(b?Ca(a,c.trim(b),d,e):E(a,d,e));return this._bindings.length},off:function(){var a=
this.element[0];y(this._bindings||[],function(b){b(a)});this._bindings=[]},destroy:function(){var a=this.constructor,a=a.pluginName||a._fullName;this.off();a&&"can_control"!==a&&this.element.removeClass(a);a=c.data(this.element,"controls");a.splice(c.inArray(this,a),1);c.trigger(this,"destroyed");this.element=null}});var aa=c.Control.processors;$=function(a,b,d,e,f){e=c.Control._shifter(f,e);return d?Ca(a,c.trim(d),b,e):E(a,b,e)};y("change click contextmenu dblclick keydown keyup keypress mousedown mousemove mouseout mouseover mouseup reset resize scroll select submit focusin focusout mouseenter mouseleave touchstart touchmove touchcancel touchend touchleave".split(" "),
function(a){aa[a]=$});c.Control.processors.route=function(a,b,d,e,f){d=d||"";c.route(d);var h,g=function(a){if(c.route.attr("route")===d&&(a.batchNum===l||a.batchNum!==h))if(h=a.batchNum,a=c.route.attr(),delete a.route,c.isFunction(f[e]))f[e](a);else f[f[e]](a)};c.route.bind("change",g);return function(){c.route.unbind("change",g)}};var C=c.isFunction,gb=c.makeArray,Da=1,k=c.view=function(a,b,d,e){a=k.render(a,b,d,e);return C(a)?a:c.isDeferred(a)?a.pipe(function(a){return k.frag(a)}):k.frag(a)};c.extend(k,
{frag:function(a,b){return k.hookup(k.fragment(a),b)},fragment:function(a){a=c.buildFragment(a,document.body);a.childNodes.length||a.appendChild(document.createTextNode(""));return a},toId:function(a){return c.map(a.toString().split(/\/|\./g),function(a){if(a)return a}).join("_")},hookup:function(a,b){var d=[],e,f;c.each(a.childNodes?c.makeArray(a.childNodes):a,function(a){1===a.nodeType?(d.push(a),d.push.apply(d,c.makeArray(a.getElementsByTagName("*")))):3===a.nodeType&&a.textContent&&(a.textContent=
a.textContent.replace(/@@!!@@/g,""))});c.each(d,function(a){if(a.getAttribute&&(e=a.getAttribute("data-view-id"))&&(f=k.hookups[e]))f(a,b,e),delete k.hookups[e],a.removeAttribute("data-view-id")});return a},hookups:{},hook:function(a){k.hookups[++Da]=a;return" data-view-id='"+Da+"'"},cached:{},cachedRenderers:{},cache:!0,register:function(a){this.types["."+a.suffix]=a},types:{},ext:".ejs",registerScript:function(){},preload:function(){},render:function(a,b,d,e){C(d)&&(e=d,d=l);var f=hb(b);if(f.length){var h=
new c.Deferred;f.push(Ea(a,!0));c.when.apply(c,f).then(function(a){var f=gb(arguments),g=f.pop();if(c.isDeferred(b))b=Fa(a);else for(var l in b)c.isDeferred(b[l])&&(b[l]=Fa(f.shift()));f=g(b,d);h.resolve(f);e&&e(f)});return h}var g,f=C(e),h=Ea(a,f);if(f)g=h,h.then(function(a){e(b?a(b,d):a)});else{if("resolved"===h.state()&&h.__view_id)return a=k.cachedRenderers[h.__view_id],b?a(b,d):a;h.then(function(a){g=b?a(b,d):a})}return g},registerView:function(a,b,d,e){b=(d||k.types[k.ext]).renderer(a,b);e=
e||new c.Deferred;k.cache&&(k.cached[a]=e,e.__view_id=a,k.cachedRenderers[a]=b);return e.resolve(b)}});var Ga=function(a,b){if(!a.length)throw"can.view: No template or empty template:"+b;},Ea=function(a,b){var d=a.match(/\.[\w\d]+$/),e,f,h;a.match(/^#/)&&(a=a.substr(1));if(f=document.getElementById(a))d="."+f.type.match(/\/(x\-)?(.+)/)[2];!d&&!k.cached[a]&&(a+=d=k.ext);c.isArray(d)&&(d=d[0]);h=k.toId(a);if(a.match(/^\/\//))var g=a.substr(2),a=!n.steal?g:steal.config().root.mapJoin(g);e=k.types[d];
if(k.cached[h])return k.cached[h];if(f)return k.registerView(h,f.innerHTML,e);var i=new c.Deferred;c.ajax({async:b,url:a,dataType:"text",error:function(b){Ga("",a);i.reject(b)},success:function(b){Ga(b,a);k.registerView(h,b,e,i)}});return i},hb=function(a){var b=[];if(c.isDeferred(a))return[a];for(var d in a)c.isDeferred(a[d])&&b.push(a[d]);return b},Fa=function(a){return c.isArray(a)&&"success"===a[1]?a[0]:a};n.steal&&steal.type("view js",function(a,b){var c=k.types["."+a.type],e=k.toId(a.id);a.text=
"steal('"+(c.plugin||"can/view/"+a.type)+"',function(can){return can.view.preload('"+e+"',"+a.text+");\n})";b()});c.extend(k,{register:function(a){this.types["."+a.suffix]=a;n.steal&&steal.type(a.suffix+" view js",function(a,c){var e=k.types["."+a.type],f=k.toId(a.id+"");a.text=e.script(f,a.text);c()});k[a.suffix]=function(b,d){if(!d)return a.renderer(null,b);k.preload(b,a.renderer(b,d));return c.view(b)}},registerScript:function(a,b,c){return"can.view.preload('"+b+"',"+k.types["."+a].script(b,c)+
");"},preload:function(a,b){k.cached[a]=(new c.Deferred).resolve(function(a,c){return b.call(a,a,c)});return function(){return k.frag(b.apply(this,arguments))}}});var ib=function(a,b){var d;c.Observe&&(d=c.Observe.__reading,c.Observe.__reading=function(a,b){e.push({obj:a,attr:b})});var e=[],f=a.call(b);c.Observe&&(c.Observe.__reading=d);return{value:f,observed:e}},Ha=function(a,b,d){var e={},f=!0,h={value:l,teardown:function(){for(var a in e){var b=e[a];b.observe.obj.unbind(b.observe.attr,i);delete e[a]}}},
g,i=function(a){if(a.batchNum===l||a.batchNum!==g){var b=h.value,c=j();h.value=c;c!==b&&d(c,b);g=g=a.batchNum}},j=function(){var d=ib(a,b),g=d.observed,d=d.value;f=!f;c.each(g,function(a){e[a.obj._cid+"|"+a.attr]?e[a.obj._cid+"|"+a.attr].matched=f:(e[a.obj._cid+"|"+a.attr]={matched:f,observe:a},a.obj.bind(a.attr,i))});for(var h in e)g=e[h],g.matched!==f&&(g.observe.obj.unbind(g.observe.attr,i),delete e[h]);return d};h.value=j();h.isListening=!c.isEmptyObject(e);return h};c.compute=function(a,b){if(a&&
a.isComputed)return a;var d,e=0,f,h=!0;"function"===typeof a?f=function(g){return g===l?d?(e&&c.Observe.__reading&&c.Observe.__reading(f,"change"),d.value):a.call(b||this):a.apply(b||this,arguments)}:(f=function(b){if(b===l)return c.Observe.__reading&&c.Observe.__reading(f,"change"),a;var d=a;a=b;d!==b&&c.Observe.triggerBatch(f,"change",[b,d]);return b},h=!1);f.isComputed=!0;f.bind=function(g,i){c.addEvent.apply(f,arguments);e===0&&h&&(d=Ha(a,b||this,function(a,b){c.Observe.triggerBatch(f,"change",
[a,b])}));e++};f.unbind=function(a,b){c.removeEvent.apply(f,arguments);e--;e===0&&h&&d.teardown()};return f};c.compute.binder=Ha;var jb=/(\r|\n)+/g,ba={option:"textContent",textarea:"value"},Ia={tr:"tbody",option:"select",td:"tr",li:"ul"},kb=function(a,b,c){if(a)return a;for(;c<b.length;){if("<"==b[c]&&Ia[b[c+1]])return Ia[b[c+1]];c++}},lb=function(a){eval(a)},mb=/([^\s]+)[\s]*=[\s]*$/,A=null,ca=x=null,v=null,da=function(){return x?"'"+ca.match(mb)[1]+"'":A?1:0};c.view.Scanner=Scanner=function(a){c.extend(this,
{text:{},tokens:[]},a);this.tokenReg=[];this.tokenSimple={"<":"<",">":">",'"':'"',"'":"'"};this.tokenComplex=[];this.tokenMap={};for(var a=0,b;b=this.tokens[a];a++)b[2]?(this.tokenReg.push(b[2]),this.tokenComplex.push({abbr:b[1],re:RegExp(b[2]),rescan:b[3]})):(this.tokenReg.push(b[1]),this.tokenSimple[b[1]]=b[0]),this.tokenMap[b[0]]=b[1];this.tokenReg=RegExp("("+this.tokenReg.slice(0).concat(["<",">",'"',"'"]).join("|")+")","g")};Scanner.prototype={helpers:[{name:/\s*\(([\$\w]+)\)\s*->([^\n]*)/,fn:function(a){a=
a.match(/\s*\(([\$\w]+)\)\s*->([^\n]*)/);return"function(__){var "+a[1]+"=can.$(__);"+a[2]+"}"}}],scan:function(a,b){var c=[],e=0,f=this.tokenSimple,h=this.tokenComplex,a=a.replace(jb,"\n");a.replace(this.tokenReg,function(b,g){var i=arguments[arguments.length-2];i>e&&c.push(a.substring(e,i));if(f[b])c.push(b);else for(var o=0,j;j=h[o];o++)if(j.re.test(b)){c.push(j.abbr);j.rescan&&c.push(j.rescan(g));break}e=i+g.length});e<a.length&&c.push(a.substr(e));var g="",i=["var ___v1ew = [];"+(this.text.start||
"")],j=function(a,b){i.push("___v1ew.push(",'"',a.split("\\").join("\\\\").split("\n").join("\\n").split('"').join('\\"').split("\t").join("\\t"),'"'+(b||"")+");")},k=[],t,m=null,n=!1,r="",q=[],H=0,u,o=this.tokenMap;for(A=x=ca=null;(u=c[H++])!==l;){if(null===m)switch(u){case o.left:case o.escapeLeft:case o.returnLeft:n=A&&1;case o.commentLeft:m=u;g.length&&j(g);g="";break;case o.escapeFull:n=A&&1;v=1;m=o.escapeLeft;g.length&&j(g);v=c[H++];g=v.content||v;v.before&&j(v.before);c.splice(H,0,o.right);
break;case o.commentFull:break;case o.templateLeft:g+=o.left;break;case "<":0!==c[H].indexOf("!--")&&(A=1,n=0);g+=u;break;case ">":A=0;t="/"==g.substr(g.length-1);n||ba[q[q.length-1]]?(t?j(g.substr(0,g.length-1),',can.view.pending(),"/>"'):j(g,',can.view.pending(),">"'),g=""):g+=u;t&&(q.pop(),r=q[q.length-1]);break;case "'":case '"':A&&(x&&x===u?x=null:null===x&&(x=u,ca=t));default:"<"===t&&(r=u.split(/\s/)[0],0===r.indexOf("/")&&q.pop()===r.substr(1)?r=q[q.length-1]:q.push(r)),g+=u}else switch(u){case o.right:case o.returnRight:switch(m){case o.left:t=
--g.split("{").length- --g.split("}").length;1==t?(i.push("___v1ew.push(","can.view.txt(0,'"+kb(r,c,H)+"',"+da()+",this,function(){","var ___v1ew = [];",g),k.push({before:"",after:"return ___v1ew.join('')}));\n"})):(e=k.length&&-1==t?k.pop():{after:";"},e.before&&i.push(e.before),i.push(g,";",e.after));break;case o.escapeLeft:case o.returnLeft:(t=--g.split("{").length- --g.split("}").length)&&k.push({before:"return ___v1ew.join('')",after:"}));"});for(var m=m===o.escapeLeft?1:0,nb={insert:"___v1ew.push(",
tagName:r,status:da()},ea=0;ea<this.helpers.length;ea++){var p=this.helpers[ea];if(p.name.test(g)){g=p.fn(g,nb);p.name.source==/^>[\s|\w]\w*/.source&&(m=0);break}}"object"==typeof g?g.raw&&i.push(g.raw):i.push("___v1ew.push(","can.view.txt("+m+",'"+r+"',"+da()+",this,function(){ "+(this.text.escape||"")+"return ",g,t?"var ___v1ew = [];":"}));");v&&(v.after&&v.after.length)&&(j(v.after.length),v=null)}m=null;g="";break;case o.templateLeft:g+=o.left;break;default:g+=u}t=u}g.length&&j(g);i.push(";");
g={out:"with(_VIEW) { with (_CONTEXT) {"+i.join("")+" return ___v1ew.join('')}}"};lb.call(g,"this.fn = (function(_CONTEXT,_VIEW){"+g.out+"});\r\n//@ sourceURL="+b+".js");return g}};var fa=!0;try{document.createTextNode()._=0}catch(rb){fa=!1}var I={"class":"className",value:"value",textContent:"textContent"},ob={"":"span",table:"tr",tr:"td",ol:"li",ul:"li",tbody:"tr",thead:"tr",tfoot:"tr",select:"option",optgroup:"option"},pb=/__!!__/g,ba={option:"textContent",textarea:"value"},Ja=c.each(["checked",
"disabled","readonly","required"],function(a){I[a]=a}),ga=function(a,b,d){I[b]?a[I[b]]=-1<c.inArray(b,Ja)?!0:d:a.setAttribute(b,d)},J=[],ha=function(a){if("string"==typeof a)return a;if(!a&&0!==a)return"";var b=a.hookup&&function(b,c){a.hookup.call(a,b,c)}||"function"==typeof a&&a;return b?(J.push(b),""):""+a},qb=function(a){return"string"==typeof a||"number"==typeof a?c.esc(a):ha(a)},D={},N={},O={},ia="ejs_"+Math.random(),ja=0,B=function(a){if(fa||3!==a.nodeType)return a[ia]?a[ia]:a[ia]=(a.nodeName?
"element_":"obj_")+ ++ja;for(var b in N)if(N[b]===a)return b;N["text_"+ ++ja]=a;return"text_"+ja},Ka=function(a,b){var d=D[B(a)];if(d){var e=c.inArray(b,d);0<=e&&d.splice(e,1);d.length||delete D[B(a)]}},La=function(a,b){var c=D[B(a)];c||(c=D[B(a)]=[]);c.push(b)};c.extend(c.view,{pending:function(){var a=J.slice(0);lastHookups=a;J=[];return c.view.hook(function(b){c.each(a,function(a){a(b)})})},registerNode:function(a){var b=B(a);O[b]=a;c.each(a,function(a){La(a,b)})},unregisterNode:function(a){var b=
B(a);c.each(a,function(a){Ka(a,b)});delete O[b]},txt:function(a,b,d,e,f){var h=c.compute.binder(f,e,function(a,b){n(a,b)});if(!h.isListening)return(a||0!==d?qb:ha)(h.value);var g,i,j=function(){h.teardown();i&&c.view.unregisterNode(i)},k=function(a){c.bind.call(a,"destroyed",j);g=a},m=function(a){a||(j(),c.unbind.call(g,"destroyed",j))},e=ob[b]||"span",n,p=ba[b];if(0===d&&!p)return"<"+e+c.view.hook(a?function(a,b){n=function(a){d.nodeValue=""+a;m(d.parentNode)};var c=b&&11===a.parentNode.nodeType?
b:a.parentNode,d=document.createTextNode(h.value);c.insertBefore(d,a);c.removeChild(a);k(c)}:function(a,b){n=function(a){d[0].parentNode&&e(a);m(d[0].parentNode)};var b=b&&11===a.parentNode.nodeType?b:a.parentNode,d,e=function(e){var e=c.view.frag(e,b),f=c.makeArray(e.childNodes),g=d?d[d.length-1]:a;g.nextSibling?g.parentNode.insertBefore(e,g.nextSibling):g.parentNode.appendChild(e);d?(c.remove(c.$(d)),c.view.replace(d,f)):(c.remove(c.$(a)),i=d=f,c.view.registerNode(d))};e(h.value,[a]);k(b)})+">@@!!@@</"+
e+">";if(1===d){var r=h.value.replace(/['"]/g,"").split("=")[0];J.push(function(a){n=function(b){var b=(b||"").replace(/['"]/g,"").split("="),d=b[0];if(d!=r&&r){var e=r;-1<c.inArray(e,Ja)?a[e]=!1:a.removeAttribute(e)}d&&(ga(a,d,b[1]),r=d)};k(a)});return h.value}var q=0===d?p:d;(0===d?lastHookups:J).push(function(a){n=function(){ga(a,q,g.render(),p)};var b=c.$(a),d;(d=c.data(b,"hooks"))||c.data(b,"hooks",d={});var e=(I[q]?a[I[q]]:a.getAttribute(q))||"",b=e.split("__!!__"),f=[],g;f.push(b.shift(),b.join("__!!__"));
d[q]?d[q].bindings.push(h):d[q]={render:function(){var a=0;return e.replace(pb,function(){return ha(g.bindings[a++].value)})},bindings:[h],batchNum:l};g=d[q];f.splice(1,0,h.value);ga(a,q,f.join(""),p);k(a)});return"__!!__"},replace:function(a,b){a=c.makeArray(a);c.each(a,function(d){c.each(c.makeArray(D[B(d)]),function(e){var f=O[e],h=c.inArray(d,f),g=c.inArray(a[a.length-1],f);if(0<=h&&0<=g){for(var i=h;i<=g;i++)Ka(f[i],e);f.splice.apply(f,[h,g-h+1].concat(b));c.each(b,function(a){La(a,e)})}else c.view.unregisterNode(f)})})},
canExpando:fa,textNodeMap:N,nodeMap:D,nodeListMap:O});var m=c.extend,w=function(a){if(this.constructor!=w){var b=new w(a);return function(a,c){return b.render(a,c)}}"function"==typeof a?this.template={fn:a}:(m(this,a),this.template=this.scanner.scan(this.text,this.name))};c.EJS=w;w.prototype.render=function(a,b){a=a||{};return this.template.fn.call(a,a,new w.Helpers(a,b||{}))};m(w.prototype,{scanner:new c.view.Scanner({tokens:[["templateLeft","<%%"],["templateRight","%>"],["returnLeft","<%=="],["escapeLeft",
"<%="],["commentLeft","<%#"],["left","<%"],["right","%>"],["returnRight","%>"]]})});w.Helpers=function(a,b){this._data=a;this._extras=b;m(this,b)};w.Helpers.prototype={list:function(a,b){c.each(a,function(c,e){b(c,e,a)})}};c.view.register({suffix:"ejs",script:function(a,b){return"can.EJS(function(_CONTEXT,_VIEW) { "+(new w({text:b,name:a})).template.out+" })"},renderer:function(a,b){return w({text:b,name:a})}})})(window);
|
PypiClean
|
/gen_doc-0.1.4.tar.gz/gen_doc-0.1.4/gen_doc/commands.py
|
import click
from gen_doc.doc_generator import DocGenerator
from gen_doc.extensions import GenDocParsers
from gen_doc.serializers import GenDocSerializers
from gen_doc.utils.config_handler import copy_config, load_config
from .utils.command_utils import GroupWithCommandOptions
from .utils.utils import get_version
@click.group(
help="Utility for generating project documentation from docstrings",
cls=GroupWithCommandOptions,
context_settings=dict(
ignore_unknown_options=True,
),
)
@click.option(
"-v",
"--version",
"version",
is_flag=True,
required=False,
default=False,
help="Get library version",
type=bool,
)
@click.option(
"-i",
"--init",
"init_var",
is_flag=True,
required=False,
default=False,
help="Init gen_doc config with default parameters",
type=bool,
)
@click.option(
"-b",
"--build",
"build_var",
is_flag=True,
required=False,
default=False,
help="Build documentation by config",
type=bool,
)
@click.pass_context
def entry_point(ctx, version, init_var, build_var):
if version:
print("GenDoc Version:", get_version())
if init_var:
ctx.invoke(init)
if build_var:
ctx.invoke(build, config=True)
@entry_point.command(
"init",
help="To init config file in order to generate documentation.",
context_settings=dict(
ignore_unknown_options=True,
),
)
@click.option(
"-f",
"--file-config",
"file_config",
show_default=True,
required=False,
default="gen_doc.yaml",
help="Config file name",
type=str,
)
@click.option(
"-o",
"--overwrite",
"overwrite",
is_flag=True,
required=False,
default=False,
help="To overwrite, in case file already exists",
type=bool,
)
def init(file_config: str = "gen_doc.yaml", overwrite: bool = False, *args, **kwargs):
welcome_string = """Config was created"""
is_correct = copy_config(file_config, overwrite)
if not is_correct:
return
print(welcome_string)
@entry_point.command(
"build",
help="Build documentation",
context_settings=dict(
ignore_unknown_options=True,
),
)
@click.argument(
"language",
required=False,
default="py",
type=click.Choice([i.name for i in GenDocParsers]),
)
@click.option(
"-sm",
"--save-mode",
"save_mode",
required=False,
default="md",
help="Save mode",
type=click.Choice([i.name for i in GenDocSerializers]),
)
@click.option(
"-hi",
"--hierarchically",
"hierarchically",
is_flag=True,
required=False,
default=True,
help="Extract with the same hierarchy",
type=bool,
)
@click.option(
"-o",
"--overwrite",
"overwrite",
is_flag=True,
required=False,
default=True,
help="To overwrite, in case file already exists",
type=bool,
)
@click.option(
"-p2r",
"--path-to-root",
"path_to_root",
required=False,
default=None,
help="Path to the directory for which documentation should be compiled",
type=str,
)
@click.option(
"-p2s",
"--path-to-save",
"path_to_save",
required=False,
default=None,
help="Path to the directory where the documentation should be saved",
type=str,
)
@click.option(
"-f2s",
"--file-to-save",
"file_to_save",
required=False,
default=None,
help="Path to the directory where the documentation should be saved",
type=str,
)
@click.option(
"-c",
"--config",
"config",
is_flag=True,
required=False,
default=False,
help="Use config for build documentation.",
type=bool,
)
@click.option(
"-f",
"--file-config",
"file_config",
show_default=True,
required=False,
default="gen_doc.yaml",
help="Config file name",
type=str,
)
def build(
language,
save_mode,
path_to_root,
config,
hierarchically,
overwrite,
path_to_save,
file_to_save,
file_config,
*args,
**kwargs,
):
if config:
configs = load_config(file_config)
if configs is None:
print("No config file to build. Use `gen_doc init` to initiate the config.")
return
elif not configs:
print("Specified incorrectly or broken file")
return
options = configs.get("OPTIONS", dict())
author = configs.get("AUTHOR", dict())
project = configs.get("PROJECT", dict())
allowed_parsers = [parser.name for parser in GenDocParsers]
if "language" not in options:
print(
"Please don't drop required fields from the config."
"Add `language` field to the config and try again."
)
return
if options["language"] not in allowed_parsers:
print(
f"You specified unavailable value for languages."
f"Available values are: {allowed_parsers}"
)
return
parser = DocGenerator(
parse_mode=options["language"],
path_to_root_folder=options.get("path_to_root_folder", None),
extract_with_same_hierarchy=options.get(
"extract_with_same_hierarchy", True
),
overwrite_if_file_exists=options.get("overwrite_if_file_exists", False),
path_to_save=options.get("path_to_save", None),
file_to_save=options.get("file_to_save", None),
save_mode=options.get("save_mode", "md"),
additional_files_to_ignore=options.get("additional_files_to_ignore", None),
additional_folders_to_ignore=options.get(
"additional_folders_to_ignore", None
),
title=project.get("title"),
description=project.get("description"),
repository_main_url=project.get("repository"),
release=project.get("release"),
author=author.get("author"),
author_contacts=author.get("author_contacts"),
)
else:
parser = DocGenerator(
parse_mode=language,
path_to_root_folder=path_to_root,
extract_with_same_hierarchy=hierarchically,
overwrite_if_file_exists=overwrite,
path_to_save=path_to_save,
file_to_save=file_to_save,
save_mode=save_mode,
)
parser.generate()
if __name__ == "__main__":
entry_point()
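# Example invocations (a sketch based on the options defined above; assumes the
# console script is installed as `gen_doc`, as the messages above suggest):
#
#   gen_doc --init                                  # create a default gen_doc.yaml
#   gen_doc build py -p2r ./my_package -p2s ./docs  # build docs for ./my_package
#   gen_doc --build                                 # build using gen_doc.yaml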
|
PypiClean
|
/bert_reranker-0.2.1.tar.gz/bert_reranker-0.2.1/bert_reranker/models/bert_encoder.py
|
import logging
import pickle
import torch
import torch.nn as nn
from transformers import AutoModel
from bert_reranker.models.general_encoder import GeneralEncoder
from bert_reranker.utils.hp_utils import check_and_log_hp
logger = logging.getLogger(__name__)
def get_ffw_layers(
prev_hidden_size, dropout, layer_sizes, append_relu_and_dropout_after_last_layer):
result = []
for i, size in enumerate(layer_sizes):
result.append(nn.Linear(prev_hidden_size, size))
if i < len(layer_sizes) - 1 or append_relu_and_dropout_after_last_layer:
result.append(nn.ReLU())
result.append(nn.Dropout(p=dropout, inplace=False))
prev_hidden_size = size
return result
def hashable(input_id):
return tuple(input_id.cpu().numpy())
class BertEncoder(GeneralEncoder):
def __init__(self, hyper_params, bert_model, name=''):
model_hparams = hyper_params['model']
check_and_log_hp(
['bert_base', 'dropout_bert', 'freeze_bert'],
model_hparams)
if bert_model is None:
bert = AutoModel.from_pretrained(model_hparams['bert_base'])
else:
bert = bert_model
super(BertEncoder, self).__init__(hyper_params, bert.config.hidden_size)
self.bert = bert
self.name = name
bert_dropout = model_hparams['dropout_bert']
if bert_dropout is not None:
logger.info('setting bert dropout to {}'.format(bert_dropout))
self.bert.config.attention_probs_dropout_prob = bert_dropout
self.bert.config.hidden_dropout_prob = bert_dropout
else:
logger.info('using the original bert model dropout')
self.freeze_bert = model_hparams['freeze_bert']
def get_encoder_hidden_states(self, input_ids, attention_mask, token_type_ids):
if self.freeze_bert:
with torch.no_grad():
bert_hs, _ = self.bert(input_ids=input_ids, attention_mask=attention_mask,
token_type_ids=token_type_ids)
else:
bert_hs, _ = self.bert(input_ids=input_ids, attention_mask=attention_mask,
token_type_ids=token_type_ids)
return bert_hs
class CachedBertEncoder(BertEncoder):
def __init__(self, hyper_params, bert_model, name=''):
model_hparams = hyper_params['model']
check_and_log_hp(
['bert_base', 'dropout_bert', 'freeze_bert', 'cache_size'],
model_hparams)
super(CachedBertEncoder, self).__init__(hyper_params, bert_model, name=name)
if not model_hparams['freeze_bert'] or not model_hparams['dropout_bert'] == 0.0:
raise ValueError('to cache results, set freeze_bert=True and dropout_bert=0.0')
self.cache = {}
self.cache_hit = 0
self.cache_miss = 0
self.max_cache_size = model_hparams['cache_size']
def _search_in_cache(self, input_ids, attention_mask, token_type_ids):
results = []
still_to_compute_iids = []
still_to_compute_am = []
still_to_compute_tti = []
for i in range(input_ids.shape[0]):
ids_hash = hashable(input_ids[i])
if ids_hash in self.cache:
results.append(self.cache[ids_hash].to(input_ids.device))
else:
results.append(None)
still_to_compute_iids.append(input_ids[i])
still_to_compute_am.append(attention_mask[i])
still_to_compute_tti.append(token_type_ids[i])
return results, still_to_compute_iids, still_to_compute_am, still_to_compute_tti
def _store_in_cache_and_get_results(self, cache_results, bert_hs, still_to_compute_iids):
final_results = []
non_cached_result_index = 0
for cache_result in cache_results:
if cache_result is None:
non_cached_result = bert_hs[non_cached_result_index]
final_results.append(non_cached_result)
if len(self.cache) < self.max_cache_size:
self.cache[hashable(still_to_compute_iids[non_cached_result_index])] = \
non_cached_result.cpu()
non_cached_result_index += 1
else:
final_results.append(cache_result)
assert non_cached_result_index == bert_hs.shape[0]
return torch.stack(final_results, dim=0)
def get_encoder_hidden_states(self, input_ids, attention_mask, token_type_ids):
cache_results, still_to_compute_iids, still_to_compute_am, still_to_compute_tti = \
self._search_in_cache(input_ids, attention_mask, token_type_ids)
self.cache_hit += input_ids.shape[0] - len(still_to_compute_iids)
self.cache_miss += len(still_to_compute_iids)
if len(still_to_compute_iids) == 0:
return torch.stack(cache_results, dim=0)
input_ids = torch.stack(still_to_compute_iids, dim=0)
attention_mask = torch.stack(still_to_compute_am, dim=0)
token_type_ids = torch.stack(still_to_compute_tti, dim=0)
if self.freeze_bert:
with torch.no_grad():
bert_hs, _ = self.bert(input_ids=input_ids, attention_mask=attention_mask,
token_type_ids=token_type_ids)
else:
bert_hs, _ = self.bert(input_ids=input_ids, attention_mask=attention_mask,
token_type_ids=token_type_ids)
if self.cache is not None:
bert_hs = self._store_in_cache_and_get_results(
cache_results, bert_hs, still_to_compute_iids)
return bert_hs
def save_cache(self, save_to):
with open(save_to, "wb") as out_stream:
pickle.dump(self.cache, out_stream)
def load_cache(self, load_from):
with open(load_from, "rb") as in_stream:
self.cache = pickle.load(in_stream)
return len(self.cache)
def print_stats_to(self, print_function):
print_function('{}: cache size {} / cache hits {} / cache misses {}'.format(
self.name, len(self.cache), self.cache_hit, self.cache_miss))
|
PypiClean
|
/trilium_client-0.1.0-py3-none-any.whl/trilium_client/rest.py
|
from __future__ import absolute_import
import io
import json
import logging
import re
import ssl
from urllib.parse import urlencode, quote_plus
import urllib3
from trilium_client.exceptions import (
ApiException,
UnauthorizedException,
ForbiddenException,
NotFoundException,
ServiceException,
ApiValueError,
)
logger = logging.getLogger(__name__)
class RESTResponse(io.IOBase):
def __init__(self, resp):
self.urllib3_response = resp
self.status = resp.status
self.reason = resp.reason
self.data = resp.data
def getheaders(self):
"""Returns a dictionary of the response headers."""
return self.urllib3_response.headers
def getheader(self, name, default=None):
"""Returns a given response header."""
return self.urllib3_response.headers.get(name, default)
class RESTClientObject(object):
def __init__(self, configuration, pools_size=4, maxsize=None):
# urllib3.PoolManager will pass all kw parameters to connectionpool
# https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75 # noqa: E501
# https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680 # noqa: E501
# maxsize is the number of requests to host that are allowed in parallel # noqa: E501
# Custom SSL certificates and client certificates: http://urllib3.readthedocs.io/en/latest/advanced-usage.html # noqa: E501
# cert_reqs
if configuration.verify_ssl:
cert_reqs = ssl.CERT_REQUIRED
else:
cert_reqs = ssl.CERT_NONE
addition_pool_args = {}
if configuration.assert_hostname is not None:
addition_pool_args[
"assert_hostname"
] = configuration.assert_hostname # noqa: E501
if configuration.retries is not None:
addition_pool_args["retries"] = configuration.retries
if configuration.socket_options is not None:
addition_pool_args["socket_options"] = configuration.socket_options
if maxsize is None:
if configuration.connection_pool_maxsize is not None:
maxsize = configuration.connection_pool_maxsize
else:
maxsize = 4
# https pool manager
if configuration.proxy:
self.pool_manager = urllib3.ProxyManager(
num_pools=pools_size,
maxsize=maxsize,
cert_reqs=cert_reqs,
ca_certs=configuration.ssl_ca_cert,
cert_file=configuration.cert_file,
key_file=configuration.key_file,
proxy_url=configuration.proxy,
proxy_headers=configuration.proxy_headers,
**addition_pool_args
)
else:
self.pool_manager = urllib3.PoolManager(
num_pools=pools_size,
maxsize=maxsize,
cert_reqs=cert_reqs,
ca_certs=configuration.ssl_ca_cert,
cert_file=configuration.cert_file,
key_file=configuration.key_file,
**addition_pool_args
)
def request(
self,
method,
url,
query_params=None,
headers=None,
body=None,
post_params=None,
_preload_content=True,
_request_timeout=None,
):
"""Perform requests.
:param method: http request method
:param url: http request url
:param query_params: query parameters in the url
:param headers: http request headers
:param body: request json body, for `application/json`
:param post_params: request post parameters,
`application/x-www-form-urlencoded`
and `multipart/form-data`
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
"""
method = method.upper()
assert method in [
"GET",
"HEAD",
"DELETE",
"POST",
"PUT",
"PATCH",
"OPTIONS",
]
if post_params and body:
raise ApiValueError(
"body parameter cannot be used with post_params parameter."
)
post_params = post_params or {}
headers = headers or {}
# url already contains the URL query string
# so reset query_params to empty dict
query_params = {}
timeout = None
if _request_timeout:
if isinstance(_request_timeout, (int, float)): # noqa: E501,F821
timeout = urllib3.Timeout(total=_request_timeout)
elif (
isinstance(_request_timeout, tuple)
and len(_request_timeout) == 2
):
timeout = urllib3.Timeout(
connect=_request_timeout[0], read=_request_timeout[1]
)
try:
# For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE`
if method in ["POST", "PUT", "PATCH", "OPTIONS", "DELETE"]:
# no content type provided or payload is json
if not headers.get("Content-Type") or re.search(
"json", headers["Content-Type"], re.IGNORECASE
):
request_body = None
if body is not None:
request_body = json.dumps(body)
r = self.pool_manager.request(
method,
url,
body=request_body,
preload_content=_preload_content,
timeout=timeout,
headers=headers,
)
elif (
headers["Content-Type"]
== "application/x-www-form-urlencoded"
): # noqa: E501
r = self.pool_manager.request(
method,
url,
fields=post_params,
encode_multipart=False,
preload_content=_preload_content,
timeout=timeout,
headers=headers,
)
elif headers["Content-Type"] == "multipart/form-data":
# must del headers['Content-Type'], or the correct
# Content-Type which generated by urllib3 will be
# overwritten.
del headers["Content-Type"]
r = self.pool_manager.request(
method,
url,
fields=post_params,
encode_multipart=True,
preload_content=_preload_content,
timeout=timeout,
headers=headers,
)
# Pass a `string` parameter directly in the body to support
# other content types than Json when `body` argument is
# provided in serialized form
elif isinstance(body, str) or isinstance(body, bytes):
request_body = body
r = self.pool_manager.request(
method,
url,
body=request_body,
preload_content=_preload_content,
timeout=timeout,
headers=headers,
)
else:
# Cannot generate the request from given parameters
msg = """Cannot prepare a request message for provided
arguments. Please check that your arguments match
declared content type."""
raise ApiException(status=0, reason=msg)
# For `GET`, `HEAD`
else:
r = self.pool_manager.request(
method,
url,
fields={},
preload_content=_preload_content,
timeout=timeout,
headers=headers,
)
except urllib3.exceptions.SSLError as e:
msg = "{0}\n{1}".format(type(e).__name__, str(e))
raise ApiException(status=0, reason=msg)
if _preload_content:
r = RESTResponse(r)
# log response body
logger.debug("response body: %s", r.data)
if not 200 <= r.status <= 299:
if r.status == 401:
raise UnauthorizedException(http_resp=r)
if r.status == 403:
raise ForbiddenException(http_resp=r)
if r.status == 404:
raise NotFoundException(http_resp=r)
if 500 <= r.status <= 599:
raise ServiceException(http_resp=r)
raise ApiException(http_resp=r)
return r
def get_request(
self,
url,
headers=None,
query_params=None,
_preload_content=True,
_request_timeout=None,
):
return self.request(
"GET",
url,
headers=headers,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
query_params=query_params,
)
def head_request(
self,
url,
headers=None,
query_params=None,
_preload_content=True,
_request_timeout=None,
):
return self.request(
"HEAD",
url,
headers=headers,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
query_params=query_params,
)
def options_request(
self,
url,
headers=None,
query_params=None,
post_params=None,
body=None,
_preload_content=True,
_request_timeout=None,
):
return self.request(
"OPTIONS",
url,
headers=headers,
query_params=query_params,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body,
)
def delete_request(
self,
url,
headers=None,
query_params=None,
body=None,
_preload_content=True,
_request_timeout=None,
):
return self.request(
"DELETE",
url,
headers=headers,
query_params=query_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body,
)
def post_request(
self,
url,
headers=None,
query_params=None,
post_params=None,
body=None,
_preload_content=True,
_request_timeout=None,
):
return self.request(
"POST",
url,
headers=headers,
query_params=query_params,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body,
)
def put_request(
self,
url,
headers=None,
query_params=None,
post_params=None,
body=None,
_preload_content=True,
_request_timeout=None,
):
return self.request(
"PUT",
url,
headers=headers,
query_params=query_params,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body,
)
def patch_request(
self,
url,
headers=None,
query_params=None,
post_params=None,
body=None,
_preload_content=True,
_request_timeout=None,
):
return self.request(
"PATCH",
url,
headers=headers,
query_params=query_params,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body,
)
|
PypiClean
|
/gw_matched_filtering_demo-2020.1.tar.gz/gw_matched_filtering_demo-2020.1/README.md
|
Quick start
===========
* The presentation can be found
[here](http://moble.github.io/MatchedFiltering/Presentation.slides.html).
* A preview of the notebook can be seen
[here](http://nbviewer.ipython.org/github/moble/MatchedFiltering/blob/gh-pages/MatchedFiltering.ipynb),
but note that there are cool interactive things that are missing
unless you download and run it yourself.
Details
=======
This package is designed to be a simple demonstration of the principles of matched filtering. It
uses the analogy of LIGO as a microphone to explain the basic ideas, using a microphone attached to
the computer to study data as a function of time, noise sources, and real signals, as well as
headphones or a speaker to play back those signals and others. Examples are given where a signal is
buried in the noise and extracted using matched filtering. Real LIGO data and accurate
gravitational waveforms are also included with this code, and used for further examples to complete
the analogy. The concepts introduced here can be applied far more widely in all areas of data
analysis.
Fourier transforms are introduced, starting with a simple example of a pure tone (which can be
played by the computer), and progressing to Fourier transforms of noise and gravitational-wave
signals. The matched filter is then introduced by progressively building the formula with simple
explanations for each term. Discussion of these concepts is interwoven with practice using them.
The material is presented as a Jupyter notebook, which is an interactive Python session that
includes text explaining the concepts alongside the code. This allows the explanations to be
included (with LaTeX equations) right among the code, all in a live, interactive session. No
familiarity with Python is necessary for the student, though the setup may require some basic
skills.
To run the code
===============
If you are familiar with python packaging, you can probably figure out how to run this on your own.
Note that the required packages include ipython, jupyter, notebook, scipy, matplotlib, ipywidgets,
and widgetsnbextension.
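If you prefer a plain `pip` setup instead, a minimal sketch (assuming Python 3 and `pip` are
available, and that the console script behaves as in the conda instructions below) would be:
```bash
pip install ipython jupyter notebook scipy matplotlib ipywidgets widgetsnbextension
pip install gw_matched_filtering_demo
gw_matched_filtering_demo
```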
It is much simpler to just use the [anaconda](https://www.anaconda.com/) python ecosystem. Once
anaconda is installed, just run the following at the command prompt:
```bash
conda env create moble/gw_matched_filtering_demo
conda activate gw_matched_filtering_demo
gw_matched_filtering_demo
```
This will install all the requirements into a new conda environment, switch to that environment,
then download and run the notebook.
Notes for classroom use
=======================
There are three reasonable ways to deliver this demonstration to students: as a presentation,
individually on the students' personal computers, and together in a computer lab.
Most likely, the presentation option is the least useful to students. Most students benefit
enormously from being able to interact with the notebook personally. They will be more interested,
able to read along at their own pace, and play with the parameters. If this is just not possible,
it would be best to go slowly and ask lots of questions of the students, possibly allowing one
student to actually run the commands while the teacher engages from off to the side.
A preferable option may be having the students download and run the code themselves. The only
caveat here is that the students will need to install the dependencies. With
[anaconda](https://www.anaconda.com/), this is not a problem. Assuming the students can run it,
there are questions included in the notebook. Their answers could be turned in as a homework
assignment, or a quiz given on the material to ensure that students actually go through the
notebook.
If this will be presented together in a computer lab, it is best to set things up as much as
possible on each computer beforehand. The computers need to use different accounts (with home
directories not on a shared file system), or IPython will run into configuration conflicts and errors.
|
PypiClean
|
/insanic-framework-0.9.2.tar.gz/insanic-framework-0.9.2/docs/source/insanic/errors_and_exceptions.rst
|
Error Handling
===============
.. note::
Take a look at Sanic's
`Exceptions <https://sanic.readthedocs.io/en/latest/sanic/exceptions.html>`_
documentation to better understand how Insanic's error handling works.
Insanic's error handling is done with Sanic's error handling
functionality, but with Insanic's own exception and error
definitions. Before we move onto the components that comprise of
an Insanic exception, let's take a look at a quick example.
.. code-block:: python
# in example/app.py
from insanic import Insanic
from insanic.conf import settings
from insanic.errors import GlobalErrorCodes
from insanic.exceptions import APIException
__version__ = '0.1.0'
settings.configure()
app = Insanic('example', version=__version__)
@app.route('/help')
def help_view(request, *args, **kwargs):
raise APIException("Help me! Something blew up!",
error_code=GlobalErrorCodes.error_unspecified,
status_code=400)
if __name__ == "__main__":
app.run(host="0.0.0.0", port=8000)
With this piece of code, let's try running it...
.. code-block:: bash
$ python app.py
Now by sending a request to the server...
.. code-block:: bash
curl -i http://0.0.0.0:8000/help
HTTP/1.1 400 Bad Request
Content-Length: 139
Content-Type: application/json
Connection: keep-alive
Keep-Alive: 60
{
"message":"An unknown error occurred",
"description":"Help me! Something blew up!",
"error_code":{
"name":"insanic_error_unspecified",
"value":999998
}
} # response was formatted for readability
From the response there are a couple components we need to
cover to understand how Insanic's error handler works.
#. The :code:`GlobalErrorCodes`
#. The :code:`APIException`
#. The response.
1. Error Codes
---------------
In a distributed system, errors can happen anywhere. They can happen within the
service you have created, in a request you made to another service for some
additional information, or, even worse, in a request that the other service had
to make in order to aggregate its response.
As a result, specific, pinpoint traceability is the only practical way to keep
track of errors and debug the situation. Of course, just returning a
400 Bad Request error response might suffice, but in some instances an
application may have to react in a certain manner when it receives a particular
400 Bad Request error, for example rolling back a database commit only for a
specific error.
Insanic provides common error codes, accessible in :code:`insanic.errors.GlobalErrorCodes`,
but each service may provide its own specific error codes with one restriction:
the error code must be an :code:`Enum` type.
To create your own:
.. code-block:: python
# example/errors.py
from enum import Enum
class MyErrorCodes(Enum):
not_going_fast_enough = 10001
too_slow = 10002
help_me = 10003
When set to the :code:`error_code` attribute of an Insanic
exception (we will get to this a bit later), the enum will be unpacked
by Insanic's error handler into a JSON object. So in our example,
:code:`MyErrorCodes.not_going_fast_enough` will be unpacked like so:
.. code-block:: json
{
"name":"not_going_fast_enough",
"value":10001
}
2. Insanic APIException
-------------------------
To actually create the error, Insanic provides its own :code:`APIException` base class for its own
error handling. This exception will create the response as shown in the first example.
There are 4 attributes to the exception.
#. :code:`status_code`: an integer representing the status code of the response.
#. :code:`description`: a string with human readable description of the error.
#. :code:`error_code`: an Enum as explained in the ErrorCode section above.
#. :code:`message`: a string with a general message.
There are several exceptions provided as base templates, but it is
up to the developer to define how detailed the exceptions will be.
Let's create some example exceptions:
.. code-block:: python
# example/exceptions.py
from insanic import status
from insanic.exceptions import APIException, BadRequest
from .errors import MyErrorCodes
class TooSlowException(APIException):
status_code = status.HTTP_408_REQUEST_TIMEOUT
description = "Too slow!"
error_code = MyErrorCodes.too_slow
class MyBadRequest(BadRequest):
error_code = MyErrorCodes.not_going_fast_enough
And now to use these exceptions...
.. code-block:: python
# example/views.py
from insanic import status
from insanic.exceptions import APIException
from .app import app # your insanic application
from .errors import MyErrorCodes
from .exceptions import TooSlowException
    @app.route('/too_slow')
def too_slow_view(request, *args, **kwargs):
raise TooSlowException()
@app.route('/very_slow')
def very_slow_view(request, *args, **kwargs):
raise TooSlowException("This is very slow!")
@app.route('/help_me_too_slow')
def help_me_too_slow(request, *args, **kwargs):
raise APIException(
"HELP ME!",
error_code=MyErrorCodes.help_me,
status_code=status.HTTP_504_GATEWAY_TIMEOUT
)
3. Putting ErrorCodes and Exceptions together
-----------------------------------------------
With exceptions and error codes defined, Insanic's error handler
will serialize the exception to the error response structure as shown in the
example.
.. code-block:: python
class TooSlowException(APIException):
status_code = status.HTTP_408_REQUEST_TIMEOUT
description = "Too slow!"
error_code = MyErrorCodes.too_slow
With the exception we created above, Insanic's error handler will create the following response.
.. code-block:: json
{
"message":"An unknown error occurred",
"description":"Too slow!",
"error_code":{
"name":"too_slow",
"value":10002
}
}
- The :code:`status_code` becomes the status code of the response.
- The :code:`description` fills the :code:`description` field.
- The :code:`message` comes from the exception's :code:`message` attribute.
- The :code:`error_code` enum is unpacked into the :code:`error_code` object, as illustrated below.
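For example, hitting the :code:`/very_slow` route defined earlier should, following the
behaviour of the first example (where the positional argument becomes the description),
produce a response along these lines:
.. code-block:: bash
    $ curl -i http://0.0.0.0:8000/very_slow
    HTTP/1.1 408 Request Timeout
    Content-Type: application/json
    {
        "message":"An unknown error occurred",
        "description":"This is very slow!",
        "error_code":{
            "name":"too_slow",
            "value":10002
        }
    } # response was formatted for readability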
What about NON-Insanic Exceptions?
-----------------------------------
Any Sanic exception will automatically be converted to an
Insanic exception, and its message will be serialized
into Insanic's error message format.
.. code-block:: python
from sanic.exceptions import ServiceUnavailable
@app.route('/sanic')
def raise_sanic(request, *args, **kwargs):
raise ServiceUnavailable('sanic error')
Will result in...
.. code-block:: bash
$ curl -i http://0.0.0.0:8000/sanic
HTTP/1.1 503 Service Unavailable
Content-Length: 126
Content-Type: application/json
Connection: keep-alive
Keep-Alive: 60
{
"message":"Service Unavailable",
"description":"sanic error",
"error_code":{
"name":"insanic_error_unspecified",
"value":999998
}
}
Any NON-Insanic and NON-Sanic exceptions raised during the process of a request
will default to a :code:`500 Internal Server Error`.
.. code-block:: python
@app.route('/builtin')
    def raise_builtin(request, *args, **kwargs):
        raise SystemError('builtin error')
.. code-block:: bash
$ curl -i http://localhost:8000/builtin
HTTP/1.1 500 Internal Server Error
Content-Length: 167
Content-Type: application/json
Connection: keep-alive
Keep-Alive: 60
{
"message":"Server Error",
"description":"Something has blown up really bad. Somebody should be notified?",
"error_code":{
"name":"insanic_unknown_error",
"value":999999
}
}
See Also...
-------------
- Refer to the :ref:`api-insanic-errors` module for insanic's ErrorCodes.
- Refer to the :ref:`api-insanic-exceptions` module for Insanic's Exceptions.
- Refer to the :ref:`api-insanic-status` module for easy status codes.
|
PypiClean
|
/safegate_pro-2021.7.6-py3-none-any.whl/homeassistant/components/tado/binary_sensor.py
|
import logging
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_BATTERY,
DEVICE_CLASS_CONNECTIVITY,
DEVICE_CLASS_POWER,
DEVICE_CLASS_WINDOW,
BinarySensorEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .const import (
DATA,
DOMAIN,
SIGNAL_TADO_UPDATE_RECEIVED,
TYPE_AIR_CONDITIONING,
TYPE_BATTERY,
TYPE_HEATING,
TYPE_HOT_WATER,
TYPE_POWER,
)
from .entity import TadoDeviceEntity, TadoZoneEntity
_LOGGER = logging.getLogger(__name__)
DEVICE_SENSORS = {
TYPE_BATTERY: [
"battery state",
"connection state",
],
TYPE_POWER: [
"connection state",
],
}
ZONE_SENSORS = {
TYPE_HEATING: [
"power",
"link",
"overlay",
"early start",
"open window",
],
TYPE_AIR_CONDITIONING: [
"power",
"link",
"overlay",
"open window",
],
TYPE_HOT_WATER: ["power", "link", "overlay"],
}
async def async_setup_entry(
hass: HomeAssistant, entry: ConfigEntry, async_add_entities
):
"""Set up the Tado sensor platform."""
tado = hass.data[DOMAIN][entry.entry_id][DATA]
devices = tado.devices
zones = tado.zones
entities = []
# Create device sensors
for device in devices:
if "batteryState" in device:
device_type = TYPE_BATTERY
else:
device_type = TYPE_POWER
entities.extend(
[
TadoDeviceBinarySensor(tado, device, variable)
for variable in DEVICE_SENSORS[device_type]
]
)
# Create zone sensors
for zone in zones:
zone_type = zone["type"]
if zone_type not in ZONE_SENSORS:
_LOGGER.warning("Unknown zone type skipped: %s", zone_type)
continue
entities.extend(
[
TadoZoneBinarySensor(tado, zone["name"], zone["id"], variable)
for variable in ZONE_SENSORS[zone_type]
]
)
if entities:
async_add_entities(entities, True)
class TadoDeviceBinarySensor(TadoDeviceEntity, BinarySensorEntity):
"""Representation of a tado Sensor."""
def __init__(self, tado, device_info, device_variable):
"""Initialize of the Tado Sensor."""
self._tado = tado
super().__init__(device_info)
self.device_variable = device_variable
self._unique_id = f"{device_variable} {self.device_id} {tado.home_id}"
self._state = None
async def async_added_to_hass(self):
"""Register for sensor updates."""
self.async_on_remove(
async_dispatcher_connect(
self.hass,
SIGNAL_TADO_UPDATE_RECEIVED.format(
self._tado.home_id, "device", self.device_id
),
self._async_update_callback,
)
)
self._async_update_device_data()
@property
def unique_id(self):
"""Return the unique id."""
return self._unique_id
@property
def name(self):
"""Return the name of the sensor."""
return f"{self.device_name} {self.device_variable}"
@property
def is_on(self):
"""Return true if sensor is on."""
return self._state
@property
def device_class(self):
"""Return the class of this sensor."""
if self.device_variable == "battery state":
return DEVICE_CLASS_BATTERY
if self.device_variable == "connection state":
return DEVICE_CLASS_CONNECTIVITY
return None
@callback
def _async_update_callback(self):
"""Update and write state."""
self._async_update_device_data()
self.async_write_ha_state()
@callback
def _async_update_device_data(self):
"""Handle update callbacks."""
try:
self._device_info = self._tado.data["device"][self.device_id]
except KeyError:
return
if self.device_variable == "battery state":
self._state = self._device_info["batteryState"] == "LOW"
elif self.device_variable == "connection state":
self._state = self._device_info.get("connectionState", {}).get(
"value", False
)
class TadoZoneBinarySensor(TadoZoneEntity, BinarySensorEntity):
"""Representation of a tado Sensor."""
def __init__(self, tado, zone_name, zone_id, zone_variable):
"""Initialize of the Tado Sensor."""
self._tado = tado
super().__init__(zone_name, tado.home_id, zone_id)
self.zone_variable = zone_variable
self._unique_id = f"{zone_variable} {zone_id} {tado.home_id}"
self._state = None
self._state_attributes = None
self._tado_zone_data = None
async def async_added_to_hass(self):
"""Register for sensor updates."""
self.async_on_remove(
async_dispatcher_connect(
self.hass,
SIGNAL_TADO_UPDATE_RECEIVED.format(
self._tado.home_id, "zone", self.zone_id
),
self._async_update_callback,
)
)
self._async_update_zone_data()
@property
def unique_id(self):
"""Return the unique id."""
return self._unique_id
@property
def name(self):
"""Return the name of the sensor."""
return f"{self.zone_name} {self.zone_variable}"
@property
def is_on(self):
"""Return true if sensor is on."""
return self._state
@property
def device_class(self):
"""Return the class of this sensor."""
if self.zone_variable == "early start":
return DEVICE_CLASS_POWER
if self.zone_variable == "link":
return DEVICE_CLASS_CONNECTIVITY
if self.zone_variable == "open window":
return DEVICE_CLASS_WINDOW
if self.zone_variable == "overlay":
return DEVICE_CLASS_POWER
if self.zone_variable == "power":
return DEVICE_CLASS_POWER
return None
@property
def extra_state_attributes(self):
"""Return the state attributes."""
return self._state_attributes
@callback
def _async_update_callback(self):
"""Update and write state."""
self._async_update_zone_data()
self.async_write_ha_state()
@callback
def _async_update_zone_data(self):
"""Handle update callbacks."""
try:
self._tado_zone_data = self._tado.data["zone"][self.zone_id]
except KeyError:
return
if self.zone_variable == "power":
self._state = self._tado_zone_data.power == "ON"
elif self.zone_variable == "link":
self._state = self._tado_zone_data.link == "ONLINE"
elif self.zone_variable == "overlay":
self._state = self._tado_zone_data.overlay_active
if self._tado_zone_data.overlay_active:
self._state_attributes = {
"termination": self._tado_zone_data.overlay_termination_type
}
elif self.zone_variable == "early start":
self._state = self._tado_zone_data.preparation
elif self.zone_variable == "open window":
self._state = bool(
self._tado_zone_data.open_window
or self._tado_zone_data.open_window_detected
)
self._state_attributes = self._tado_zone_data.open_window_attr
|
PypiClean
|
/airbyte-cdk-0.51.10.tar.gz/airbyte-cdk-0.51.10/airbyte_cdk/sources/streams/http/requests_native_auth/token.py
|
import base64
from itertools import cycle
from typing import List
from airbyte_cdk.sources.streams.http.requests_native_auth.abstract_token import AbstractHeaderAuthenticator
class MultipleTokenAuthenticator(AbstractHeaderAuthenticator):
"""
Builds auth header, based on the list of tokens provided.
Auth header is changed per each `get_auth_header` call, using each token in cycle.
The token is attached to each request via the `auth_header` header.
"""
@property
def auth_header(self) -> str:
return self._auth_header
@property
def token(self) -> str:
return f"{self._auth_method} {next(self._tokens_iter)}"
def __init__(self, tokens: List[str], auth_method: str = "Bearer", auth_header: str = "Authorization"):
self._auth_method = auth_method
self._auth_header = auth_header
self._tokens = tokens
self._tokens_iter = cycle(self._tokens)
class TokenAuthenticator(AbstractHeaderAuthenticator):
"""
Builds auth header, based on the token provided.
The token is attached to each request via the `auth_header` header.
"""
@property
def auth_header(self) -> str:
return self._auth_header
@property
def token(self) -> str:
return f"{self._auth_method} {self._token}"
def __init__(self, token: str, auth_method: str = "Bearer", auth_header: str = "Authorization"):
self._auth_header = auth_header
self._auth_method = auth_method
self._token = token
class BasicHttpAuthenticator(AbstractHeaderAuthenticator):
"""
    Builds auth based on the basic authentication scheme as defined by RFC 7617, which transmits credentials as user-id/password pairs encoded using base64
https://developer.mozilla.org/en-US/docs/Web/HTTP/Authentication#basic_authentication_scheme
"""
@property
def auth_header(self) -> str:
return self._auth_header
@property
def token(self) -> str:
return f"{self._auth_method} {self._token}"
def __init__(self, username: str, password: str = "", auth_method: str = "Basic", auth_header: str = "Authorization"):
auth_string = f"{username}:{password}".encode("utf8")
b64_encoded = base64.b64encode(auth_string).decode("utf8")
self._auth_header = auth_header
self._auth_method = auth_method
self._token = b64_encoded
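# A minimal usage sketch (hypothetical token values). Each authenticator exposes its
# header through `get_auth_header()` (inherited from AbstractHeaderAuthenticator), which
# is what attaches the header to outgoing requests:
#
#   auth = TokenAuthenticator(token="my-api-token")
#   auth.get_auth_header()   # -> {"Authorization": "Bearer my-api-token"}
#
#   basic = BasicHttpAuthenticator(username="user", password="secret")
#   basic.get_auth_header()  # -> {"Authorization": "Basic dXNlcjpzZWNyZXQ="}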
|
PypiClean
|
/steamers_misago-0.0.2-py3-none-any.whl/misago/categories/views/permsadmin.py
|
from django.contrib import messages
from django.shortcuts import redirect
from django.utils.translation import gettext_lazy as _
from ...acl.cache import clear_acl_cache
from ...acl.forms import get_permissions_forms
from ...acl.models import Role
from ...acl.views import RoleAdmin, RolesList
from ...admin.views import generic
from ..forms import (
CategoryRoleForm,
CategoryRolesACLFormFactory,
RoleCategoryACLFormFactory,
)
from ..models import Category, CategoryRole, RoleCategoryACL
from .categoriesadmin import CategoriesList, CategoryAdmin
class CategoryRoleAdmin(generic.AdminBaseMixin):
root_link = "misago:admin:permissions:categories:index"
model = CategoryRole
templates_dir = "misago/admin/categoryroles"
message_404 = _("Requested role does not exist.")
class CategoryRolesList(CategoryRoleAdmin, generic.ListView):
ordering = (("name", None),)
class RoleFormMixin:
def real_dispatch(self, request, target):
form = CategoryRoleForm(instance=target)
perms_forms = get_permissions_forms(target)
if request.method == "POST":
perms_forms = get_permissions_forms(target, request.POST)
valid_forms = 0
for permissions_form in perms_forms:
if permissions_form.is_valid():
valid_forms += 1
form = CategoryRoleForm(request.POST, instance=target)
if form.is_valid():
if len(perms_forms) == valid_forms:
new_permissions = {}
for permissions_form in perms_forms:
cleaned_data = permissions_form.cleaned_data
new_permissions[permissions_form.prefix] = cleaned_data
form.instance.permissions = new_permissions
form.instance.save()
messages.success(
request, self.message_submit % {"name": target.name}
)
if "stay" in request.POST:
return redirect(request.path)
return redirect(self.root_link)
form.add_error(None, _("Form contains errors."))
return self.render(
request, {"form": form, "target": target, "perms_forms": perms_forms}
)
class NewCategoryRole(RoleFormMixin, CategoryRoleAdmin, generic.ModelFormView):
message_submit = _('New role "%(name)s" has been saved.')
class EditCategoryRole(RoleFormMixin, CategoryRoleAdmin, generic.ModelFormView):
message_submit = _('Role "%(name)s" has been changed.')
class DeleteCategoryRole(CategoryRoleAdmin, generic.ButtonView):
def check_permissions(self, request, target):
if target.special_role:
            message = _('Role "%(name)s" is a special role and can\'t be deleted.')
return message % {"name": target.name}
def button_action(self, request, target):
target.delete()
message = _('Role "%(name)s" has been deleted.')
messages.success(request, message % {"name": target.name})
class CategoryPermissions(CategoryAdmin, generic.ModelFormView):
templates_dir = "misago/admin/categoryroles"
template = "categoryroles.html"
def real_dispatch(self, request, target):
category_roles = CategoryRole.objects.order_by("name")
assigned_roles = {}
for acl in target.category_role_set.select_related("category_role"):
assigned_roles[acl.role_id] = acl.category_role
forms = []
forms_are_valid = True
for role in Role.objects.order_by("name"):
FormType = CategoryRolesACLFormFactory(
role, category_roles, assigned_roles.get(role.pk)
)
if request.method == "POST":
forms.append(FormType(request.POST, prefix=role.pk))
if not forms[-1].is_valid():
forms_are_valid = False
else:
forms.append(FormType(prefix=role.pk))
if request.method == "POST" and forms_are_valid:
target.category_role_set.all().delete()
new_permissions = []
for form in forms:
if form.cleaned_data["category_role"]:
new_permissions.append(
RoleCategoryACL(
role=form.role,
category=target,
category_role=form.cleaned_data["category_role"],
)
)
if new_permissions:
RoleCategoryACL.objects.bulk_create(new_permissions)
clear_acl_cache()
message = _("Category %(name)s permissions have been changed.")
messages.success(request, message % {"name": target.name})
if "stay" in request.POST:
return redirect(request.path)
return redirect(self.root_link)
return self.render(request, {"forms": forms, "target": target})
CategoriesList.add_item_action(
name=_("Category permissions"),
icon="fa fa-adjust",
link="misago:admin:categories:nodes:permissions",
style="success",
)
class RoleCategoriesACL(RoleAdmin, generic.ModelFormView):
templates_dir = "misago/admin/categoryroles"
template = "rolecategories.html"
def real_dispatch(self, request, target):
categories = Category.objects.all_categories()
roles = CategoryRole.objects.order_by("name")
if not categories:
messages.info(request, _("No categories exist."))
return redirect(self.root_link)
choices = {}
for choice in target.categories_acls.select_related("category_role"):
choices[choice.category_id] = choice.category_role
forms = []
forms_are_valid = True
for category in categories:
category.level_range = range(category.level - 1)
FormType = RoleCategoryACLFormFactory(
category, roles, choices.get(category.pk)
)
if request.method == "POST":
forms.append(FormType(request.POST, prefix=category.pk))
if not forms[-1].is_valid():
forms_are_valid = False
else:
forms.append(FormType(prefix=category.pk))
if request.method == "POST" and forms_are_valid:
target.categories_acls.all().delete()
new_permissions = []
for form in forms:
if form.cleaned_data["role"]:
new_permissions.append(
RoleCategoryACL(
role=target,
category=form.category,
category_role=form.cleaned_data["role"],
)
)
if new_permissions:
RoleCategoryACL.objects.bulk_create(new_permissions)
clear_acl_cache()
message = _("Category permissions for role %(name)s have been changed.")
messages.success(request, message % {"name": target.name})
if "stay" in request.POST:
return redirect(request.path)
return redirect(self.root_link)
return self.render(request, {"forms": forms, "target": target})
RolesList.add_item_action(
name=_("Categories permissions"),
icon="fa fa-comments-o",
link="misago:admin:permissions:users:categories",
style="success",
)
|
PypiClean
|
/PyFunceble-4.1.3.tar.gz/PyFunceble-4.1.3/README.rst
|
.. image:: https://raw.githubusercontent.com/PyFunceble/logo/master/Green/HD/RM.png
The tool to check the availability or syntax of domain, IP or URL
-----------------------------------------------------------------
.. image:: https://img.shields.io/badge/code%20style-black-000000.png
:target: https://github.com/ambv/black
.. image:: https://coveralls.io/repos/github/funilrys/PyFunceble/badge.png?branch=master
:target: https://coveralls.io/github/funilrys/PyFunceble?branch=master
.. image:: https://img.shields.io/github/license/funilrys/PyFunceble.png
:target: https://github.com/funilrys/PyFunceble/blob/master/LICENSE
.. image:: https://img.shields.io/pypi/v/pyfunceble.png
:target: https://pypi.org/project/pyfunceble
.. image:: https://img.shields.io/github/issues/funilrys/PyFunceble.png
:target: https://github.com/funilrys/PyFunceble/issues
.. image:: https://pepy.tech/badge/pyfunceble
:target: https://pepy.tech/project/pyfunceble
.. image:: https://pepy.tech/badge/pyfunceble/month
:target: https://pepy.tech/project/pyfunceble
.. image:: https://pepy.tech/badge/pyfunceble/week
:target: https://pepy.tech/project/pyfunceble
**PyFunceble** aims to provide an accurate availability check through the usage
of multiple sources, for example - to list only a few:
- the WHOIS record(s).
- the DNS record(s).
- the HTTP status code.
PyFunceble can be included in your existing project through:
- its standard built-in CLI implementation.
- its `Python API`_.
- the `PyFunceble web-worker`_ project that provides the core functionalities
of PyFunceble behind a web API.
The PyFunceble CLI can test from a hosts file, a plain list of subjects, an
AdBlock filter list or even an RPZ record.
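For instance, assuming the usual flag names, a single domain or a whole file of
subjects can be tested straight from the command line:
::
    $ pyfunceble --domain github.com
    $ pyfunceble --file my_hosts_file.txt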
As of today, PyFunceble is running actively - if not daily - within several
servers, laptops, PCs, and Raspberry Pis. It is even used - thanks to our
auto continue mechanism - with CI engines like GitHub Action, Travis CI, or
GitLab CI.
Happy testing with PyFunceble!
.. image:: https://github.com/PyFunceble/gifs/raw/master/domain.gif
:target: https://github.com/PyFunceble/gifs/raw/master/domain.gif
.. _Python API: https://pyfunceble.readthedocs.io/en/latest/api/index.html
.. _PyFunceble web-worker: https://github.com/pyfunceble/web-worker
___________________________________________
Installation
------------
:code:`pip`
^^^^^^^^^^^
::
$ pip install --upgrade pyfunceble
$ pyfunceble --version
:code:`docker`
^^^^^^^^^^^^^^
::
$ docker pull pyfunceble/pyfunceble
$ docker run -it pyfunceble/pyfunceble --version
___________________________________________
Documentation as the place to be!
---------------------------------
Want to know more details about **PyFunceble**?
I invite you to read the documentation at https://pyfunceble.readthedocs.io/en/latest/!
Want a local copy? I've got you covered!
Simply run the following and enjoy the documentation!
::
$ pip install --user -r requirements.docs.txt # Install dependencies.
$ cd docs/
$ make clean html
$ palemoon _build/html/index.html # palemoon or whatever browser you use.
.. note::
You are also invited to submit changes and improvements to the documentation
through a new Pull Request.
___________________________________________
Supporting the project
----------------------
`PyFunceble`_, `Dead-Hosts`_, and all other analog projects are powered by free
time and a lot of coffee!
Does this project help you and/or do you like it?
GitHub Sponsor
^^^^^^^^^^^^^^
`@funilrys`_ is part of the GitHub Sponsor program!
.. image:: https://github.com/PyFunceble/logo/raw/master/pyfunceble_github.png
:target: https://github.com/sponsors/funilrys
:height: 70px
`Sponsor me`_!
Ko-Fi
^^^^^
Don't want to use the GitHub Sponsor program?
Single donations are welcome too!
.. image:: https://az743702.vo.msecnd.net/cdn/kofi3.png
:target: https://ko-fi.com/V7V3EH2Y
:height: 70px
`Buy me a coffee`_!
___________________________________________
Contributors
------------
Thanks to those awesome people for their awesome and crazy idea(s),
contribution(s) and/or issue reports which made or make `PyFunceble`_ a better tool.
::
_______ _ _ _ _
|__ __| | | | | | | |
| | | |__ __ _ _ __ | | _____ | |_ ___ _ _ ___ _ _ | |
| | | '_ \ / _` | '_ \| |/ / __| | __/ _ \ | | | |/ _ \| | | | | |
| | | | | | (_| | | | | <\__ \ | || (_) | | |_| | (_) | |_| | |_|
|_| |_| |_|\__,_|_| |_|_|\_\___/ \__\___/ \__, |\___/ \__,_| (_)
__/ |
|___/
- avatartw - `@avatartw`_
- Avinash Reddy - `@AvinashReddy3108`_
- BigDargon - `@bigdargon`_
- Daniel - `@dnmTX`_
- gwarser - `@gwarser`_
- Haris Gušić - `@veracioux`_
- hawkeye116477 - `@hawkeye116477`_
- Human Being - `@T145`_
- Imre Kristoffer Eilertsen - `@DandelionSprout`_
- jawz101 - `@jawz101`_
- keczuppp - `@keczuppp`_
- kowith337 - `@kowith337`_
- Mitchell Krog - `@mitchellkrogza`_
- NeolithEra - `@NeolithEra`_
- Odyseus - `@Odyseus`_
- opav - `@opav`_
- Reza Rizqullah - `@ybreza`_
- rusty-snake - `@rusty-snake`_
- ScriptTiger - `@ScriptTiger`_
- sjhgvr - `@sjhgvr`_
- speedmann - `@speedmann`_
- spirillen - `@spirillen`_
- The Unknown - `@AnonymousPoster`_
- WaLLy3K - `@WaLLy3K`_
- xxcriticxx - `@xxcriticxx`_
- Yuki2718 - `@Yuki2718`_
- Zachinquarantine - `@Zachinquarantine`_
- ZeroDot1 - `@ZeroDot1`_
___________________________________________
Special Thanks
--------------
Thanks to those awesome organization(s), tool(s) and/or people for
* Their awesome documentation
* Their awesome repository
* Their awesome tool/software/source code
* Their breaking reports
* Their contributions
* Their current work/purpose
* Their promotion of Py-Funceble
* Their support
* Their testing reports
which helped and/or still help me build, test and/or make `PyFunceble`_ a better tool.
::
_______ _ _ _ _
|__ __| | | | | | | |
| | | |__ __ _ _ __ | | _____ | |_ ___ _ _ ___ _ _ | |
| | | '_ \ / _` | '_ \| |/ / __| | __/ _ \ | | | |/ _ \| | | | | |
| | | | | | (_| | | | | <\__ \ | || (_) | | |_| | (_) | |_| | |_|
|_| |_| |_|\__,_|_| |_|_|\_\___/ \__\___/ \__, |\___/ \__,_| (_)
__/ |
|___/
- Adam Warner - `@PromoFaux`_
- Adblock Plus - `@adblockplus`_
- asciinema - `@asciinema`_
- Bob Halley - `@rthalley`_ (`DNSPython`_)
- Chris Griffith - `@cdgriffith`_ (`Box`_)
- Daniel - `@dnmTX`_
- Jonathan Hartley - `@tartley`_ (`colorama`_)
- `IANA`_ - `ICANN`_ (`Root Zone Database`_)
- `Iterative`_ (`shtab`_)
- Kenneth Reitz - `@kennethreitz`_ (`requests`_)
- Mitchell Krog - `@mitchellkrogza`_
- Mohammad Fares - `@faressoft`_ (`Terminalizer`_)
- Pi-Hole - `@pi-hole`_
- Public Suffix List - `@publicsuffix`_
- Reza Rizqullah - `@ybreza`_
- Saurabh Kumar - `@theskumar`_ (`python-dotenv`_)
- ScriptTiger - `@ScriptTiger`_
- SMed79 - `@SMed79`_
- spirillen - `@spirillen`_
- The YAML Project - `@yaml`_ (`pyyaml`_)
- `yWorks`_ - (`yEd Graph Editor`_)
___________________________________________
License
-------
::
Copyright 2017, 2018, 2019, 2020, 2022 Nissar Chababy
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
.. _Box: https://github.com/cdgriffith/Box
.. _colorama: https://github.com/tartley/colorama
.. _Dead-Hosts: https://github.com/dead-hosts
.. _DNSPython: https://github.com/rthalley/dnspython
.. _Funceble: https://github.com/funilrys/funceble
.. _IANA: https://www.iana.org/
.. _ICANN: https://www.icann.org/
.. _Iterative: https://github.com/iterative
.. _PyFunceble: https://github.com/funilrys/PyFunceble
.. _python-dotenv: https://github.com/theskumar/python-dotenv
.. _pyyaml: https://github.com/yaml/pyyaml
.. _requests: https://github.com/kennethreitz/requests
.. _Root Zone Database: https://www.iana.org/domains/root/db
.. _shtab: https://github.com/iterative/shtab
.. _Terminalizer: https://github.com/faressoft/terminalizer
.. _yEd Graph Editor: https://www.yworks.com/products/yed
.. _yWorks: https://www.yworks.com
.. _@adblockplus: https://github.com/adblockplus
.. _@AnonymousPoster: https://www.mypdns.org/p/AnonymousPoster/
.. _@asciinema: https://github.com/asciinema
.. _@avatartw: https://github.com/avatartw
.. _@AvinashReddy3108: https://github.com/AvinashReddy3108
.. _@bigdargon: https://github.com/bigdargon
.. _@cdgriffith: https://github.com/cdgriffith
.. _@DandelionSprout: https://github.com/DandelionSprout
.. _@dnmTX: https://github.com/dnmTX
.. _@faressoft: https://github.com/faressoft
.. _@funilrys: https://github.com/funilrys
.. _@gwarser: https://github.com/gwarser
.. _@hawkeye116477: https://github.com/hawkeye116477
.. _@jawz101: https://github.com/jawz101
.. _@keczuppp: https://github.com/keczuppp
.. _@kennethreitz: https://github.com/kennethreitz
.. _@kowith337: https://github.com/kowith337
.. _@mitchellkrogza: https://github.com/mitchellkrogza
.. _@NeolithEra: https://github.com/NeolithEra
.. _@Odyseus: https://github.com/Odyseus
.. _@opav: https://github.com/opav
.. _@pi-hole: https://github.com/pi-hole/pi-hole
.. _@PromoFaux: https://github.com/PromoFaux
.. _@publicsuffix: https://github.com/publicsuffix
.. _@rthalley: https://github.com/rthalley
.. _@rusty-snake: https://github.com/rusty-snake
.. _@ScriptTiger: https://github.com/ScriptTiger
.. _@sjhgvr: https://github.com/sjhgvr
.. _@SMed79: https://github.com/SMed79
.. _@speedmann: https://github.com/speedmann
.. _@spirillen: https://www.mypdns.org/p/Spirillen/
.. _@T145: https://github.com/T145
.. _@tartley: https://github.com/tartley
.. _@theskumar: https://github.com/theskumar
.. _@veracioux: https://github.com/veracioux
.. _@WaLLy3K: https://github.com/WaLLy3K
.. _@xxcriticxx: https://github.com/xxcriticxx
.. _@yaml: https://github.com/yaml
.. _@ybreza: https://github.com/ybreza
.. _@Yuki2718: https://github.com/Yuki2718
.. _@Zachinquarantine: https://github.com/Zachinquarantine
.. _@ZeroDot1: https://github.com/ZeroDot1
.. _documentation for more GIF: https://pyfunceble.readthedocs.io/en/latest/in-action.html
.. _Sponsor me: https://github.com/sponsors/funilrys
.. _Buy me a coffee: https://ko-fi.com/V7V3EH2Y
|
PypiClean
|
/pigweed-0.0.14.tar.gz/pigweed-0.0.14/pw_env_setup/json_visitor.py
|
"""Serializes an Environment into a JSON file."""
import json
# Disable super() warnings since this file must be Python 2 compatible.
# pylint: disable=super-with-arguments
class JSONVisitor(object): # pylint: disable=useless-object-inheritance
"""Serializes an Environment into a JSON file."""
def __init__(self, *args, **kwargs):
super(JSONVisitor, self).__init__(*args, **kwargs)
self._data = {}
def serialize(self, env, outs):
self._data = {
'modify': {},
'set': {},
}
env.accept(self)
json.dump(self._data, outs, indent=4, separators=(',', ': '))
outs.write('\n')
self._data = {}
def visit_set(self, set): # pylint: disable=redefined-builtin
self._data['set'][set.name] = set.value
def visit_clear(self, clear):
self._data['set'][clear.name] = None
def _initialize_path_like_variable(self, name):
default = {'append': [], 'prepend': [], 'remove': []}
self._data['modify'].setdefault(name, default)
def visit_remove(self, remove):
self._initialize_path_like_variable(remove.name)
self._data['modify'][remove.name]['remove'].append(remove.value)
if remove.value in self._data['modify'][remove.name]['append']:
self._data['modify'][remove.name]['append'].remove(remove.value)
if remove.value in self._data['modify'][remove.name]['prepend']:
self._data['modify'][remove.name]['prepend'].remove(remove.value)
def visit_prepend(self, prepend):
self._initialize_path_like_variable(prepend.name)
self._data['modify'][prepend.name]['prepend'].append(prepend.value)
if prepend.value in self._data['modify'][prepend.name]['remove']:
self._data['modify'][prepend.name]['remove'].remove(prepend.value)
def visit_append(self, append):
self._initialize_path_like_variable(append.name)
self._data['modify'][append.name]['append'].append(append.value)
if append.value in self._data['modify'][append.name]['remove']:
self._data['modify'][append.name]['remove'].remove(append.value)
def visit_echo(self, echo):
pass
def visit_comment(self, comment):
pass
def visit_command(self, command):
pass
def visit_doctor(self, doctor):
pass
def visit_blank_line(self, blank_line):
pass
def visit_function(self, function):
pass
def visit_hash(self, hash): # pylint: disable=redefined-builtin
pass
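# Illustrative usage sketch (the env object below is hypothetical; any object
# whose accept() method drives the visit_* callbacks above will work):
#
#     import sys
#     visitor = JSONVisitor()
#     visitor.serialize(env, sys.stdout)
#     # -> writes {"modify": {...}, "set": {...}} followed by a newline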
|
PypiClean
|
/experimental.portalfactoryfix-0.4.2.tar.gz/experimental.portalfactoryfix-0.4.2/README.txt
|
Overview
========
Experimental performance improvements to content creation for Plone. Primarily
this works by trying to ensure that portal factory does not cause writes to the
ZODB, by making Archetypes content portal-factory aware so that it does not
attempt to register itself in various global catalogs. Previously, portal
factory would manually unindex the content after the request, but that would still
cause writes to global data structures, creating write-conflict hotspots and ZODB
bloat for what should be a stateless operation.
This issue is being tracked by Plone at the following URL.
https://dev.plone.org/plone/ticket/9672
This patch is intended for existing versions of Plone (2.x & 3.x) to address
this issue.
It can be applied by adding the egg to a buildout and including the package's
ZCML, for example as sketched below.
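A minimal, illustrative sketch (the part name ``instance`` and the recipe are
assumptions; adapt them to your own buildout)::
    [instance]
    recipe = plone.recipe.zope2instance
    eggs +=
        experimental.portalfactoryfix
    zcml +=
        experimental.portalfactoryfix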
|
PypiClean
|
/msgraph_beta_sdk-1.0.0a9-py3-none-any.whl/msgraph/generated/groups/item/owners/ref/ref_request_builder.py
|
from __future__ import annotations
from dataclasses import dataclass
from kiota_abstractions.get_path_parameters import get_path_parameters
from kiota_abstractions.method import Method
from kiota_abstractions.request_adapter import RequestAdapter
from kiota_abstractions.request_information import RequestInformation
from kiota_abstractions.request_option import RequestOption
from kiota_abstractions.response_handler import ResponseHandler
from kiota_abstractions.serialization import Parsable, ParsableFactory
from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING, Union
if TYPE_CHECKING:
from .....models import reference_create, string_collection_response
from .....models.o_data_errors import o_data_error
class RefRequestBuilder():
"""
Provides operations to manage the collection of group entities.
"""
def __init__(self,request_adapter: RequestAdapter, path_parameters: Optional[Union[Dict[str, Any], str]] = None) -> None:
"""
Instantiates a new RefRequestBuilder and sets the default values.
Args:
pathParameters: The raw url or the Url template parameters for the request.
requestAdapter: The request adapter to use to execute the requests.
"""
if path_parameters is None:
raise Exception("path_parameters cannot be undefined")
if request_adapter is None:
raise Exception("request_adapter cannot be undefined")
# Url template to use to build the URL for the current request builder
self.url_template: str = "{+baseurl}/groups/{group%2Did}/owners/$ref{?%24top,%24skip,%24search,%24filter,%24count,%24orderby}"
url_tpl_params = get_path_parameters(path_parameters)
self.path_parameters = url_tpl_params
self.request_adapter = request_adapter
async def get(self,request_configuration: Optional[RefRequestBuilderGetRequestConfiguration] = None) -> Optional[string_collection_response.StringCollectionResponse]:
"""
The owners of the group who can be users or service principals. Nullable. If this property is not specified when creating a Microsoft 365 group, the calling user is automatically assigned as the group owner. Supports $filter (/$count eq 0, /$count ne 0, /$count eq 1, /$count ne 1); Supports $expand including nested $select. For example, /groups?$filter=startsWith(displayName,'Role')&$select=id,displayName&$expand=owners($select=id,userPrincipalName,displayName).
Args:
requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options.
Returns: Optional[string_collection_response.StringCollectionResponse]
"""
request_info = self.to_get_request_information(
request_configuration
)
from .....models.o_data_errors import o_data_error
error_mapping: Dict[str, ParsableFactory] = {
"4XX": o_data_error.ODataError,
"5XX": o_data_error.ODataError,
}
if not self.request_adapter:
raise Exception("Http core is null")
from .....models import string_collection_response
return await self.request_adapter.send_async(request_info, string_collection_response.StringCollectionResponse, error_mapping)
async def post(self,body: Optional[reference_create.ReferenceCreate] = None, request_configuration: Optional[RefRequestBuilderPostRequestConfiguration] = None) -> None:
"""
Create new navigation property ref to owners for groups
Args:
body: The request body
requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options.
"""
if body is None:
raise Exception("body cannot be undefined")
request_info = self.to_post_request_information(
body, request_configuration
)
from .....models.o_data_errors import o_data_error
error_mapping: Dict[str, ParsableFactory] = {
"4XX": o_data_error.ODataError,
"5XX": o_data_error.ODataError,
}
if not self.request_adapter:
raise Exception("Http core is null")
return await self.request_adapter.send_no_response_content_async(request_info, error_mapping)
def to_get_request_information(self,request_configuration: Optional[RefRequestBuilderGetRequestConfiguration] = None) -> RequestInformation:
"""
The owners of the group who can be users or service principals. Nullable. If this property is not specified when creating a Microsoft 365 group, the calling user is automatically assigned as the group owner. Supports $filter (/$count eq 0, /$count ne 0, /$count eq 1, /$count ne 1); Supports $expand including nested $select. For example, /groups?$filter=startsWith(displayName,'Role')&$select=id,displayName&$expand=owners($select=id,userPrincipalName,displayName).
Args:
requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options.
Returns: RequestInformation
"""
request_info = RequestInformation()
request_info.url_template = self.url_template
request_info.path_parameters = self.path_parameters
request_info.http_method = Method.GET
request_info.headers["Accept"] = ["application/json"]
if request_configuration:
request_info.add_request_headers(request_configuration.headers)
request_info.set_query_string_parameters_from_raw_object(request_configuration.query_parameters)
request_info.add_request_options(request_configuration.options)
return request_info
def to_post_request_information(self,body: Optional[reference_create.ReferenceCreate] = None, request_configuration: Optional[RefRequestBuilderPostRequestConfiguration] = None) -> RequestInformation:
"""
Create new navigation property ref to owners for groups
Args:
body: The request body
requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options.
Returns: RequestInformation
"""
if body is None:
raise Exception("body cannot be undefined")
request_info = RequestInformation()
request_info.url_template = self.url_template
request_info.path_parameters = self.path_parameters
request_info.http_method = Method.POST
if request_configuration:
request_info.add_request_headers(request_configuration.headers)
request_info.add_request_options(request_configuration.options)
request_info.set_content_from_parsable(self.request_adapter, "application/json", body)
return request_info
@dataclass
class RefRequestBuilderGetQueryParameters():
"""
The owners of the group who can be users or service principals. Nullable. If this property is not specified when creating a Microsoft 365 group, the calling user is automatically assigned as the group owner. Supports $filter (/$count eq 0, /$count ne 0, /$count eq 1, /$count ne 1); Supports $expand including nested $select. For example, /groups?$filter=startsWith(displayName,'Role')&$select=id,displayName&$expand=owners($select=id,userPrincipalName,displayName).
"""
def get_query_parameter(self,original_name: Optional[str] = None) -> str:
"""
Maps the query parameters names to their encoded names for the URI template parsing.
Args:
originalName: The original query parameter name in the class.
Returns: str
"""
if original_name is None:
raise Exception("original_name cannot be undefined")
if original_name == "count":
return "%24count"
if original_name == "filter":
return "%24filter"
if original_name == "orderby":
return "%24orderby"
if original_name == "search":
return "%24search"
if original_name == "skip":
return "%24skip"
if original_name == "top":
return "%24top"
return original_name
# Include count of items
count: Optional[bool] = None
# Filter items by property values
filter: Optional[str] = None
# Order items by property values
orderby: Optional[List[str]] = None
# Search items by search phrases
search: Optional[str] = None
# Skip the first n items
skip: Optional[int] = None
# Show only the first n items
top: Optional[int] = None
@dataclass
class RefRequestBuilderGetRequestConfiguration():
"""
Configuration for the request such as headers, query parameters, and middleware options.
"""
# Request headers
headers: Optional[Dict[str, Union[str, List[str]]]] = None
# Request options
options: Optional[List[RequestOption]] = None
# Request query parameters
query_parameters: Optional[RefRequestBuilder.RefRequestBuilderGetQueryParameters] = None
@dataclass
class RefRequestBuilderPostRequestConfiguration():
"""
Configuration for the request such as headers, query parameters, and middleware options.
"""
# Request headers
headers: Optional[Dict[str, Union[str, List[str]]]] = None
# Request options
options: Optional[List[RequestOption]] = None
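# Illustrative usage sketch (constructing the request adapter is SDK-specific
# and omitted here; the path-parameter key mirrors self.url_template above):
#
#     builder = RefRequestBuilder(request_adapter, {"group%2Did": group_id})
#     owner_refs = await builder.get()  # StringCollectionResponse of owner references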
|
PypiClean
|
/lizard-ui-5.3.tar.gz/lizard-ui-5.3/lizard_ui/static/bootstrap/docs/assets/js/bootstrap-tooltip.js
|
!function ($) {
"use strict"; // jshint ;_;
/* TOOLTIP PUBLIC CLASS DEFINITION
* =============================== */
var Tooltip = function (element, options) {
this.init('tooltip', element, options)
}
Tooltip.prototype = {
constructor: Tooltip
, init: function (type, element, options) {
var eventIn
, eventOut
, triggers
, trigger
, i
this.type = type
this.$element = $(element)
this.options = this.getOptions(options)
this.enabled = true
triggers = this.options.trigger.split(' ')
for (i = triggers.length; i--;) {
trigger = triggers[i]
if (trigger == 'click') {
this.$element.on('click.' + this.type, this.options.selector, $.proxy(this.toggle, this))
} else if (trigger != 'manual') {
eventIn = trigger == 'hover' ? 'mouseenter' : 'focus'
eventOut = trigger == 'hover' ? 'mouseleave' : 'blur'
this.$element.on(eventIn + '.' + this.type, this.options.selector, $.proxy(this.enter, this))
this.$element.on(eventOut + '.' + this.type, this.options.selector, $.proxy(this.leave, this))
}
}
this.options.selector ?
(this._options = $.extend({}, this.options, { trigger: 'manual', selector: '' })) :
this.fixTitle()
}
, getOptions: function (options) {
options = $.extend({}, $.fn[this.type].defaults, this.$element.data(), options)
if (options.delay && typeof options.delay == 'number') {
options.delay = {
show: options.delay
, hide: options.delay
}
}
return options
}
, enter: function (e) {
var defaults = $.fn[this.type].defaults
, options = {}
, self
this._options && $.each(this._options, function (key, value) {
if (defaults[key] != value) options[key] = value
}, this)
self = $(e.currentTarget)[this.type](options).data(this.type)
if (!self.options.delay || !self.options.delay.show) return self.show()
clearTimeout(this.timeout)
self.hoverState = 'in'
this.timeout = setTimeout(function() {
if (self.hoverState == 'in') self.show()
}, self.options.delay.show)
}
, leave: function (e) {
var self = $(e.currentTarget)[this.type](this._options).data(this.type)
if (this.timeout) clearTimeout(this.timeout)
if (!self.options.delay || !self.options.delay.hide) return self.hide()
self.hoverState = 'out'
this.timeout = setTimeout(function() {
if (self.hoverState == 'out') self.hide()
}, self.options.delay.hide)
}
, show: function () {
var $tip
, pos
, actualWidth
, actualHeight
, placement
, tp
, e = $.Event('show')
if (this.hasContent() && this.enabled) {
this.$element.trigger(e)
if (e.isDefaultPrevented()) return
$tip = this.tip()
this.setContent()
if (this.options.animation) {
$tip.addClass('fade')
}
placement = typeof this.options.placement == 'function' ?
this.options.placement.call(this, $tip[0], this.$element[0]) :
this.options.placement
$tip
.detach()
.css({ top: 0, left: 0, display: 'block' })
// Lizard fix: appendTo
.appendTo(document.body)
// this.options.container ? $tip.appendTo(this.options.container) : $tip.insertAfter(this.$element)
pos = this.getPosition()
actualWidth = $tip[0].offsetWidth
actualHeight = $tip[0].offsetHeight
switch (placement) {
case 'bottom':
tp = {top: pos.top + pos.height, left: pos.left + pos.width / 2 - actualWidth / 2}
break
case 'top':
tp = {top: pos.top - actualHeight, left: pos.left + pos.width / 2 - actualWidth / 2}
break
case 'left':
tp = {top: pos.top + pos.height / 2 - actualHeight / 2, left: pos.left - actualWidth}
break
case 'right':
tp = {top: pos.top + pos.height / 2 - actualHeight / 2, left: pos.left + pos.width}
break
}
this.applyPlacement(tp, placement)
this.$element.trigger('shown')
}
}
, applyPlacement: function(offset, placement){
var $tip = this.tip()
, width = $tip[0].offsetWidth
, height = $tip[0].offsetHeight
, actualWidth
, actualHeight
, delta
, replace
$tip
.offset(offset)
.addClass(placement)
.addClass('in')
actualWidth = $tip[0].offsetWidth
actualHeight = $tip[0].offsetHeight
if (placement == 'top' && actualHeight != height) {
offset.top = offset.top + height - actualHeight
replace = true
}
if (placement == 'bottom' || placement == 'top') {
delta = 0
if (offset.left < 0){
delta = offset.left * -2
offset.left = 0
$tip.offset(offset)
actualWidth = $tip[0].offsetWidth
actualHeight = $tip[0].offsetHeight
}
this.replaceArrow(delta - width + actualWidth, actualWidth, 'left')
} else {
this.replaceArrow(actualHeight - height, actualHeight, 'top')
}
if (replace) $tip.offset(offset)
}
, replaceArrow: function(delta, dimension, position){
this
.arrow()
.css(position, delta ? (50 * (1 - delta / dimension) + "%") : '')
}
, setContent: function () {
var $tip = this.tip()
, title = this.getTitle()
$tip.find('.tooltip-inner')[this.options.html ? 'html' : 'text'](title)
$tip.removeClass('fade in top bottom left right')
}
, hide: function () {
var that = this
, $tip = this.tip()
, e = $.Event('hide')
this.$element.trigger(e)
if (e.isDefaultPrevented()) return
$tip.removeClass('in')
function removeWithAnimation() {
var timeout = setTimeout(function () {
$tip.off($.support.transition.end).detach()
}, 500)
$tip.one($.support.transition.end, function () {
clearTimeout(timeout)
$tip.detach()
})
}
$.support.transition && this.$tip.hasClass('fade') ?
removeWithAnimation() :
$tip.detach()
this.$element.trigger('hidden')
return this
}
, fixTitle: function () {
var $e = this.$element
if ($e.attr('title') || typeof($e.attr('data-original-title')) != 'string') {
$e.attr('data-original-title', $e.attr('title') || '').attr('title', '')
}
}
, hasContent: function () {
return this.getTitle()
}
, getPosition: function () {
var el = this.$element[0]
return $.extend({}, (typeof el.getBoundingClientRect == 'function') ? el.getBoundingClientRect() : {
width: el.offsetWidth
, height: el.offsetHeight
}, this.$element.offset())
}
, getTitle: function () {
var title
, $e = this.$element
, o = this.options
title = $e.attr('data-original-title')
|| (typeof o.title == 'function' ? o.title.call($e[0]) : o.title)
return title
}
, tip: function () {
return this.$tip = this.$tip || $(this.options.template)
}
, arrow: function(){
return this.$arrow = this.$arrow || this.tip().find(".tooltip-arrow")
}
, validate: function () {
if (!this.$element[0].parentNode) {
this.hide()
this.$element = null
this.options = null
}
}
, enable: function () {
this.enabled = true
}
, disable: function () {
this.enabled = false
}
, toggleEnabled: function () {
this.enabled = !this.enabled
}
, toggle: function (e) {
// (Lizard) ensure click event is canceled for popovers
if (e) {
e.stopPropagation();
e.preventDefault();
}
var self = e ? $(e.currentTarget)[this.type](this._options).data(this.type) : this
self.tip().hasClass('in') ? self.hide() : self.show()
}
, destroy: function () {
this.hide().$element.off('.' + this.type).removeData(this.type)
}
}
/* TOOLTIP PLUGIN DEFINITION
* ========================= */
var old = $.fn.tooltip
$.fn.tooltip = function ( option ) {
return this.each(function () {
var $this = $(this)
, data = $this.data('tooltip')
, options = typeof option == 'object' && option
if (!data) $this.data('tooltip', (data = new Tooltip(this, options)))
if (typeof option == 'string') data[option]()
})
}
$.fn.tooltip.Constructor = Tooltip
$.fn.tooltip.defaults = {
animation: true
, placement: 'top'
, selector: false
, template: '<div class="tooltip"><div class="tooltip-arrow"></div><div class="tooltip-inner"></div></div>'
, trigger: 'hover focus'
, title: ''
, delay: 0
, html: false
, container: false
}
/* TOOLTIP NO CONFLICT
* =================== */
$.fn.tooltip.noConflict = function () {
$.fn.tooltip = old
return this
}
}(window.jQuery);
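// Illustrative usage sketch (selector and options are hypothetical):
//   $('[rel="tooltip"]').tooltip({ placement: 'right', delay: { show: 200, hide: 0 } });
// Options are merged with $.fn.tooltip.defaults (declared above) and any
// data-* attributes found on the matched elements.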
|
PypiClean
|
/spinnaker_spinner-2.5.0.tar.gz/spinnaker_spinner-2.5.0/spinner/coordinates.py
|
from collections import namedtuple
################################################################################
# Base Classes (Internal Use Only)
################################################################################
class _ElementwiseCoordsMixin(object):
"""
Support for common operations on coordinates for which simple element-wise
operators are adequate.
"""
def __add__(self, other):
"""
Performs element-wise addition.
"""
assert(len(self) == len(other))
return type(self)(*(a+b for (a,b) in zip(self,other)))
def __sub__(self, other):
"""
Performs element-wise subtraction.
"""
assert(len(self) == len(other))
return type(self)(*(a-b for (a,b) in zip(self,other)))
def __abs__(self):
"""
Element-wise absolute.
"""
return type(self)(*(abs(v) for v in self))
def __repr__(self):
return "%s(%s)"%(
type(self).__name__,
", ".join(map(repr, self))
)
class _HexCoordsMixin(_ElementwiseCoordsMixin):
"""
Support for common operations on hexagonal coordinates.
"""
def __init__(self):
_ElementwiseCoordsMixin.__init__(self)
def magnitude(self):
"""
Magnitude
"""
# Pad to a 3-field value if 2D version.
v = (list(self) + [0])[:3]
from spinner import topology
return topology.manhattan(topology.to_shortest_path(v))
class _CartesianCoordsMixin(_ElementwiseCoordsMixin):
"""
Support for common operations on Cartesian coordinates.
"""
def __init__(self, *args, **kwargs):
_ElementwiseCoordsMixin.__init__(self)
def to_positive(self):
"""
Return a positive-only version of the coordinate.
"""
return type(self)(*(abs(v) for v in self))
def magnitude(self):
"""
Magnitude (Euclidean distance)
"""
from spinner import topology
return topology.euclidean(self)
_HexagonalTuple = namedtuple("_HexagonalTuple", ["x","y","z"])
_Hexagonal2DTuple = namedtuple("_Hexagonal2DTuple", ["x","y"])
_Cartesian2DTuple = namedtuple("_Cartesian2DTuple", ["x","y"])
_Cartesian3DTuple = namedtuple("_Cartesian3DTuple", ["x","y","z"])
_CabinetTuple = namedtuple("_CabinetTuple", ["cabinet","frame","board"])
################################################################################
# Front-end Classes
################################################################################
"""
Hexagonal coordinate system conventionally used when working with a hexagonal
mesh such as SpiNNaker nodes or boards.
Note: The three axes are non-orthogonal and so various strange things can happen
when working with such schemes. See the topology module for various useful
functions.
"""
class Hexagonal(_HexCoordsMixin, _HexagonalTuple):
def __init__(self, *args, **kwargs):
_HexCoordsMixin.__init__(self)
"""
Special case of Hexagonal. Represents the Hexagonal() value with z fixed as 0.
"""
class Hexagonal2D(_HexCoordsMixin , _Hexagonal2DTuple):
def __init__(self, *args, **kwargs):
_HexCoordsMixin.__init__(self)
"""
Cartesian coordinates in either 2D or 3D space.
"""
class Cartesian2D(_CartesianCoordsMixin, _Cartesian2DTuple):
def __init__(self, *args, **kwargs):
_CartesianCoordsMixin.__init__(self)
class Cartesian3D(_CartesianCoordsMixin, _Cartesian3DTuple):
def __init__(self, *args, **kwargs):
_CartesianCoordsMixin.__init__(self)
"""
Logical coordinates for locations in a series of cabinets containing frames
containing boards, like so::
2 1 0
Cabinet --+-------------+----------------+
| | |
+-------------+ +-------------+ +-------------+ Frame
| | | | | | |
| +---------+ | | +---------+ | | +---------+ | |
| | : : : : | | | | : : : : | | | | : : : : |--------+ 0
| | : : : : | | | | : : : : | | | | : : : : | | |
| +---------+ | | +---------+ | | +---------+ | |
| | : : : :#| | | | : : : : | | | | : : : : |--------+ 1
| | : : : :#| | | | : : : : | | | | : : : : | | |
| +---------+ | | +---------+ | | +---------+ | |
| | : : : : | | | | : : : : | | | | : : : : |--------+ 2
| | : : : : | | | | : : : : | | | | : : : : | | |
| +---------+ | | +---------+ | | +---------+ | |
| | : : : : | | | | : : : : | | | | : : : : |--------+ 3
| | : : : : | | | | : : : : | | | | : : : : | |
| +---------+ | | +|-|-|-|-|+ | | +---------+ |
| | | | | | | | | | |
+-------------+ +--|-|-|-|-|--+ +-------------+
| | | | |
Board -----+-+-+-+-+
4 3 2 1 0
In this example there are 3 cabinets each containing 4 frames which in turn
contain 5 boards.
Cabinets are numbered from 0 right-to-left. Frames are numbered from 0
top-to-bottom. Boards are numbered from 0 right-to-left. Therefore, the board
marked with "#" is at the coordinate (2,1,0).
"""
class Cabinet(_ElementwiseCoordsMixin, _CabinetTuple):
def __init__(self, *args, **kwargs):
_ElementwiseCoordsMixin.__init__(self)
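# Illustrative usage sketch (values are hypothetical): all coordinate classes
# behave like named tuples with element-wise arithmetic, e.g.
#
#     Cabinet(2, 1, 0) + Cabinet(0, 2, 3)      -> Cabinet(2, 3, 3)
#     Hexagonal(1, 2, 0) - Hexagonal(0, 1, 0)  -> Hexagonal(1, 1, 0)
#     abs(Cartesian2D(-3, 4))                  -> Cartesian2D(3, 4)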
|
PypiClean
|
/ezcad_plugins-0.2.4-cp37-cp37m-win_amd64.whl/ezcad_plugins/point_menubar.py
|
import os
import sys
if os.path.realpath(os.path.dirname(__file__)) not in sys.path:
sys.path.append(os.path.realpath(os.path.dirname(__file__)))
from ezcad.config.base import _
from ezcad.widgets.mode_switch import PluginMenuBar
from ezcad.utils.qthelpers import MENU_SEPARATOR, create_action, add_actions
from gopoint.bartender import Bartender
class PointMenuBar(PluginMenuBar):
NAME = "Point Menubar"
def __init__(self):
super().__init__()
self.treebase = None
self.bartender = None
self.new_menu = self.addMenu(_("New"))
self.edit_menu = self.addMenu(_("Edit"))
self.tool_menu = self.addMenu(_("Tool"))
self.gate_menu = self.addMenu(_("Gate"))
def setup(self):
# call bartender after set treebase
self.bartender = Bartender(self.treebase)
self.make_actions()
self.new_menu_actions = [
self.act_new_random,
self.act_new_coord,
MENU_SEPARATOR,
self.act_new_xyz_from_existing_point,
self.act_new_from_surf,
self.act_merge_points,
MENU_SEPARATOR,
self.act_new_from_prop_value_range,
]
self.edit_menu_actions = [
self.act_translate_xyz,
# self.act_separate_vertexes_near_fault,
self.act_flip_depth,
MENU_SEPARATOR,
self.act_copy_property,
self.act_calc_line_numbers,
MENU_SEPARATOR,
self.act_crop_by_polygon,
self.act_clip_by_prop_value_range,
]
self.tool_menu_actions = [
self.act_zoep_modeling,
self.act_cross_plot,
]
self.gate_menu_actions = [
self.act_import_gocad_vset,
self.act_import_chd,
self.act_import_shapefile,
self.act_import_numpy,
self.act_import_csv_file,
# self.act_import_csv_files,
MENU_SEPARATOR,
self.act_export_ascii,
]
add_actions(self.new_menu, self.new_menu_actions)
add_actions(self.edit_menu, self.edit_menu_actions)
add_actions(self.tool_menu, self.tool_menu_actions)
add_actions(self.gate_menu, self.gate_menu_actions)
def make_actions(self):
self.act_new_random = create_action(self, _('From random numbers'),
triggered=self.bartender.new_from_random_numbers)
self.act_new_coord = create_action(self, _('From coordinates'),
triggered=self.bartender.new_from_coord)
self.act_new_xyz_from_existing_point = create_action(self,
_('New XYZ from existing point'),
triggered=self.bartender.new_from_existing_point)
self.act_new_from_prop_value_range = create_action(self,
_('Subset from property value range'),
triggered=self.bartender.subset_from_prop_range)
self.act_crop_by_polygon = create_action(self,
_('Crop by polygon'),
triggered=self.bartender.crop_by_polygon)
self.act_clip_by_prop_value_range = create_action(self,
_('Clip by property value range'),
triggered=self.bartender.clip_by_prop_range)
self.act_merge_points = create_action(self,
_('From merging points'),
triggered=self.bartender.merge_points)
self.act_new_from_surf = create_action(self,
_('From surface vertexes'),
triggered=self.bartender.new_from_surf)
self.act_translate_xyz = create_action(self, _('Translate XYZ'),
triggered=self.bartender.object_translate_xyz)
# self.act_separate_vertexes_near_fault = create_action(self,
# _('Separate vertexes near fault'),
# triggered=self.bartender.separate_vertexes_near_fault)
self.act_flip_depth = create_action(self, _('Flip depth'),
triggered=self.bartender.object_flip_depth)
self.act_copy_property = create_action(self, _('Copy property'),
triggered=self.bartender.open_copy_property)
self.act_calc_line_numbers = create_action(self,
_('Calculate line numbers'),
triggered=self.bartender.calc_line_numbers)
self.act_zoep_modeling = create_action(self, _('Zoeppritz modeling'),
triggered=self.bartender.zoep_modeling)
self.act_cross_plot = create_action(self, _('Cross plot'),
triggered=self.bartender.cross_plot)
self.act_import_gocad_vset = create_action(self,
_('Import Gocad VSet file (.vs)'),
triggered=self.bartender.import_gocad_vset)
self.act_import_chd = create_action(self,
_('Import SeisSpace header dump'),
triggered=self.bartender.import_chd)
# self.act_import_sdt = create_action(self,
# _('Import Space delimited txt'),
# triggered=self.bartender.import_sdt)
self.act_import_shapefile = create_action(self,
_('Import ESRI Shapefile'),
triggered=self.bartender.import_shapefile)
self.act_import_numpy = create_action(self,
_('Import Numpy load text'),
triggered=self.bartender.import_numpy)
self.act_import_csv_file = create_action(self, _('Import CSV file'),
triggered=self.bartender.import_csv)
# self.act_import_csv_files = create_action(self, _('CSV files'),
# triggered=self.bartender.import_csv_files)
self.act_export_ascii = create_action(self, _('Export ASCII file'),
triggered=self.bartender.export_ascii)
|
PypiClean
|
/llnl_hatchet-2023.1.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl/hatchet/vis/scripts/cct/cct_menu_view.js
|
import { makeSignaller, d3, globals } from "./cct_globals";
import View from "../utils/view";
class MenuView extends View{
/**
* View class for the menu portion of the visualization
*
* @param {DOMElement} elem - The current cell of the calling jupyter notebook
* @param {Model} model - The model object
*/
constructor(elem, model){
super(elem, model);
this._svg = null;
this._svgButtonOffset = 0;
this.categories = ['Metrics', 'Display', 'Query']
this.model_var_map = {};
this.menu_tree = [];
this.width = elem.clientWidth - globals.layout.margin.right - globals.layout.margin.left;
this.height = globals.treeHeight * (model.forest.numberOfTrees + 1);
this.menu_height = '2em';
this.menu_bg_color = 'rgba(100,100,100,1)';
//state things
this.prior_submenu = null;
this.model.state.menu_active = false;
this._setUpMenuTree();
this._renderMenuBar();
this._preRender();
}
_makeSubmenuOption(text, type, options, model_var, callback){
/**
* Makes a data-based definition of a submenu option which
* can be used with d3 to make our menu.
* @param {String} text - Text which will appear to the user on the submenu option
* @param {String} type - Describes the type of submenu option: 'button', 'dropdown' or 'toggle'
* @param {Array{String}} options - Array of strings which are the dropdown options
* for 'dropdown' buttons
* @param {String} model_var - A key which accesses a variable in the model that our submenu option
* state is dependant on, so active selections can be 'checkmarked', etc.
* @param {Function} callback - A callback function which runs when a user clicks the submenu button,
* or a dropdown option in the case of a 'dropdown' type
*/
this.model_var_map[text] = model_var;
if(options != null){
return {'text':text, 'type':type, 'options':options, 'onselect':callback}
}
return {'text':text, 'type':type, 'onclick':callback}
}
_setUpMenuTree(){
/**
* Creates a hierarchical data structure which is used by the render functions to load
* a dropdown menu. This data model supports three types of
* interactions on buttons 'dropdown' (a list of options which users can click and select from),
* 'click' (a generic button), 'toggle' (a single option toggle on/off)
*/
let model = this.model;
let rootNodeNames = model.forest.rootNodeNames;
let metricColumns = model.forest.metricColumns;
let colors = model.data["colors"];
let legends = model.data["legends"];
let self = this;
this.categories.forEach(d=>{
this.menu_tree[d] = [];
})
//add metrics submenu
this.menu_tree.Metrics.push(
this._makeSubmenuOption('Color (Primary Metric)', 'dropdown',
metricColumns,
'primaryMetric',
function(evt_sel, val){
self.observers.notify({
type: globals.signals.METRICCHANGE,
newMetric: val,
source: "primary"
})
}
)
);
this.menu_tree.Metrics.push(
this._makeSubmenuOption('Size (Secondary Metric)', 'dropdown',
metricColumns,
'secondaryMetric',
function(evt_sel, val){
self.observers.notify({
type: globals.signals.METRICCHANGE,
newMetric: val,
source: "secondary"
})
}
)
);
//add display submenu
this.menu_tree.Display.push(
this._makeSubmenuOption('Tree Select', 'dropdown',
rootNodeNames,
'activeTree',
function (evt_sel, val) {
self.observers.notify({
type: globals.signals.TREECHANGE,
display: val
});
}
)
)
this.menu_tree.Display.push(
this._makeSubmenuOption('Color Map', 'dropdown',
colors,
'colorText',
function (evt_sel, val) {
self.observers.notify({
type: globals.signals.COLORCLICK,
value: val
})
}
)
)
this.menu_tree.Display.push(
this._makeSubmenuOption('Legends', 'dropdown',
legends,
'legendText',
function (evt_sel, val) {
self.observers.notify({
type: globals.signals.LEGENDCLICK,
value: val
});
}
)
)
this.menu_tree.Display.push(
this._makeSubmenuOption('Reset View', 'button',
null,
null,
function (evt_sel) {
self.observers.notify({
type: globals.signals.RESETVIEW
})
}
)
)
//add query/filter
this.menu_tree['Query'].push(
this._makeSubmenuOption('Select Nodes', 'toggle',
null,
'brushOn',
function (evt_sel) {
self.observers.notify({
type: globals.signals.TOGGLEBRUSH
})
}
)
)
this.menu_tree['Query'].push(
this._makeSubmenuOption('Mass Prune', 'toggle',
null,
'pruneEnabled',
function(evt_sel){
self.observers.notify({
type: globals.signals.ENABLEMASSPRUNE,
threshold: 1.5
})
}
)
)
this.menu_tree['Query'].push(
this._makeSubmenuOption('Get Snapshot Query', 'button',
null,
null,
function(evt_sel){
window.alert("Query describing your current tree has been stored.\n Please use: \n\n\t%cct_fetch_query <python_variable>\n\n to retrieve your query back to the notebook.")
self.observers.notify({
type: globals.signals.SNAPSHOT
})
}
)
)
}
_addNewMenuButton(id, text, click){
/**
* Function created to remove some repeated code blocks and make it
* easier to add SVG buttons with a more dynamic left offset.
*
* @param {string} id - The HTML id which will be affixed to the newly created button
* @param {string} text - The text to be displayed on the button itself
* @param {function} click - Callback function to be called on click of button.
*/
var buttonPad = 5;
var textPad = 3;
var button = this._svg.append('g')
.attr('id', id)
.append('rect')
.attr('height', '15px')
.attr('x', this._svgButtonOffset).attr('y', 0).attr('rx', 5)
.style('fill', '#ccc')
.on('click', click);
var buttonText = d3.select(this.elem).select('#'+id).append('text')
.attr("x", this._svgButtonOffset + textPad)
.attr("y", 12)
.text(text)
.attr("font-family", "sans-serif")
.attr("font-size", "12px")
.attr('cursor', 'pointer')
.on('click', click);
var width = buttonText.node().getBBox().width + 2*textPad;
button.attr('width', width);
this._svgButtonOffset += width + buttonPad;
}
_handleSubmenuVisibility(d){
const self = this;
let submenu = d3.select(self.elem).select(`.${d}-submenu`);
if(self.model.state.menu_active && submenu.style('visibility') == 'hidden'){
submenu.style('visibility', 'visible');
}
else{
submenu.style('visibility', 'hidden');
}
}
_renderMenuBar(){
/**
* Renders a windows/unix style top menu bar
*/
//render a grey rectangle where categories sit
// 2em tall, full width
let menu_svg = d3.select(this.elem).append("svg").attr("class", "menu-svg");
this._svg = menu_svg;
const self = this;
const buttonPad = 10;
const bg_color = this.menu_bg_color;
const menu_height = '2em';
const text_v_offset = '1.4em';
const vis_name_text = 'Interactive Calling Context Tree'
menu_svg.attr('width', this.elem.clientWidth)
.attr('height', menu_height)
menu_svg.append('rect')
.attr('width', this.elem.clientWidth)
.attr('height', menu_height)
.style('fill', bg_color);
let vis_name = menu_svg
.append('text')
.text(vis_name_text)
.attr('x', this.elem.clientWidth)
.attr('y', 20)
.attr("font-family", "sans-serif")
.attr("font-size", "14px")
.style('fill', 'rgba(256,256,256,1)');
vis_name.attr('x', function(){
return self.elem.clientWidth - d3.select(this).node().getBBox().width - 25;
})
let options = menu_svg
.selectAll('.option')
.data(this.categories);
let op_grp = options.enter()
.append('g')
.attr('class', 'option')
.attr('cursor', 'pointer')
.on('mouseover',function(d){
d3.select(this)
.select('.menu-button')
.style('fill', 'rgba(150,150,150,1)');
if(self.model.state.menu_active && (d !== self.prior_submenu)){
self._handleSubmenuVisibility(d);
self._handleSubmenuVisibility(self.prior_submenu);
self.prior_submenu = d;
}
})
.on('mouseout', function(){
d3.select(this)
.select('.menu-button')
.style('fill', bg_color);
})
.on('click', function(d){
self.observers.notify({type: globals.signals.TOGGLEMENU});
self.prior_submenu = d;
self._handleSubmenuVisibility(d);
});
op_grp.append('rect')
.attr('class','menu-button')
.attr('height', menu_height)
.style('fill', bg_color);
op_grp.append('text')
.text(d=>{return d})
.attr('class','menu-text')
.attr('y', text_v_offset)
.attr('x', buttonPad)
.attr("font-family", "sans-serif")
.attr("font-size", "14px")
.style('fill', 'rgba(256,256,256,1)');
let offset = 0;
op_grp.each(function(d){
let btn_grp = d3.select(this);
let btn_width = btn_grp.select(".menu-text").node().getBBox().width + 2*buttonPad
//apply modified widths and offsets
btn_grp.select(".menu-button")
.attr('width', btn_width)
.style('stroke', 'black')
.style('stroke-dasharray', `0, ${btn_width}, ${menu_height}, ${btn_width}, ${menu_height}, 0`);
btn_grp.attr('transform', `translate(${offset}, 0)`);
self._addSubmenu(d, self.menu_tree[d], offset);
offset += btn_width;
})
}
_addSubmenu(submenu_name, submenu_options, x_offset){
/**
* Renders a submenu drop down under a top level menu button.
*/
let view_left = this.elem.getBoundingClientRect().left - this.elem.parentNode.getBoundingClientRect().left;
let view_top = this._svg.select('rect').node().getBBox().height;
const button_pad = 10;
const self = this;
let submenu_window = d3.select(this.elem)
.append('svg')
.style('position', 'absolute')
.style('top', view_top + 16 + 'px')
.style('left', view_left + x_offset + 5 + 'px')
.style('width', '600px')
.attr('class', `${submenu_name}-submenu`)
.style('visibility', 'hidden')
.append('g')
.attr('class', 'submenu-grp');
let border = submenu_window.append('rect')
.attr('height', submenu_options.length*25)
.style('stroke', 'white')
.style('stroke-width', 2);
let opt = submenu_window
.selectAll('.submenu-button')
.data(submenu_options);
let btn = opt.enter()
.append('g')
.attr('class', 'submenu-button')
.attr('transform', (_,i)=>{return `translate(0, ${i*25+1})`})
.attr('cursor', 'pointer')
.on('mouseover',function(){
d3.select(this)
.select('.submenu-button-rect')
.style('fill', 'rgba(150,150,150,1)');
})
.on('mouseout', function(){
d3.select(this)
.select('.submenu-button-rect')
.style('fill', self.menu_bg_color);
})
.on('click', function(d){
if(d.type != 'dropdown') d.onclick(this);
});
let bar = btn.append('rect')
.attr('class', 'submenu-button-rect')
.attr('height', 25)
.style('fill', this.menu_bg_color);
btn.append('text')
.text((d)=>{return d['text']})
.attr('class', 'submenu-button-text')
.attr('y', '1.2em')
.attr('x', button_pad)
.attr("font-family", "sans-serif")
.attr("font-size", "14px")
.style('fill', 'rgba(256,256,256,1)');
let max_width = 0;
btn.each(function(d){
max_width = Math.max(this.getBBox().width, max_width);
})
let barwidth = max_width + 2*button_pad + 25;
bar.attr('width', barwidth);
border.attr('width', barwidth);
btn.each(function(d){
let this_button = d3.select(this);
if(d.type == 'dropdown'){
self._makeDropDownMenu(this_button, d.options, barwidth, d.onselect);
this_button.on('mouseenter', function(){
let submenu = d3.select(this).select(`.cct-dropdown-menu`);
submenu.style('visibility', 'visible');
})
.on('mouseleave', function(){
let submenu = d3.select(this).select(`.cct-dropdown-menu`);
submenu.style('visibility', 'hidden');
})
}
})
btn.append('text')
.text((d)=>{ if(d.type == 'dropdown') return '▸'; })
.attr('class', 'submenu-icon')
.attr('y', '1.2em')
.attr('x', max_width + 25)
.attr("font-family", "sans-serif")
.attr("font-size", "14px")
.style('fill', 'rgba(256,256,256,1)');
}
_makeDropDownMenu(button, options, xoffset, callback){
/**
* Renders a list of options under a 'dropdown' submenu option.
*/
let xorigin = xoffset;
let yorigin = button.node().getBBox().y;
let button_pad = 10;
let self = this;
let selections = button.append('g')
.attr('class', 'cct-dropdown-menu')
.attr('transform', `translate(${xorigin}, ${yorigin})`)
.style('visibility', 'hidden');
selections.append('rect')
.attr('height', options.length*25)
.attr('width', 150)
.style('stroke', 'white')
.style('stroke-width', 2);
let sel = selections.selectAll('.cct-dropdown-option')
.data(options);
let opt = sel.enter()
.append('g')
.attr('class', 'cct-dropdown-option')
.attr('transform', (_,i)=>{return `translate(0, ${i*25})`})
.attr('cursor', 'pointer')
.on('mouseenter', function(){
d3.select(this).select('.cct-dropdown-option-rect').style('fill', 'rgba(150,150,150,1)');
})
.on('mouseleave', function(){
d3.select(this).select('.cct-dropdown-option-rect').style('fill', self.menu_bg_color);
})
.on('click', function(d){
callback(this, d);
});
opt.append('rect')
.attr('height', 25)
.attr('width', 150)
.attr('class', 'cct-dropdown-option-rect')
.style('fill', this.menu_bg_color);
opt.append('text')
.text((d)=>{return d})
.attr('class', 'cct-dropdown-option-text')
.attr('y', '1.2em')
.attr('x', button_pad)
.attr("font-family", "sans-serif")
.attr("font-size", "14px")
.style('fill', 'rgba(256,256,256,1)');
opt.append('text')
.text((_, i)=>{ if(i == 0) return '✓'; })
.attr('class', 'cct-dropdown-icon')
.attr('y', '1.2em')
.attr('x', 130)
.attr("font-family", "sans-serif")
.attr("font-size", "16px")
.style('fill', 'rgba(256,256,256,1)');
}
_preRender(){
const self = this;
// ----------------------------------------------
// Define and set d3 callbacks for changes
// ----------------------------------------------
let brush = d3.brush()
.extent([[0, 0], [2 * this.width, 2 * (this.height + globals.layout.margin.top + globals.layout.margin.bottom)]])
.on('brush', function(){
self.observers.notify({
type: globals.signals.BRUSH,
selection: d3.event.selection,
end: false
})
})
.on('end', function(){
self.observers.notify({
type: globals.signals.BRUSH,
selection: d3.event.selection,
end: true
})
});
//brush group
d3.select("#mainG").append('g')
.attr('class', 'brush')
.call(brush);
this.brush = brush;
}
render(){
/**
* Core call for drawing menu related screen elements
*/
const self = this;
let model = this.model;
//toggleable events
let pruneEnabled = model.state["pruneEnabled"];
let brushOn = model.state["brushOn"];
if(!model.state.menu_active && self.prior_submenu){
let submenu = d3.select(self.elem).select(`.${self.prior_submenu}-submenu`);
if(submenu.style('visibility') != 'hidden'){
submenu.style('visibility', 'hidden');
}
}
for(let option of this.categories){
let submenuopts = d3.select(this.elem)
.select(`.${option}-submenu`)
.selectAll('.submenu-button');
submenuopts.each(
function(d){
let dropdownopts = d3.select(this).selectAll(`.cct-dropdown-option`);
dropdownopts
.selectAll('.cct-dropdown-icon')
.text((v)=>{
if(model.state[self.model_var_map[d.text]] == v){
return ('✓');
}
return ('');
});
if(d.type == 'toggle'){
if(pruneEnabled && d.text == 'Mass Prune'){
d3.select(this).select('.submenu-icon').text('✓');
}else if(!pruneEnabled && d.text == 'Mass Prune'){
d3.select(this).select('.submenu-icon').text('');
}
else if(d.text == 'Select Nodes'){
if(brushOn > 0){
d3.select(this).select('.submenu-icon').text('✓');
}
else if(brushOn < 0){
d3.select(this).select('.submenu-icon').text('');
}
}
}
}
)
}
d3.select(this.elem).selectAll('.brush').remove();
//add brush if there should be one
if(brushOn > 0){
this._svg.select("#mainG").append('g')
.attr('class', 'brush')
.call(this.brush);
}
}
}
export default MenuView;
|
PypiClean
|
/argparse-schema-0.0.7.tar.gz/argparse-schema-0.0.7/README.md
|
# argparse-schema [](https://travis-ci.com/FebruaryBreeze/argparse-schema) [](https://codecov.io/gh/FebruaryBreeze/argparse-schema) [](https://pypi.org/project/argparse-schema/)
Parse Argument with JSON Schema.
## Installation
Need Python 3.6+.
```bash
pip install argparse-schema
```
## Usage
[Schema](./tests/argument_config.json):
```json
{
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "argument_config",
"description": "Arg-parse Schema UnitTest",
"type": "object",
"properties": {
"config": {
"type": "string",
"positional": true,
"description": "path of config file"
},
"resume": {
"type": "boolean",
"description": "resume from checkpoint or not"
},
"scale": {
"type": "number",
"default": 1.0,
"description": "scale of image"
},
"mode": {
"enum": [
"happy",
"high",
"heaven"
],
"default": "happy",
"description": "speed mode"
}
},
"required": [
"config"
]
}
```
Python Code:
```python
# demo.py
import argparse_schema
print(argparse_schema.parse(schema='./tests/argument_config.json'))
```
Run with arguments:
```bash
python3 demo.py /path/to/config.py
#> {'config': '/path/to/config.py', 'resume': False, 'scale': 1.0, 'mode': 'happy'}
```
CLI:
```bash
argparse-schema tests/argument_config.json
#> Show help for schema file [tests/argument_config.json]:
#> usage: YOUR-COMMAND [-h] [--resume] [--scale SCALE]
#> [--mode {happy,high,heaven}]
#> config
#>
#> Arg-parse Schema UnitTest
#>
#> positional arguments:
#> config path of config file
#>
#> optional arguments:
#> -h, --help show this help message and exit
#> --resume resume from checkpoint or not
#> --scale SCALE scale of image, [1.0] in default
#> --mode {happy,high,heaven}
#> speed mode, [happy] in default
```
|
PypiClean
|
/nb_offline_convert-0.1.1.tar.gz/nb_offline_convert-0.1.1/share/jupyter/nbconvert/templates/lab_offline/static/MathJax/jax/output/CommonHTML/jax.js
|
(function(o,f,m,a){var d;var g=MathJax.Object.isArray;var e,q,h;var k=1,l=0.1,j=0.025,b=0.025;var p={".mjx-chtml":{display:"inline-block","line-height":0,"text-indent":0,"text-align":"left","text-transform":"none","font-style":"normal","font-weight":"normal","font-size":"100%","font-size-adjust":"none","letter-spacing":"normal","word-wrap":"normal","word-spacing":"normal","white-space":"nowrap","float":"none",direction:"ltr","max-width":"none","max-height":"none","min-width":0,"min-height":0,border:0,margin:0,padding:"1px 0"},".MJXc-display":{display:"block","text-align":"center",margin:"1em 0",padding:0},".mjx-chtml[tabindex]:focus, body :focus .mjx-chtml[tabindex]":{display:"inline-table"},".mjx-full-width":{"text-align":"center",display:"table-cell!important",width:"10000em"},".mjx-math":{display:"inline-block","border-collapse":"separate","border-spacing":0},".mjx-math *":{display:"inline-block","-webkit-box-sizing":"content-box!important","-moz-box-sizing":"content-box!important","box-sizing":"content-box!important","text-align":"left"},".mjx-numerator":{display:"block","text-align":"center"},".mjx-denominator":{display:"block","text-align":"center"},".MJXc-stacked":{height:0,position:"relative"},".MJXc-stacked > *":{position:"absolute"},".MJXc-bevelled > *":{display:"inline-block"},".mjx-stack":{display:"inline-block"},".mjx-op":{display:"block"},".mjx-under":{display:"table-cell"},".mjx-over":{display:"block"},".mjx-over > *":{"padding-left":"0px!important","padding-right":"0px!important"},".mjx-under > *":{"padding-left":"0px!important","padding-right":"0px!important"},".mjx-stack > .mjx-sup":{display:"block"},".mjx-stack > .mjx-sub":{display:"block"},".mjx-prestack > .mjx-presup":{display:"block"},".mjx-prestack > .mjx-presub":{display:"block"},".mjx-delim-h > .mjx-char":{display:"inline-block"},".mjx-surd":{"vertical-align":"top"},".mjx-mphantom *":{visibility:"hidden"},".mjx-merror":{"background-color":"#FFFF88",color:"#CC0000",border:"1px solid #CC0000",padding:"2px 3px","font-style":"normal","font-size":"90%"},".mjx-annotation-xml":{"line-height":"normal"},".mjx-menclose > svg":{fill:"none",stroke:"currentColor"},".mjx-mtr":{display:"table-row"},".mjx-mlabeledtr":{display:"table-row"},".mjx-mtd":{display:"table-cell","text-align":"center"},".mjx-label":{display:"table-row"},".mjx-box":{display:"inline-block"},".mjx-block":{display:"block"},".mjx-span":{display:"inline"},".mjx-char":{display:"block","white-space":"pre"},".mjx-itable":{display:"inline-table",width:"auto"},".mjx-row":{display:"table-row"},".mjx-cell":{display:"table-cell"},".mjx-table":{display:"table",width:"100%"},".mjx-line":{display:"block",height:0},".mjx-strut":{width:0,"padding-top":k+"em"},".mjx-vsize":{width:0},".MJXc-space1":{"margin-left":".167em"},".MJXc-space2":{"margin-left":".222em"},".MJXc-space3":{"margin-left":".278em"},".mjx-chartest":{display:"block",visibility:"hidden",position:"absolute",top:0,"line-height":"normal","font-size":"500%"},".mjx-chartest .mjx-char":{display:"inline"},".mjx-chartest 
.mjx-box":{"padding-top":"1000px"},".MJXc-processing":{visibility:"hidden",position:"fixed",width:0,height:0,overflow:"hidden"},".MJXc-processed":{display:"none"},".mjx-test":{"font-style":"normal","font-weight":"normal","font-size":"100%","font-size-adjust":"none","text-indent":0,"text-transform":"none","letter-spacing":"normal","word-spacing":"normal",overflow:"hidden",height:"1px"},".mjx-test.mjx-test-display":{display:"table!important"},".mjx-test.mjx-test-inline":{display:"inline!important","margin-right":"-1px"},".mjx-test.mjx-test-default":{display:"block!important",clear:"both"},".mjx-ex-box":{display:"inline-block!important",position:"absolute",overflow:"hidden","min-height":0,"max-height":"none",padding:0,border:0,margin:0,width:"1px",height:"60ex"},".mjx-test-inline .mjx-left-box":{display:"inline-block",width:0,"float":"left"},".mjx-test-inline .mjx-right-box":{display:"inline-block",width:0,"float":"right"},".mjx-test-display .mjx-right-box":{display:"table-cell!important",width:"10000em!important","min-width":0,"max-width":"none",padding:0,border:0,margin:0},"#MathJax_CHTML_Tooltip":{"background-color":"InfoBackground",color:"InfoText",border:"1px solid black","box-shadow":"2px 2px 5px #AAAAAA","-webkit-box-shadow":"2px 2px 5px #AAAAAA","-moz-box-shadow":"2px 2px 5px #AAAAAA","-khtml-box-shadow":"2px 2px 5px #AAAAAA",padding:"3px 4px","z-index":401,position:"absolute",left:0,top:0,width:"auto",height:"auto",display:"none"}};var i=1000000;var n=5;var c={},r=MathJax.Hub.config;a.Augment({settings:f.config.menuSettings,config:{styles:p},Config:function(){if(!this.require){this.require=[]}this.SUPER(arguments).Config.call(this);var s=this.settings;if(s.scale){this.config.scale=s.scale}this.require.push(this.fontDir+"/TeX/fontdata.js");this.require.push(MathJax.OutputJax.extensionDir+"/MathEvents.js");c=this.config.linebreaks},Startup:function(){e=MathJax.Extension.MathEvents.Event;q=MathJax.Extension.MathEvents.Touch;h=MathJax.Extension.MathEvents.Hover;this.ContextMenu=e.ContextMenu;this.Mousedown=e.AltContextMenu;this.Mouseover=h.Mouseover;this.Mouseout=h.Mouseout;this.Mousemove=h.Mousemove;var s=a.addElement(document.body,"mjx-block",{style:{display:"block",width:"5in"}});this.pxPerInch=s.offsetWidth/5;s.parentNode.removeChild(s);this.TestSpan=a.Element("mjx-test",{style:{left:"1em"}},[["mjx-left-box"],["mjx-ex-box"],["mjx-right-box"]]);return o.Styles(this.config.styles,["InitializeCHTML",this])},InitializeCHTML:function(){this.getDefaultExEm();if(this.defaultEm){return}var s=MathJax.Callback();o.timer.start(o,function(t){if(t.time(s)){f.signal.Post(["CommonHTML Jax - no default em size"]);return}a.getDefaultExEm();if(a.defaultEm){s()}else{setTimeout(t,t.delay)}},this.defaultEmDelay,this.defaultEmTimeout);return s},defaultEmDelay:100,defaultEmTimeout:1000,getDefaultExEm:function(){var s=document.body.appendChild(this.TestSpan.cloneNode(true));s.className+=" mjx-test-inline mjx-test-default";this.defaultEm=this.getFontSize(s);this.defaultEx=s.childNodes[1].offsetHeight/60;this.defaultWidth=Math.max(0,s.lastChild.offsetLeft-s.firstChild.offsetLeft-2);document.body.removeChild(s)},getFontSize:(window.getComputedStyle?function(t){var s=window.getComputedStyle(t);return parseFloat(s.fontSize)}:function(s){return s.style.pixelLeft}),getMaxWidth:(window.getComputedStyle?function(t){var s=window.getComputedStyle(t);if(s.maxWidth!=="none"){return parseFloat(s.maxWidth)}return 0}:function(t){var s=t.currentStyle.maxWidth;if(s!=="none"){if(s.match(/\d*px/)){return parseFloat(s)}var 
u=t.style.left;t.style.left=s;s=t.style.pixelLeft;t.style.left=u;return s}return 0}),loadFont:function(s){f.RestartAfter(o.Require(this.fontDir+"/"+s))},fontLoaded:function(s){if(!s.match(/-|fontdata/)){s+="-Regular"}if(!s.match(/\.js$/)){s+=".js"}MathJax.Callback.Queue(["Post",f.Startup.signal,"CommonHTML - font data loaded for "+s],["loadComplete",o,this.fontDir+"/"+s])},Element:function(s,u,t){if(s.substr(0,4)==="mjx-"){if(!u){u={}}if(u.isMathJax==null){u.isMathJax=true}if(u.className){u.className=s+" "+u.className}else{u.className=s}s="span"}return this.HTMLElement(s,u,t)},addElement:function(u,s,v,t){return u.appendChild(this.Element(s,v,t))},HTMLElement:m.Element,ucMatch:m.ucMatch,setScript:m.setScript,getNode:function(x,w){var u=RegExp("\\b"+w+"\\b");var t=[];while(x){for(var v=0,s=x.childNodes.length;v<s;v++){var y=x.childNodes[v];if(y){if(u.test(y.className)){return y}if(y.id===""){t.push(y)}}}x=t.shift()}return null},preTranslate:function(w){var v=w.jax[this.id],E,B=v.length,H,z,C,F,t,G,s,I;var y=100000,x=false,D=0,u=c.automatic,A=c.width;if(u){x=!!A.match(/^\s*(\d+(\.\d*)?%\s*)?container\s*$/);if(x){A=A.replace(/\s*container\s*/,"")}else{y=this.defaultWidth}if(A===""){A="100%"}}for(E=0;E<B;E++){H=v[E];if(!H.parentNode){continue}z=H.previousSibling;if(z&&z.className&&String(z.className).substr(0,9)==="mjx-chtml"){z.parentNode.removeChild(z)}if(H.MathJax.preview){H.MathJax.preview.style.display="none"}t=H.MathJax.elementJax;if(!t){continue}t.CHTML={display:(t.root.Get("display")==="block"),preview:(t.CHTML||{}).preview};C=a.Element("mjx-chtml",{id:t.inputID+"-Frame",className:"MathJax_CHTML",isMathJax:true,jaxID:this.id,oncontextmenu:e.Menu,onmousedown:e.Mousedown,onmouseover:e.Mouseover,onmouseout:e.Mouseout,onmousemove:e.Mousemove,onclick:e.Click,ondblclick:e.DblClick,onkeydown:e.Keydown,tabIndex:f.getTabOrder(t)});if(t.CHTML.display){var J=a.Element("mjx-chtml",{className:"MJXc-display",isMathJax:false});J.appendChild(C);C=J}if(f.Browser.noContextMenu){C.ontouchstart=q.start;C.ontouchend=q.end}C.className+=" MJXc-processing";H.parentNode.insertBefore(C,H);F=this.TestSpan.cloneNode(true);F.className+=" mjx-test-"+(t.CHTML.display?"display":"inline");H.parentNode.insertBefore(F,H)}for(E=0;E<B;E++){H=v[E];if(!H.parentNode){continue}F=H.previousSibling;t=H.MathJax.elementJax;if(!t){continue}s=a.getFontSize(F);G=F.childNodes[1].offsetHeight/60;D=Math.max(0,t.CHTML.display?F.lastChild.offsetWidth-1:F.lastChild.offsetLeft-F.firstChild.offsetLeft-2);if(G===0||G==="NaN"){G=this.defaultEx;D=this.defaultWidth}if(D===0&&!t.CHTML.display){D=this.defaultWidth}if(x){y=D}I=(this.config.matchFontHeight?G/this.TEX.x_height/s:1);I=Math.floor(Math.max(this.config.minScaleAdjust/100,I)*this.config.scale);t.CHTML.scale=I/100;t.CHTML.fontSize=I+"%";t.CHTML.outerEm=s;t.CHTML.em=this.em=s*I/100;t.CHTML.ex=G;t.CHTML.cwidth=D/this.em;t.CHTML.lineWidth=(u?this.length2em(A,y/this.em,1):y)}for(E=0;E<B;E++){H=v[E];if(!H.parentNode){continue}t=H.MathJax.elementJax;if(!t){continue}H.parentNode.removeChild(H.previousSibling);if(H.MathJax.preview){H.MathJax.preview.style.display=""}}w.CHTMLeqn=w.CHTMLlast=0;w.CHTMLi=-1;w.CHTMLchunk=this.config.EqnChunk;w.CHTMLdelay=false},Translate:function(t,x){if(!t.parentNode){return}if(x.CHTMLdelay){x.CHTMLdelay=false;f.RestartAfter(MathJax.Callback.Delay(this.config.EqnChunkDelay))}var 
s=t.MathJax.elementJax,w=s.root,v=document.getElementById(s.inputID+"-Frame");if(!v){return}this.getMetrics(s);if(this.scale!==1){v.style.fontSize=s.CHTML.fontSize}this.initCHTML(w,v);this.savePreview(t);this.CHTMLnode=v;try{w.setTeXclass();w.toCommonHTML(v)}catch(u){while(v.firstChild){v.removeChild(v.firstChild)}delete this.CHTMLnode;this.restorePreview(t);throw u}delete this.CHTMLnode;this.restorePreview(t);if(s.CHTML.display){v=v.parentNode}v.className=v.className.replace(/ [^ ]+$/,"");v.className+=" MJXc-processed";if(t.MathJax.preview){s.CHTML.preview=t.MathJax.preview;delete t.MathJax.preview}x.CHTMLeqn+=(x.i-x.CHTMLi);x.CHTMLi=x.i;if(x.CHTMLeqn>=x.CHTMLlast+x.CHTMLchunk){this.postTranslate(x);x.CHTMLchunk=Math.floor(x.CHTMLchunk*this.config.EqnChunkFactor);x.CHTMLdelay=true}},initCHTML:function(t,s){},savePreview:function(s){var t=s.MathJax.preview;if(t&&t.parentNode){s.MathJax.tmpPreview=document.createElement("span");t.parentNode.replaceChild(s.MathJax.tmpPreview,t)}},restorePreview:function(s){var t=s.MathJax.tmpPreview;if(t){t.parentNode.replaceChild(s.MathJax.preview,t);delete s.MathJax.tmpPreview}},getMetrics:function(s){var t=s.CHTML;this.jax=s;this.em=t.em;this.outerEm=t.outerEm;this.scale=t.scale;this.cwidth=t.cwidth;this.linebreakWidth=t.lineWidth},postTranslate:function(x){var t=x.jax[this.id];for(var v=x.CHTMLlast,s=x.CHTMLeqn;v<s;v++){var u=t[v];if(u&&u.MathJax.elementJax){u.previousSibling.className=u.previousSibling.className.replace(/ [^ ]+$/,"");var w=u.MathJax.elementJax.CHTML;if(w.preview){w.preview.innerHTML="";u.MathJax.preview=w.preview;delete w.preview}}}x.CHTMLlast=x.CHTMLeqn},getJaxFromMath:function(s){if(s.parentNode.className.match(/MJXc-display/)){s=s.parentNode}do{s=s.nextSibling}while(s&&s.nodeName.toLowerCase()!=="script");return f.getJaxFor(s)},getHoverSpan:function(s,t){return s.root.CHTMLnodeElement()},getHoverBBox:function(s,v,w){var x=s.root.CHTML,u=s.CHTML.outerEm;var t={w:x.w*u,h:x.h*u,d:x.d*u};if(x.width){t.width=x.width}return t},Zoom:function(u,B,A,s,y){this.getMetrics(u);var v=a.addElement(B,"mjx-chtml",{style:{"font-size":Math.floor(a.scale*100)+"%"},isMathJax:false});a.CHTMLnode=v;this.idPostfix="-zoom";u.root.toCommonHTML(v);this.idPostfix="";var t=v.style,C=u.root.CHTML;if(C.t>C.h){t.marginTop=a.Em(C.t-C.h)}if(C.b>C.d){t.marginBottom=a.Em(C.b-C.d)}if(C.l<0){t.paddingLeft=a.Em(-C.l)}if(C.r>C.w){t.marginRight=a.Em(C.r-C.w)}t.position="absolute";var z=v.offsetWidth,x=v.offsetHeight,D=A.firstChild.offsetHeight,w=A.firstChild.offsetWidth;v.style.position="";return{Y:-e.getBBox(B).h,mW:w,mH:D,zW:z,zH:x}},Remove:function(s){var t=document.getElementById(s.inputID+"-Frame");if(t&&s.CHTML.display){t=t.parentNode}if(t){t.parentNode.removeChild(t)}delete s.CHTML},ID:0,idPostfix:"",GetID:function(){this.ID++;return 
this.ID},MATHSPACE:{veryverythinmathspace:1/18,verythinmathspace:2/18,thinmathspace:3/18,mediummathspace:4/18,thickmathspace:5/18,verythickmathspace:6/18,veryverythickmathspace:7/18,negativeveryverythinmathspace:-1/18,negativeverythinmathspace:-2/18,negativethinmathspace:-3/18,negativemediummathspace:-4/18,negativethickmathspace:-5/18,negativeverythickmathspace:-6/18,negativeveryverythickmathspace:-7/18,thin:0.04,medium:0.06,thick:0.1,infinity:i},SPACECLASS:{thinmathspace:"MJXc-space1",mediummathspace:"MJXc-space2",thickmathspace:"MJXc-space3"},pxPerInch:96,em:16,maxStretchyParts:1000,FONTDEF:{},TEXDEF:{x_height:0.442,quad:1,num1:0.676508,num2:0.393732,num3:0.44373,denom1:0.685951,denom2:0.344841,sup1:0.412892,sup2:0.362892,sup3:0.288888,sub1:0.15,sub2:0.247217,sup_drop:0.386108,sub_drop:0.05,delim1:2.39,delim2:1,axis_height:0.25,rule_thickness:0.06,big_op_spacing1:0.111111,big_op_spacing2:0.166666,big_op_spacing3:0.2,big_op_spacing4:0.45,big_op_spacing5:0.1,surd_height:0.075,scriptspace:0.05,nulldelimiterspace:0.12,delimiterfactor:901,delimitershortfall:0.3,min_rule_thickness:1.25},isChar:function(s){if(s.length===1){return true}if(s.length!==2){return false}var t=s.charCodeAt(0);return(t>=55296&&t<56319)},unicodeChar:function(s){if(s<65535){return String.fromCharCode(s)}s-=65536;return String.fromCharCode((s>>10)+55296)+String.fromCharCode((s&1023)+56320)},getUnicode:function(s){var t=s.text.charCodeAt(s.i);s.i++;if(t>=55296&&t<56319){t=(((t-55296)<<10)+(s.text.charCodeAt(s.i)-56320))+65536;s.i++}return t},getCharList:function(w,v){var u,z,s=w.cache,B=v;if(s[v]){return s[v]}if(v>65535&&this.FONTDATA.RemapPlane1){var y=this.FONTDATA.RemapPlane1(v,w);v=y.n;w=y.variant}var t=this.FONTDATA.RANGES,A=this.FONTDATA.VARIANT;if(v>=t[0].low&&v<=t[t.length-1].high){for(u=0,z=t.length;u<z;u++){if(t[u].name==="alpha"&&w.noLowerCase){continue}var x=w["offset"+t[u].offset];if(x&&v>=t[u].low&&v<=t[u].high){if(t[u].remap&&t[u].remap[v]){v=x+t[u].remap[v]}else{v=v-t[u].low+x;if(t[u].add){v+=t[u].add}}if(w["variant"+t[u].offset]){w=A[w["variant"+t[u].offset]]}break}}}s[B]=this.remapChar(w,v,0);return s[B]},remapChar:function(t,y,w){var v=[],x=this.FONTDATA.VARIANT;if(t.remap&&t.remap[y]){y=t.remap[y];if(t.remap.variant){t=x[t.remap.variant]}}else{if(this.FONTDATA.REMAP[y]&&!t.noRemap){y=this.FONTDATA.REMAP[y]}}if(g(y)){if(y[2]){w=n}t=x[y[1]];y=y[0]}if(typeof(y)==="string"){var s={text:y,i:0,length:y.length};while(s.i<s.length){y=this.getUnicode(s);var u=this.getCharList(t,y);if(u){v.push.apply(v,u)}}}else{if(t.cache[y]){v=t.cache[y]}else{t.cache[y]=v=this.lookupChar(t,y,w)}}return v},lookupChar:function(v,z,x){var y=v;while(v){for(var u=0,s=v.fonts.length;u<s;u++){var t=this.FONTDATA.FONTS[v.fonts[u]];if(typeof(t)==="string"){this.loadFont(t)}var w=t[z];if(w){this.fixChar(w,z);if(w[5].space){return[{type:"space",w:w[2],font:t}]}return[{type:"char",font:t,n:z}]}else{if(t.Extra){this.findBlock(t,z)}}}v=this.FONTDATA.VARIANT[v.chain];if(v&&v.remap&&v.remap[z]&&x++<n){return this.remapChar(v,z,x)}}return[this.unknownChar(y,z)]},fixChar:function(s,t){if(s.length===5){s[5]={}}if(s.c==null){s[0]/=1000;s[1]/=1000;s[2]/=1000;s[3]/=1000;s[4]/=1000;s.c=this.unicodeChar(t)}return s},findBlock:function(u,y){var t=u.Extra,v=u.file,x;for(var w=0,s=t.length;w<s;w++){if(typeof(t[w])==="number"){if(y===t[w]){x=v;break}}else{if(y<t[w][0]){return}if(y<=t[w][1]){x=v;break}}}if(x){delete u.Extra;this.loadFont(v)}},unknownChar:function(s,v){f.signal.Post(["CommonHTML Jax - unknown char",v,s]);var 
u="";if(s.bold){u+="B"}if(s.italic){u+="I"}var t=this.FONTDATA.UNKNOWN[u||"R"];if(!t[v]){this.getUnknownChar(t,v)}return{type:"unknown",n:v,font:t}},getUnknownChar:function(t,v){var u=this.unicodeChar(v);var s=this.getHDW(u,t.className);t[v]=[0.8,0.2,s.w,0,s.w,{a:Math.max(0,(s.h-s.d)/2),h:s.h,d:s.d}];t[v].c=u},styledText:function(t,w){f.signal.Post(["CommonHTML Jax - styled text",w,t]);var u=t.style;var x="_"+(u["font-family"]||t.className||"");if(u["font-weight"]){x+="_"+u["font-weight"]}if(u["font-style"]){x+="_"+u["font-style"]}if(!this.STYLEDTEXT){this.STYLEDTEXT={}}if(!this.STYLEDTEXT[x]){this.STYLEDTEXT[x]={className:t.className||""}}var v=this.STYLEDTEXT[x];if(!v["_"+w]){var s=this.getHDW(w,t.className||"",u);v["_"+w]=[0.8,0.2,s.w,0,s.w,{a:Math.max(0,(s.h-s.d)/2),h:s.h,d:s.d}];v["_"+w].c=w}return{type:"unknown",n:"_"+w,font:v,style:u,rscale:t.rscale}},getHDW:function(B,u,F){var t=a.addElement(a.CHTMLnode,"mjx-chartest",{className:u},[["mjx-char",{style:F},[B]]]);var s=a.addElement(a.CHTMLnode,"mjx-chartest",{className:u},[["mjx-char",{style:F},[B,["mjx-box"]]]]);t.firstChild.style.fontSize=s.firstChild.style.fontSize="";var v=5*a.em;var E=t.offsetHeight,C=s.offsetHeight,x=t.offsetWidth;a.CHTMLnode.removeChild(t);a.CHTMLnode.removeChild(s);if(C===0){v=5*a.defaultEm;var A=document.body.appendChild(document.createElement("div"));A.appendChild(t);A.appendChild(s);E=t.offsetHeight,C=s.offsetHeight,x=t.offsetWidth;document.body.removeChild(A)}var z=(C-1000)/v,D=x/v,y=E/v-z;return{h:y,d:z,w:D}},addCharList:function(v,x,y){var w={text:"",className:null,a:0};for(var t=0,s=x.length;t<s;t++){var u=x[t];if(this.charList[u.type]){(this.charList[u.type])(u,v,y,w,s)}}if(w.text!==""){if(v.childNodes.length){this.charList.flushText(v,w)}else{m.addText(v,w.text);if(v.className){v.className+=" "+w.className}else{v.className=w.className}}}y.b=(w.flushed?0:y.a)},charList:{"char":function(D,x,B,u,y){var w=D.font,A=(w.remapCombining||{})[D.n];if(w.className===u.className){A=null}else{if(u.className||(A&&u.text!=="")){this.flushText(x,u)}}if(!u.a){u.a=w.centerline/1000}if(u.a>(B.a||0)){B.a=u.a}u.className=w.className;var t=w[D.n];if(A){var v=w;if(g(A)){v=a.FONTDATA.FONTS[A[1]];A=A[0];if(typeof(v)==="string"){a.loadFont(v)}}if(v[D.n]){a.fixChar(v[D.n],D.n)}t=a.fixChar(v[A],A);u.className=v.className}u.text+=t.c;if(B.h<t[0]+j){B.t=B.h=t[0]+j}if(B.d<t[1]+b){B.b=B.d=t[1]+b}if(B.l>B.w+t[3]){B.l=B.w+t[3]}if(B.r<B.w+t[4]){B.r=B.w+t[4]}B.w+=t[2]*(D.rscale||1);if(y==1&&w.skew&&w.skew[D.n]){B.skew=w.skew[D.n]}if(t[5]&&t[5].rfix){this.flushText(x,u).style.marginRight=a.Em(t[5].rfix/1000)}if(A){var z=this.flushText(x,u);var s=(v[D.n]||w[D.n])[4]-(t[4]-t[2]);z.style.marginLeft=a.Em(-t[2]-s);if(s<0){z.style.marginRight=a.Em(-s)}}},space:function(t,s,v,u){if(t.w){if(u.text===""){u.className=t.font.className}this.flushText(s,u).style.marginRight=a.Em(t.w);v.w+=t.w}},unknown:function(t,s,w,u){(this["char"])(t,s,w,u,0);var v=t.font[t.n];if(v[5].a){u.a=v[5].a;if(w.a==null||u.a>w.a){w.a=u.a}}s=this.flushText(s,u,t.style);if(v[2]<3){s.style.width=a.Em(v[2])}},flushText:function(t,u,s){t=a.addElement(t,"mjx-charbox",{className:u.className,style:s},[u.text]);if(u.a){t.style.paddingBottom=a.Em(u.a)}u.text="";u.className=null;u.a=0;u.flushed=true;return t}},handleText:function(u,x,t,w){if(u.childNodes.length===0){a.addElement(u,"mjx-char");w=a.BBOX.empty(w)}if(typeof(t)==="string"){t=this.FONTDATA.VARIANT[t]}if(!t){t=this.FONTDATA.VARIANT[d.VARIANT.NORMAL]}var 
s={text:x,i:0,length:x.length},v=[];if(t.style&&s.length){v.push(this.styledText(t,x))}else{while(s.i<s.length){var y=this.getUnicode(s);v.push.apply(v,this.getCharList(t,y))}}if(v.length){this.addCharList(u.firstChild,v,w)}w.clean();if(w.d<0){w.D=w.d;w.d=0}if(w.h-w.a){u.firstChild.style[w.h-w.a<0?"marginTop":"paddingTop"]=this.EmRounded(w.h-w.a)}if(w.d>-w.b){u.firstChild.style.paddingBottom=this.EmRounded(w.d+w.b)}return w},createDelimiter:function(x,s,u,A,v){if(!s){var B=this.BBOX.zero();B.w=B.r=this.TEX.nulldelimiterspace;a.addElement(x,"mjx-box",{style:{width:B.w}});return B}if(!(u instanceof Array)){u=[u,u]}var z=u[1];u=u[0];var t={alias:s};while(t.alias){s=t.alias;t=this.FONTDATA.DELIMITERS[s];if(!t){t={HW:[0,this.FONTDATA.VARIANT[d.VARIANT.NORMAL]]}}}if(t.load){f.RestartAfter(o.Require(this.fontDir+"/TeX/fontdata-"+t.load+".js"))}for(var y=0,w=t.HW.length;y<w;y++){if(t.HW[y][0]>=u-0.01||(y==w-1&&!t.stretch)){if(t.HW[y][3]){s=t.HW[y][3]}B=this.createChar(x,[s,t.HW[y][1]],(t.HW[y][2]||1),v);B.offset=0.6*B.w;if(A){B.scale=A.scale;A.rscale=A.rscale}return B}}if(!t.stretch){return B}return this["extendDelimiter"+t.dir](x,z,t.stretch,A,v)},extendDelimiterV:function(E,x,P,w,C){E=a.addElement(E,"mjx-delim-v");var N=a.Element("span");var B,A,O,v,I,t,F,y,G=1,M;I=this.createChar(N,(P.top||P.ext),1,C);B=N.removeChild(N.firstChild);t=this.createChar(N,(P.bot||P.ext),1,C);A=N.removeChild(N.firstChild);F=y=a.BBOX.zero();var J=I.h+I.d+t.h+t.d-l;E.appendChild(B);if(P.mid){F=this.createChar(N,P.mid,1,C);O=N.removeChild(N.firstChild);J+=F.h+F.d;G=2}if(P.min&&x<J*P.min){x=J*P.min}if(x>J){y=this.createChar(N,P.ext,1,C);v=N.removeChild(N.firstChild);var L=y.h+y.d,u=L-l;var D=Math.min(Math.ceil((x-J)/(G*u)),this.maxStretchyParts);if(P.fullExtenders){x=D*G*u+J}else{u=(x-J)/(G*D)}M=y.d+y.a-L/2;v.style.margin=v.style.padding="";v.style.lineHeight=a.Em(u);v.style.marginBottom=a.Em(M-l/2/G);v.style.marginTop=a.Em(-M-l/2/G);var K=v.textContent,z="\n"+K;while(--D>0){K+=z}v.textContent=K;E.appendChild(v);if(P.mid){E.appendChild(O);E.appendChild(v.cloneNode(true))}}else{M=(x-J-l)/G;B.style.marginBottom=a.Em(M+parseFloat(B.style.marginBottom||"0"));if(P.mid){E.appendChild(O)}A.style.marginTop=a.Em(M+parseFloat(A.style.marginTop||"0"))}E.appendChild(A);var s=a.BBOX({w:Math.max(I.w,y.w,t.w,F.w),l:Math.min(I.l,y.l,t.l,F.l),r:Math.max(I.r,y.r,t.r,F.r),h:x-t.d,d:t.d,t:x-t.d,b:t.d});s.offset=0.5*s.w;if(w){s.scale=w.scale;s.rscale=w.rscale}return s},extendDelimiterH:function(F,s,P,v,D){F=a.addElement(F,"mjx-delim-h");var N=a.Element("span");var t,M,O,u,K,C,x,G,z,H=1;C=this.createChar(N,(P.left||P.rep),1,D);t=N.removeChild(N.firstChild);x=this.createChar(N,(P.right||P.rep),1,D);M=N.removeChild(N.firstChild);z=this.createChar(N,P.rep,1,D);u=N.removeChild(N.firstChild);t.style.marginLeft=a.Em(-C.l);M.style.marginRight=a.Em(x.r-x.w);F.appendChild(t);var Q=a.BBOX.zero();Q.h=Math.max(C.h,x.h,z.h);Q.d=Math.max(C.D||C.d,x.D||x.d,z.D||z.d);var y=(C.r-C.l)+(x.r-x.l)-l;if(P.mid){G=this.createChar(N,P.mid,1,D);O=N.removeChild(N.firstChild);O.style.marginleft=a.Em(-G.l);O.style.marginRight=a.Em(G.r-G.w);y+=G.r-G.l+l;H=2;if(G.h>Q.h){Q.h=G.h}if(G.d>Q.d){Q.d=G.d}}if(P.min&&s<y*P.min){s=y*P.min}Q.w=Q.r=s;if(s>y){var B=z.r-z.l,J=B-l;var E=Math.min(Math.ceil((s-y)/(H*J)),this.maxStretchyParts);if(P.fullExtenders){s=E*H*J+y}else{J=(s-y)/(H*E)}var L=(B-J+l/H)/2;u.style.marginLeft=a.Em(-z.l-L);u.style.marginRight=a.Em(z.r-z.w+L);u.style.letterSpacing=a.Em(-(z.w-J));t.style.marginRight=a.Em(C.r-C.w);M.style.marginleft=a.Em(-x.l);var 
I=u.textContent,A=I;while(--E>0){I+=A}u.textContent=I;F.appendChild(u);if(P.mid){F.appendChild(O);K=F.appendChild(u.cloneNode(true))}}else{L=(s-y-l/H)/2;t.style.marginRight=a.Em(C.r-C.w+L);if(P.mid){F.appendChild(O)}M.style.marginLeft=a.Em(-x.l+L)}F.appendChild(M);this.adjustHeights([t,u,O,K,M],[C,z,G,z,x],Q);if(v){Q.scale=v.scale;Q.rscale=v.rscale}return Q},adjustHeights:function(t,w,x){var u=x.h,y=x.d;if(x.d<0){y=-x.d;x.D=x.d;x.d=0}for(var v=0,s=t.length;v<s;v++){if(t[v]){t[v].style.paddingTop=a.Em(u-w[v].a);t[v].style.paddingBottom=a.Em(y+w[v].a);t[v].style.marginTop=t[v].style.marginBottom=0}}},createChar:function(u,y,w,t){var B="",x={fonts:[y[1]],noRemap:true,cache:{}};if(t&&t===d.VARIANT.BOLD&&this.FONTDATA.FONTS[y[1]+"-Bold"]){x.fonts=[y[1]+"-Bold",y[1]]}if(typeof(y[1])!=="string"){x=y[1]}if(y[0] instanceof Array){for(var z=0,v=y[0].length;z<v;z++){B+=String.fromCharCode(y[0][z])}}else{B=String.fromCharCode(y[0])}if(y[4]){w*=y[4]}var A=this.handleText(u,B,x),s=u.firstChild.style;if(w!==1){s.fontSize=this.Percent(w)}if(y[2]){s.paddingLeft=this.Em(y[2]);A.w+=y[2];A.r+=y[2]}if(y[3]){s.verticalAlign=this.Em(y[3]);A.h+=y[3];if(A.h<0){A.h=0}}if(y[5]){s.marginTop=this.Em(y[5]);A.h+=y[5];A.t+=y[5]}if(y[6]){s.marginBottom=this.Em(y[6]);A.d+=y[6];A.b+=y[6]}return A},length2em:function(w,u,x){if(typeof(w)!=="string"){w=w.toString()}if(w===""){return""}if(w===d.SIZE.NORMAL){return 1}if(w===d.SIZE.BIG){return 2}if(w===d.SIZE.SMALL){return 0.71}if(this.MATHSPACE[w]){return this.MATHSPACE[w]}var t=w.match(/^\s*([-+]?(?:\.\d+|\d+(?:\.\d*)?))?(pt|em|ex|mu|px|pc|in|mm|cm|%)?/);var s=parseFloat(t[1]||"1"),v=t[2];if(u==null){u=1}if(!x){x=1}x=1/this.em/x;if(v==="em"){return s}if(v==="ex"){return s*this.TEX.x_height}if(v==="%"){return s/100*u}if(v==="px"){return s*x}if(v==="pt"){return s/10}if(v==="pc"){return s*1.2}x*=this.pxPerInch;if(v==="in"){return s*x}if(v==="cm"){return s*x/2.54}if(v==="mm"){return s*x/25.4}if(v==="mu"){return s/18}return s*u},thickness2em:function(s,t){var u=a.TEX.rule_thickness/(t||1);if(s===d.LINETHICKNESS.MEDIUM){return u}if(s===d.LINETHICKNESS.THIN){return 0.67*u}if(s===d.LINETHICKNESS.THICK){return 1.67*u}return this.length2em(s,u,t)},Em:function(s){if(Math.abs(s)<0.001){return"0"}return(s.toFixed(3).replace(/\.?0+$/,""))+"em"},EmRounded:function(s){s=(Math.round(s*a.em)+0.05)/a.em;if(Math.abs(s)<0.0006){return"0em"}return s.toFixed(3).replace(/\.?0+$/,"")+"em"},unEm:function(s){return parseFloat(s)},Px:function(s,t){s*=this.em;if(t&&s<t){s=t}if(Math.abs(s)<0.1){return"0"}return s.toFixed(1).replace(/\.0$/,"")+"px"},Percent:function(s){return(100*s).toFixed(1).replace(/\.?0+$/,"")+"%"},Transform:function(v,t,s){var u=v.style;u.transform=u.WebkitTransform=u.MozTransform=u["-ms-transform"]=t;if(s){u.transformOrigin=u.WebkitTransformOrigin=u.MozTransformOrigin=u["-ms-transform-origin"]=s}},arrayEntry:function(s,t){return s[Math.max(0,Math.min(t,s.length-1))]},removeStyles:["fontSize","fontFamily","fontWeight","fontStyle","fontVariant","font"]});a.BBOX=MathJax.Object.Subclass({Init:function(s){for(var t in s){if(s.hasOwnProperty(t)){this[t]=s[t]}}},clean:function(){if(this.h===-i){this.h=0}if(this.d===-i){this.d=0}if(this.l===i){this.l=0}if(this.r===-i){this.r=0}if(this.t===-i){this.t=0}if(this.b===-i){this.b=0}if(this.D&&this.d>0){delete this.D}},rescale:function(s){this.w*=s;this.h*=s;this.d*=s;this.l*=s;this.r*=s;this.t*=s;this.b*=s;if(this.L){this.L*=s}if(this.R){this.R*=s}if(this.D){this.D*=s}},combine:function(t,s,v){t.X=s;t.Y=v;var 
u=t.rscale;if(s+u*t.r>this.r){this.r=s+u*t.r}if(s+u*t.l<this.l){this.l=s+u*t.l}if(s+u*(t.w+(t.L||0)+(t.R||0))>this.w){this.w=s+u*(t.w+(t.L||0)+(t.R||0))}if(v+u*t.h>this.h){this.h=v+u*t.h}if(t.D&&(this.D==null||u*t.D-v>this.D)&&u*t.D>this.d){this.D=u*t.D-v}else{if(t.D==null&&this.D){delete this.D}}if(u*t.d-v>this.d){this.d=u*t.d-v}if(v+u*t.t>this.t){this.t=v+u*t.t}if(u*t.b-v>this.b){this.b=u*t.b-v}},append:function(t){var u=t.rscale;var s=this.w;if(s+u*t.r>this.r){this.r=s+u*t.r}if(s+u*t.l<this.l){this.l=s+u*t.l}this.w+=u*(t.w+(t.L||0)+(t.R||0));if(u*t.h>this.h){this.h=u*t.h}if(t.D&&(this.D==null||u*t.D>this.D)&&u*t.D>this.d){this.D=u*t.D}else{if(t.D==null&&this.D){delete this.D}}if(u*t.d>this.d){this.d=u*t.d}if(u*t.t>this.t){this.t=u*t.t}if(u*t.b>this.b){this.b=u*t.b}},updateFrom:function(s){this.h=s.h;this.d=s.d;this.w=s.w;this.r=s.r;this.l=s.l;this.t=s.t;this.b=s.b;if(s.pwidth){this.pwidth=s.pwidth}if(s.D){this.D=s.D}else{delete this.D}},adjust:function(t,s,v,u){this[s]+=a.length2em(t,1,this.scale);if(u==null){if(this[s]>this[v]){this[v]=this[s]}}else{if(this[v]<u){this[v]=u}}}},{zero:function(){return a.BBOX({h:0,d:0,w:0,l:0,r:0,t:0,b:0,scale:1,rscale:1})},empty:function(s){if(!s){s=a.BBOX.zero()}s.h=s.d=s.r=s.t=s.b=-i;s.w=0;s.l=i;delete s.pwidth;return s},styleAdjust:[["borderTopWidth","h","t"],["borderRightWidth","w","r"],["borderBottomWidth","d","b"],["borderLeftWidth","w","l",0],["paddingTop","h","t"],["paddingRight","w","r"],["paddingBottom","d","b"],["paddingLeft","w","l",0],]});MathJax.Hub.Register.StartupHook("mml Jax Ready",function(){d=MathJax.ElementJax.mml;d.mbase.Augment({toCommonHTML:function(t,s){return this.CHTMLdefaultNode(t,s)},CHTMLmultiline:function(){d.mbase.CHTMLautoloadFile("multiline")},CHTMLdefaultNode:function(v,t){if(!t){t={}}v=this.CHTMLcreateNode(v);this.CHTML=a.BBOX.empty();this.CHTMLhandleStyle(v);if(this.isToken){this.CHTMLgetVariant()}this.CHTMLhandleScale(v);var s=Math.max((t.minChildren||0),this.data.length);for(var u=0;u<s;u++){this.CHTMLaddChild(v,u,t)}if(!t.noBBox){this.CHTML.clean()}this.CHTMLhandleSpace(v);this.CHTMLhandleBBox(v);this.CHTMLhandleColor(v);return v},CHTMLaddChild:function(x,t,s){var z=this.data[t],w;var u=s.childNodes;if(u instanceof Array){u=u[t]||"span"}if(z){if(u){x=a.addElement(x,u)}w=z.toCommonHTML(x,s.childOptions);if(u&&z.CHTML.rscale!==1){x.style.fontSize=x.firstChild.style.fontSize;x.firstChild.style.fontSize=""}if(!s.noBBox){var y=this.CHTML,v=z.CHTML;y.append(v);if(this.data.length===1){if(v.ic){y.ic=v.ic}if(v.skew){y.skew=v.skew}}else{delete y.ic;delete y.skew}if(v.pwidth){y.pwidth=v.pwidth}}}else{if(s.forceChild){w=a.addElement(x,(u||"mjx-box"))}}return w},CHTMLchildNode:function(t,s){t=t.childNodes[s];if(t.nodeName.toLowerCase()==="a"){t=t.firstChild}return t},CHTMLcoreNode:function(s){if(this.inferRow&&this.data[0]){return this.data[0].CHTMLcoreNode(s.firstChild)}return this.CHTMLchildNode(s,this.CoreIndex())},CHTMLstretchChildV:function(v,u,y){var x=this.data[v];if(x){var z=this.CHTML,t=x.CHTML;if(t.stretch||(t.stretch==null&&x.CHTMLcanStretch("Vertical",u,y))){var s=t.w;t=x.CHTMLstretchV(u,y);z.w+=t.w-s;if(z.w>z.r){z.r=z.w}if(t.h>z.h){z.h=t.h}if(t.d>z.d){z.d=t.d}if(t.t>z.t){z.t=t.t}if(t.b>z.b){z.b=t.b}}}},CHTMLstretchChildH:function(v,s,x){var y=this.data[v];if(y){var z=this.CHTML,u=y.CHTML;if(u.stretch||(u.stretch==null&&y.CHTMLcanStretch("Horizontal",s))){var 
t=u.w;u=y.CHTMLstretchH(this.CHTMLchildNode(x,v),s);z.w+=u.w-t;if(z.w>z.r){z.r=z.w}if(u.h>z.h){z.h=u.h}if(u.d>z.d){z.d=u.d}if(u.t>z.t){z.t=u.t}if(u.b>z.b){z.b=u.b}}}},CHTMLupdateFrom:function(s){this.CHTML.updateFrom(s);if(this.inferRow){this.data[0].CHTML.updateFrom(s)}},CHTMLcanStretch:function(w,u,v){var t=false;if(this.isEmbellished()){var s=this.Core();if(s&&s!==this){t=s.CHTMLcanStretch(w,u,v)}}this.CHTML.stretch=t;return t},CHTMLstretchV:function(s,t){this.CHTMLupdateFrom(this.Core().CHTMLstretchV(s,t));return this.CHTML},CHTMLstretchH:function(t,s){this.CHTMLupdateFrom(this.CHTMLstretchCoreH(t,s));return this.CHTML},CHTMLstretchCoreH:function(t,s){return this.Core().CHTMLstretchH(this.CHTMLcoreNode(t),s)},CHTMLcreateNode:function(s){if(!this.CHTML){this.CHTML={}}this.CHTML=a.BBOX.zero();if(this.href){s=a.addElement(s,"a",{href:this.href,isMathJax:true})}if(!this.CHTMLnodeID){this.CHTMLnodeID=a.GetID()}var t=(this.id||"MJXc-Node-"+this.CHTMLnodeID)+a.idPostfix;return this.CHTMLhandleAttributes(a.addElement(s,"mjx-"+this.type,{id:t}))},CHTMLnodeElement:function(){if(!this.CHTMLnodeID){return null}return document.getElementById((this.id||"MJXc-Node-"+this.CHTMLnodeID)+a.idPostfix)},CHTMLlength2em:function(t,s){return a.length2em(t,s,this.CHTML.scale)},CHTMLhandleAttributes:function(v){if(this["class"]){if(v.className){v.className+=" "+this["class"]}else{v.className=this["class"]}}if(this.attrNames){var z=this.attrNames,u=d.nocopyAttributes,y=f.config.ignoreMMLattributes;var w=(this.type==="mstyle"?d.math.prototype.defaults:this.defaults);for(var t=0,s=z.length;t<s;t++){var x=z[t];if(y[x]==false||(!u[x]&&!y[x]&&w[x]==null&&typeof(v[x])==="undefined")){v.setAttribute(x,this.attr[x])}}}return v},CHTMLhandleScale:function(v){var x=1,u=this.parent,w=(u?u.CHTML.scale:1);var s=this.getValues("scriptlevel","fontsize");s.mathsize=this.Get("mathsize",null,!this.isToken);if(s.scriptlevel!==0){if(s.scriptlevel>2){s.scriptlevel=2}x=Math.pow(this.Get("scriptsizemultiplier"),s.scriptlevel);s.scriptminsize=a.length2em(this.Get("scriptminsize"),0.8,1);if(x<s.scriptminsize){x=s.scriptminsize}}if(this.removedStyles&&this.removedStyles.fontSize&&!s.fontsize){s.fontsize=this.removedStyles.fontSize}if(s.fontsize&&!this.mathsize){s.mathsize=s.fontsize}if(s.mathsize!==1){x*=a.length2em(s.mathsize,1,1)}var t=this.CHTMLvariant;if(t&&t.style&&t.style["font-family"]){x*=(a.config.scale/100)/a.scale}this.CHTML.scale=x;w=this.CHTML.rscale=x/w;if(Math.abs(w-1)<0.001){w=1}if(v&&w!==1){v.style.fontSize=a.Percent(w)}return x},CHTMLhandleStyle:function(v){if(!this.style){return}var u=v.style;u.cssText=this.style;this.removedStyles={};for(var t=0,s=a.removeStyles.length;t<s;t++){var w=a.removeStyles[t];if(u[w]){this.removedStyles[w]=u[w];u[w]=""}}},CHTMLhandleBBox:function(w){var t=this.CHTML,v=w.style;if(this.data.length===1&&(this.data[0].CHTML||{}).pwidth){t.pwidth=this.data[0].CHTML.pwidth;t.mwidth=this.data[0].CHTML.mwidth;v.width="100%"}else{if(t.pwidth){t.mwidth=a.Em(t.w);v.width="100%"}else{if(t.w<0){v.width="0px";v.marginRight=a.Em(t.w)}}}if(!this.style){return}for(var u=0,s=a.BBOX.styleAdjust.length;u<s;u++){var x=a.BBOX.styleAdjust[u];if(x&&v[x[0]]){t.adjust(v[x[0]],x[1],x[2],x[3])}}},CHTMLhandleColor:function(s){if(this.mathcolor){s.style.color=this.mathcolor}else{if(this.color){s.style.color=this.color}}if(this.mathbackground){s.style.backgroundColor=this.mathbackground}else{if(this.background){s.style.backgroundColor=this.background}}},CHTMLhandleSpace:function(s){if(!this.useMMLspacing){var 
t=this.texSpacing();if(t!==""){this.CHTML.L=this.CHTMLlength2em(t);s.className+=" "+a.SPACECLASS[t]}}},CHTMLhandleText:function(t,u,s){if(t.firstChild&&!this.CHTML){this.CHTML=a.BBOX.empty()}this.CHTML=a.handleText(t,u,s,this.CHTML)},CHTMLgetVariant:function(){var s=this.getValues("mathvariant","fontfamily","fontweight","fontstyle"),u;s.hasVariant=this.Get("mathvariant",true);if(this.removedStyles){u=this.removedStyles;if(u.fontFamily){s.family=u.fontFamily}if(u.fontWeight){s.weight=u.fontWeight}if(u.fontStyle){s.style=u.fontStyle}}if(!s.hasVariant){if(s.fontfamily){s.family=s.fontfamily}if(s.fontweight){s.weight=s.fontweight}if(s.fontstyle){s.style=s.fontstyle}}if(s.weight&&s.weight.match(/^\d+$/)){s.weight=(parseInt(s.weight)>600?"bold":"normal")}var t=s.mathvariant;if(this.variantForm){t="-TeX-variant"}if(s.family&&!s.hasVariant){if(!s.weight&&s.mathvariant.match(/bold/)){s.weight="bold"}if(!s.style&&s.mathvariant.match(/italic/)){s.style="italic"}this.CHTMLvariant={fonts:[],noRemap:true,cache:{},style:{"font-family":s.family,"font-weight":s.weight||"normal","font-style":s.style||"normal"}};return}if(s.weight==="bold"){t={normal:d.VARIANT.BOLD,italic:d.VARIANT.BOLDITALIC,fraktur:d.VARIANT.BOLDFRAKTUR,script:d.VARIANT.BOLDSCRIPT,"sans-serif":d.VARIANT.BOLDSANSSERIF,"sans-serif-italic":d.VARIANT.SANSSERIFBOLDITALIC}[t]||t}else{if(s.weight==="normal"){t={bold:d.VARIANT.normal,"bold-italic":d.VARIANT.ITALIC,"bold-fraktur":d.VARIANT.FRAKTUR,"bold-script":d.VARIANT.SCRIPT,"bold-sans-serif":d.VARIANT.SANSSERIF,"sans-serif-bold-italic":d.VARIANT.SANSSERIFITALIC}[t]||t}}if(s.style==="italic"){t={normal:d.VARIANT.ITALIC,bold:d.VARIANT.BOLDITALIC,"sans-serif":d.VARIANT.SANSSERIFITALIC,"bold-sans-serif":d.VARIANT.SANSSERIFBOLDITALIC}[t]||t}else{if(s.style==="normal"){t={italic:d.VARIANT.NORMAL,"bold-italic":d.VARIANT.BOLD,"sans-serif-italic":d.VARIANT.SANSSERIF,"sans-serif-bold-italic":d.VARIANT.BOLDSANSSERIF}[t]||t}}this.CHTMLvariant=a.FONTDATA.VARIANT[t]||a.FONTDATA.VARIANT[d.VARIANT.NORMAL]},CHTMLbboxFor:function(s){if(this.data[s]&&this.data[s].CHTML){return this.data[s].CHTML}return a.BBOX.zero()},CHTMLdrawBBox:function(t,u){if(!u){u=this.CHTML}var s=a.Element("mjx-box",{style:{opacity:0.25,"margin-left":a.Em(-(u.w+(u.R||0)))}},[["mjx-box",{style:{height:a.Em(u.h),width:a.Em(u.w),"background-color":"red"}}],["mjx-box",{style:{height:a.Em(u.d),width:a.Em(u.w),"margin-left":a.Em(-u.w),"vertical-align":a.Em(-u.d),"background-color":"green"}}]]);if(t.nextSibling){t.parentNode.insertBefore(s,t.nextSibling)}else{t.parentNode.appendChild(s)}},CHTMLnotEmpty:function(s){while(s&&s.data.length<2&&(s.type==="mrow"||s.type==="texatom")){s=s.data[0]}return !!s}},{CHTMLautoload:function(){this.constructor.Augment({toCommonHTML:d.mbase.CHTMLautoloadFail});var s=a.autoloadDir+"/"+this.type+".js";f.RestartAfter(o.Require(s))},CHTMLautoloadFail:function(){throw Error("CommonHTML can't autoload '"+this.type+"'")},CHTMLautoloadList:{},CHTMLautoloadFile:function(s){if(d.mbase.CHTMLautoloadList.hasOwnProperty(s)){throw Error("CommonHTML can't autoload file '"+s+"'")}d.mbase.CHTMLautoloadList[s]=true;var t=a.autoloadDir+"/"+s+".js";f.RestartAfter(o.Require(t))},CHTMLstretchV:function(s,t){this.Core().CHTMLstretchV(s,t);this.toCommonHTML(this.CHTMLnodeElement(),{stretch:true});return this.CHTML},CHTMLstretchH:function(t,s){this.CHTMLupdateFrom(this.CHTMLstretchCoreH(t,s));this.toCommonHTML(t,{stretch:true});return this.CHTML}});d.chars.Augment({toCommonHTML:function(t,s){this.CHTML=null;if(s==null){s={}}var 
u=this.toString();if(s.remap){u=s.remap(u,s.remapchars)}this.CHTMLhandleText(t,u,s.variant||this.parent.CHTMLvariant)}});d.entity.Augment({toCommonHTML:function(t,s){if(s==null){s={}}var u=this.toString();if(s.remapchars){u=s.remap(u,s.remapchars)}this.CHTMLhandleText(t,u,s.variant||this.parent.CHTMLvariant)}});d.math.Augment({toCommonHTML:function(x){x=this.CHTMLdefaultNode(x);if(this.CHTML.w<0){x.parentNode.style.width="0px";x.parentNode.style.marginRight=a.Em(this.CHTML.w)}var v=this.Get("alttext");if(v&&!x.getAttribute("aria-label")){x.setAttribute("aria-label",v)}if(this.CHTML.pwidth){x.parentNode.style.minWidth=this.CHTML.mwidth||a.Em(this.CHTML.w);x.parentNode.className="mjx-full-width "+x.parentNode.className;x.style.width=this.CHTML.pwidth}else{if(!this.isMultiline&&this.Get("display")==="block"){var u=this.getValues("indentalignfirst","indentshiftfirst","indentalign","indentshift");if(u.indentalignfirst!==d.INDENTALIGN.INDENTALIGN){u.indentalign=u.indentalignfirst}if(u.indentalign===d.INDENTALIGN.AUTO){u.indentalign=r.displayAlign}if(u.indentshiftfirst!==d.INDENTSHIFT.INDENTSHIFT){u.indentshift=u.indentshiftfirst}if(u.indentshift==="auto"){u.indentshift="0"}var t=this.CHTMLlength2em(u.indentshift,a.cwidth);if(r.displayIndent!=="0"){var s=this.CHTMLlength2em(r.displayIndent,a.cwidth);t+=(u.indentalign===d.INDENTALIGN.RIGHT?-s:s)}var w=x.parentNode.parentNode.style;x.parentNode.style.textAlign=w.textAlign=u.indentalign;if(t){t*=a.em/a.outerEm;f.Insert(w,({left:{marginLeft:a.Em(t)},right:{marginRight:a.Em(-t)},center:{marginLeft:a.Em(t),marginRight:a.Em(-t)}})[u.indentalign])}}}return x}});d.mi.Augment({toCommonHTML:function(s){s=this.CHTMLdefaultNode(s);var u=this.CHTML,t=this.data.join("");if(u.skew!=null&&!a.isChar(t)){delete u.skew}if(u.r>u.w&&a.isChar(t)&&!this.CHTMLvariant.noIC){u.ic=u.r-u.w;u.w=u.r;s.lastChild.style.paddingRight=a.Em(u.ic)}return s}});d.mn.Augment({CHTMLremapMinus:function(s){return s.replace(/^-/,"\u2212")},toCommonHTML:function(s){s=this.CHTMLdefaultNode(s,{childOptions:{remap:this.CHTMLremapMinus}});var u=this.CHTML,t=this.data.join("");if(u.skew!=null&&!a.isChar(t)){delete u.skew}if(u.r>u.w&&a.isChar(t)&&!this.CHTMLvariant.noIC){u.ic=u.r-u.w;u.w=u.r;s.lastChild.style.paddingRight=a.Em(u.ic)}return s}});d.mo.Augment({toCommonHTML:function(v){v=this.CHTMLcreateNode(v);this.CHTMLhandleStyle(v);this.CHTMLgetVariant();this.CHTMLhandleScale(v);a.BBOX.empty(this.CHTML);var t=this.getValues("displaystyle","largeop");t.variant=this.CHTMLvariant;t.text=this.data.join("");if(t.text==""){if(this.fence){v.style.width=a.Em(a.TEX.nulldelimiterspace)}}else{this.CHTMLadjustAccent(t);this.CHTMLadjustVariant(t);for(var u=0,s=this.data.length;u<s;u++){this.CHTMLaddChild(v,u,{childOptions:{variant:t.mathvariant,remap:this.remap,remapchars:t.remapchars}})}if(!a.isChar(t.text)){delete this.CHTML.skew}else{if(this.CHTML.w===0&&this.CHTML.l<0){this.CHTMLfixCombiningChar(v)}}if(t.largeop){this.CHTMLcenterOp(v)}}this.CHTML.clean();this.CHTMLhandleBBox(v);this.CHTMLhandleSpace(v);this.CHTMLhandleColor(v);return v},CHTMLhandleSpace:function(v){if(this.hasMMLspacing()){var t=this.getValues("scriptlevel","lspace","rspace");t.lspace=Math.max(0,this.CHTMLlength2em(t.lspace));t.rspace=Math.max(0,this.CHTMLlength2em(t.rspace));if(t.scriptlevel>0){if(!this.hasValue("lspace")){t.lspace=0.15}if(!this.hasValue("rspace")){t.rspace=0.15}}var 
s=this,u=this.Parent();while(u&&u.isEmbellished()&&u.Core()===s){s=u;u=u.Parent();v=s.CHTMLnodeElement()}if(t.lspace){v.style.paddingLeft=a.Em(t.lspace)}if(t.rspace){v.style.paddingRight=a.Em(t.rspace)}this.CHTML.L=t.lspace;this.CHTML.R=t.rspace}else{this.SUPER(arguments).CHTMLhandleSpace.apply(this,arguments)}},CHTMLadjustAccent:function(u){var t=this.CoreParent();u.parent=t;if(a.isChar(u.text)&&t&&t.isa(d.munderover)){var v=t.data[t.over],s=t.data[t.under];if(v&&this===v.CoreMO()&&t.Get("accent")){u.remapchars=a.FONTDATA.REMAPACCENT}else{if(s&&this===s.CoreMO()&&t.Get("accentunder")){u.remapchars=a.FONTDATA.REMAPACCENTUNDER}}}},CHTMLadjustVariant:function(t){var s=t.parent,u=(s&&s.isa(d.msubsup)&&this!==s.data[s.base]);if(t.largeop){t.mathvariant=(t.displaystyle?"-largeOp":"-smallOp")}if(u){t.remapchars=this.remapChars;if(t.text.match(/['`"\u00B4\u2032-\u2037\u2057]/)){t.mathvariant="-TeX-variant"}}},CHTMLfixCombiningChar:function(s){s=s.firstChild;var t=a.Element("mjx-box",{style:{width:".25em","margin-left":"-.25em"}});s.insertBefore(t,s.firstChild)},CHTMLcenterOp:function(s){var u=this.CHTML;var t=(u.h-u.d)/2-a.TEX.axis_height;if(Math.abs(t)>0.001){s.style.verticalAlign=a.Em(-t)}u.h-=t;u.d+=t;if(u.r>u.w){u.ic=u.r-u.w;u.w=u.r;s.style.paddingRight=a.Em(u.ic)}},CHTMLcanStretch:function(w,u,v){if(!this.Get("stretchy")){return false}var x=this.data.join("");if(!a.isChar(x)){return false}var t={text:x};this.CHTMLadjustAccent(t);if(t.remapchars){x=t.remapchars[x]||x}x=a.FONTDATA.DELIMITERS[x.charCodeAt(0)];var s=(x&&x.dir===w.substr(0,1));if(s){s=(this.CHTML.h!==u||this.CHTML.d!==v||!!this.Get("minsize",true)||!!this.Get("maxsize",true));if(s){this.CHTML.stretch=true}}return s},CHTMLstretchV:function(v,y){var w=this.CHTMLnodeElement(),x=this.CHTML;var t=this.getValues("symmetric","maxsize","minsize");var u,s=a.TEX.axis_height;if(t.symmetric){u=2*Math.max(v-s,y+s)}else{u=v+y}t.maxsize=this.CHTMLlength2em(t.maxsize,x.h+x.d);t.minsize=this.CHTMLlength2em(t.minsize,x.h+x.d);u=Math.max(t.minsize,Math.min(t.maxsize,u));if(u!==x.sH){if(u!=t.minsize){u=[Math.max(u*a.TEX.delimiterfactor/1000,u-a.TEX.delimitershortfall),u]}while(w.firstChild){w.removeChild(w.firstChild)}this.CHTML=x=a.createDelimiter(w,this.data.join("").charCodeAt(0),u,x);x.sH=(u instanceof Array?u[1]:u);if(t.symmetric){u=(x.h+x.d)/2+s}else{u=(x.h+x.d)*v/(v+y)}u-=x.h;if(Math.abs(u)>0.05){w.style.verticalAlign=a.Em(u);x.h+=u;x.d-=u;x.t+=u;x.b-=u}}return this.CHTML},CHTMLstretchH:function(u,s){var v=this.CHTML;var t=this.getValues("maxsize","minsize","mathvariant","fontweight");if((t.fontweight==="bold"||(this.removedStyles||{}).fontWeight==="bold"||parseInt(t.fontweight)>=600)&&!this.Get("mathvariant",true)){t.mathvariant=d.VARIANT.BOLD}t.maxsize=this.CHTMLlength2em(t.maxsize,v.w);t.minsize=this.CHTMLlength2em(t.minsize,v.w);s=Math.max(t.minsize,Math.min(t.maxsize,s));if(s!==v.sW){while(u.firstChild){u.removeChild(u.firstChild)}this.CHTML=v=a.createDelimiter(u,this.data.join("").charCodeAt(0),s,v,t.mathvariant);v.sW=s}return this.CHTML}});d.mtext.Augment({CHTMLgetVariant:function(){if(a.config.mtextFontInherit||this.Parent().type==="merror"){var u=(a.config.scale/100)/a.scale;var t={cache:{},fonts:[],className:"MJXc-font-inherit",rscale:u,style:{"font-size":a.Percent(u)}};var s=this.Get("mathvariant");if(s.match(/bold/)){t.style["font-weight"]="bold"}if(s.match(/italic|-tex-mathit/)){t.style["font-style"]="italic"}if(s==="monospace"){t.className+=" MJXc-monospace-font"}if(s==="double-struck"){t.className+=" 
MJXc-double-struck-font"}if(s.match(/fraktur/)){t.className+=" MJXc-fraktur-font"}if(s.match(/sans-serif/)){t.className+=" MJXc-sans-serif-font"}if(s.match(/script/)){t.className+=" MJXc-script-font"}this.CHTMLvariant=t}else{this.SUPER(arguments).CHTMLgetVariant.call(this)}}});d.merror.Augment({toCommonHTML:function(s){s=this.CHTMLdefaultNode(s);var t=this.CHTML;t.rescale(0.9);t.h+=3/a.em;if(t.h>t.t){t.t=t.h}t.d+=3/a.em;if(t.d>t.b){t.b=t.d}t.w+=8/a.em;t.r=t.w;t.l=0;return s}});d.mspace.Augment({toCommonHTML:function(v){v=this.CHTMLcreateNode(v);this.CHTMLhandleStyle(v);this.CHTMLhandleScale(v);var t=this.getValues("height","depth","width");var s=this.CHTMLlength2em(t.width),u=this.CHTMLlength2em(t.height),y=this.CHTMLlength2em(t.depth);var x=this.CHTML;x.w=x.r=s;x.h=x.t=u;x.d=x.b=y;x.l=0;if(s<0){v.style.marginRight=a.Em(s);s=0}v.style.width=a.Em(s);v.style.height=a.Em(Math.max(0,u+y));if(y){v.style.verticalAlign=a.Em(-y)}this.CHTMLhandleBBox(v);this.CHTMLhandleColor(v);return v}});d.mpadded.Augment({toCommonHTML:function(t,F){var s;if(F&&F.stretch){t=t.firstChild;s=t.firstChild}else{t=this.CHTMLdefaultNode(t,{childNodes:"mjx-box",forceChild:true});s=t.firstChild;t=a.addElement(t,"mjx-block");t.appendChild(s);a.addElement(t,"mjx-strut")}var z=this.CHTMLbboxFor(0);var D=this.getValues("width","height","depth","lspace","voffset");var B=0,A=0,C=z.w,u=z.h,v=z.d;s.style.width=0;s.style.margin=a.Em(-u)+" 0 "+a.Em(-v);if(D.width!==""){C=this.CHTMLdimen(D.width,"w",C,0)}if(D.height!==""){u=this.CHTMLdimen(D.height,"h",u,0)}if(D.depth!==""){v=this.CHTMLdimen(D.depth,"d",v,0)}if(D.voffset!==""){A=this.CHTMLdimen(D.voffset);if(A){s.style.position="relative";s.style.top=a.Em(-A)}}if(D.lspace!==""){B=this.CHTMLdimen(D.lspace);if(B){s.style.position="relative";s.style.left=a.Em(B)}}t.style.width=0;t.style.marginTop=a.Em(u-k);t.style.padding="0 "+a.Em(C)+" "+a.Em(v)+" 0";var E=a.BBOX({w:C,h:u,d:v,l:0,r:C,t:u,b:v,scale:this.CHTML.scale,rscale:this.CHTML.rscale});E.combine(z,B,A);E.w=C;E.h=u;E.d=v;this.CHTML=E;return t.parentNode},CHTMLstretchV:d.mbase.CHTMLstretchV,CHTMLstretchH:d.mbase.CHTMLstretchH,CHTMLdimen:function(w,y,x,s){if(s==null){s=-i}w=String(w);var t=w.match(/width|height|depth/);var u=(t?this.CHTML[t[0].charAt(0)]:(y?this.CHTML[y]:0));var v=(this.CHTMLlength2em(w,u)||0);if(w.match(/^[-+]/)&&x!=null){v+=x}if(s!=null){v=Math.max(s,v)}return v}});d.munderover.Augment({toCommonHTML:function(w,G){var E=this.getValues("displaystyle","accent","accentunder","align");var u=this.data[this.base];if(!E.displaystyle&&u!=null&&(u.movablelimits||u.CoreMO().Get("movablelimits"))){return d.msubsup.prototype.toCommonHTML.call(this,w,t)}var B,z,s=[],t=false;if(G&&G.stretch){if(this.data[this.base]){u=a.getNode(w,"mjx-op")}if(this.data[this.under]){B=a.getNode(w,"mjx-under")}if(this.data[this.over]){z=a.getNode(w,"mjx-over")}s[0]=u;s[1]=B||z;s[2]=z;t=true}else{var y=["mjx-op","mjx-under","mjx-over"];if(this.over===1){y[1]=y[2]}w=this.CHTMLdefaultNode(w,{childNodes:y,noBBox:true,forceChild:true,minChildren:2});s[0]=u=w.removeChild(w.firstChild);s[1]=B=z=w.removeChild(w.firstChild);if(w.firstChild){s[2]=z=w.removeChild(w.firstChild)}}var x=[],v=this.CHTMLgetBBoxes(x,s,E);var F=x[this.base],C=this.CHTML;C.w=v;C.h=F.h;C.d=F.d;if(F.h<0.35){u.style.marginTop=a.Em(F.h-0.35)}if(E.accent&&F.h<a.TEX.x_height){C.h+=a.TEX.x_height-F.h;u.style.marginTop=a.Em(a.TEX.x_height-Math.max(F.h,0.35));F.h=a.TEX.x_height}var 
A=u,D=0;if(F.ic){D=1.3*F.ic+0.05}if(this.data[this.over]){A=this.CHTMLaddOverscript(z,x,E,D,u,t)}if(this.data[this.under]){this.CHTMLaddUnderscript(B,x,E,D,w,A,t)}else{if(!t){w.appendChild(A)}}this.CHTMLplaceBoxes(u,B,z,E,x);return w},CHTMLgetBBoxes:function(A,x,v){var y,t=this.data.length,z,u=-i,s=u;for(y=0;y<t;y++){A[y]=this.CHTMLbboxFor(y);A[y].x=A[y].y=0;if(this.data[y]){A[y].stretch=this.data[y].CHTMLcanStretch("Horizontal")}z=(y===this.base?1:A[y].rscale);if(y!==this.base){delete A[y].L;delete A[y].R}s=Math.max(s,z*(A[y].w+(A[y].L||0)+(A[y].R||0)));if(!A[y].stretch&&s>u){u=s}}if(u===-i){u=s}for(y=0;y<t;y++){if(A[y].stretch){z=(y===this.base?1:A[y].rscale);A[y]=this.data[y].CHTMLstretchH(x[y].firstChild,u/z);A[y].x=A[y].y=0;s=Math.max(s,z*(A[y].w+(A[y].L||0)+(A[y].R||0)))}}if(!A[this.base]){A[this.base]=a.BBOX.empty()}return s},CHTMLaddOverscript:function(B,z,F,E,t,s){var D=this.CHTML;var y,x,w=a.TEX.big_op_spacing5,v;var A=z[this.over],G=z[this.base],u=A.rscale;if(!s){var C=a.Element("mjx-stack");C.appendChild(B);C.appendChild(t)}if(A.D){A.d=A.D}if(A.d<0){B.firstChild.style.verticalAlign="top";B.style.height=a.Em(A.h+A.d)}A.x=0;if(F.accent){if(A.w<0.001){A.x+=(A.r-A.l)/2}v=a.TEX.rule_thickness;w=0;if(G.skew){A.x+=u*G.skew;D.skew=u*G.skew;if(A.x+u*A.w>D.w){D.skew+=(D.w-(A.x+u*A.w))/2}}}else{y=a.TEX.big_op_spacing1;x=a.TEX.big_op_spacing3;v=Math.max(y,x-Math.max(0,u*A.d))}A.x+=E/2;A.y=D.h+v+w+u*A.d;if(v){B.style.paddingBottom=a.Em(v/u)}if(w){B.style.paddingTop=a.Em(w/u)}return C},CHTMLaddUnderscript:function(B,z,E,D,t,A,s){var C=this.CHTML;var y,x,w=a.TEX.big_op_spacing5,v;var F=z[this.under],u=F.rscale;if(!s){a.addElement(t,"mjx-itable",{},[["mjx-row",{},[["mjx-cell"]]],["mjx-row"]]);t.firstChild.firstChild.firstChild.appendChild(A);t.firstChild.lastChild.appendChild(B)}if(F.D){F.d=F.D}if(F.d<0){B.firstChild.style.verticalAlign="top";t.firstChild.style.marginBottom=a.Em(F.d)}if(E.accentunder){v=2*a.TEX.rule_thickness;w=0}else{y=a.TEX.big_op_spacing2;x=a.TEX.big_op_spacing4;v=Math.max(y,x-u*F.h)}F.x=-D/2;F.y=-(C.d+v+w+u*F.h);if(v){B.style.paddingTop=a.Em(v/u)}if(w){B.style.paddingBottom=a.Em(w/u)}},CHTMLplaceBoxes:function(s,B,A,E,z){var t=this.CHTML.w,y,v=z.length,x;var D=a.BBOX.zero();D.scale=this.CHTML.scale;D.rscale=this.CHTML.rscale;z[this.base].x=z[this.base].y=0;var F=i;for(y=0;y<v;y++){x=(y===this.base?1:z[y].rscale);var C=x*(z[y].w+(z[y].L||0)+(z[y].R||0));z[y].x+={left:0,center:(t-C)/2,right:t-C}[E.align];if(z[y].x<F){F=z[y].x}}for(y=0;y<v;y++){if(this.data[y]){x=(y===this.base?1:z[y].rscale);if(z[y].x-F){var u=(y===this.base?s:y===this.over?A:B);u.style.paddingLeft=a.Em((z[y].x-F)/x)}D.combine(z[y],z[y].x-F,z[y].y)}}this.CHTML=D},CHTMLstretchV:d.mbase.CHTMLstretchV,CHTMLstretchH:d.mbase.CHTMLstretchH,CHTMLchildNode:function(u,t){var s=["mjx-op","mjx-under","mjx-over"];if(this.over===1){s[1]=s[2]}return a.getNode(u,s[t])}});d.msubsup.Augment({toCommonHTML:function(S,C){var A=this.getValues("displaystyle","subscriptshift","superscriptshift","texprimestyle");var D,H,z;if(C&&C.stretch){if(this.data[this.base]){D=a.getNode(S,"mjx-base")}if(this.data[this.sub]){H=a.getNode(S,"mjx-sub")}if(this.data[this.sup]){z=a.getNode(S,"mjx-sup")}E=a.getNode(S,"mjx-stack")}else{var 
K=["mjx-base","mjx-sub","mjx-sup"];if(this.sup===1){K[1]=K[2]}S=this.CHTMLdefaultNode(S,{childNodes:K,noBBox:true,forceChild:true,minChildren:3});D=S.childNodes[this.base];H=S.childNodes[this.sub];z=S.childNodes[this.sup];if(!this.CHTMLnotEmpty(this.data[this.sub])){S.removeChild(H);H=null}if(!this.CHTMLnotEmpty(this.data[this.sup])){S.removeChild(z);z=null}if(S.childNodes.length===3){var E=a.addElement(S,"mjx-stack");E.appendChild(z);E.appendChild(H)}}var F=[],G=a.BBOX.empty(this.CHTML);for(var V=0,T=this.data.length;V<T;V++){F[V]=this.CHTMLbboxFor(V)}var y=F[this.base]||a.BBOX.empty(),P=F[this.sub],W=F[this.sup];var B=(H?P.rscale:1),w=(z?W.rscale:1);G.combine(y,0,0);var X=a.TEX.x_height,N=a.TEX.scriptspace;var Q=a.TEX.sup_drop*w,O=a.TEX.sub_drop*B;var L=y.h-Q,J=y.d+O,Y=0,R;if(y.ic){G.w-=y.ic;D.style.marginRight=a.Em(-y.ic);Y=1.3*y.ic+0.05}var U=this.data[this.base];if(U){if((U.type==="mrow"||U.type==="mstyle")&&U.data.length===1){U=U.data[0]}if(U.type==="mi"||U.type==="mo"){if(a.isChar(U.data.join(""))&&y.rscale===1&&!y.sH&&!U.Get("largeop")){L=J=0}}}A.subscriptshift=(A.subscriptshift===""?0:this.CHTMLlength2em(A.subscriptshift));A.superscriptshift=(A.superscriptshift===""?0:this.CHTMLlength2em(A.superscriptshift));var I=G.w;if(H){P.w+=N}if(z){W.w+=N}if(!z){if(H){J=Math.max(J,a.TEX.sub1,B*P.h-(4/5)*X,A.subscriptshift);H.style.verticalAlign=a.Em(-J/B);H.style.paddingRight=a.Em(N/B);G.combine(P,I,-J)}}else{if(!H){R=a.TEX[(A.displaystyle?"sup1":(A.texprimestyle?"sup3":"sup2"))];L=Math.max(L,R,w*W.d+(1/4)*X,A.superscriptshift);z.style.verticalAlign=a.Em(L/w);z.style.paddingLeft=a.Em(Y/w);z.style.paddingRight=a.Em(N/w);G.combine(W,I+Y,L)}else{J=Math.max(J,a.TEX.sub2);var M=a.TEX.rule_thickness;if((L-w*W.d)-(B*P.h-J)<3*M){J=3*M-L+w*W.d+B*P.h;Q=(4/5)*X-(L-w*W.d);if(Q>0){L+=Q;J-=Q}}L=Math.max(L,A.superscriptshift);J=Math.max(J,A.subscriptshift);H.style.paddingRight=a.Em(N/B);z.style.paddingBottom=a.Em(L/w+J/B-W.d-P.h/B*w);z.style.paddingLeft=a.Em(Y/w);z.style.paddingRight=a.Em(N/w);E.style.verticalAlign=a.Em(-J);G.combine(W,I+Y,L);G.combine(P,I,-J)}}G.clean();return S},CHTMLstretchV:d.mbase.CHTMLstretchV,CHTMLstretchH:d.mbase.CHTMLstretchH,CHTMLchildNode:function(u,t){var s=["mjx-base","mjx-sub","mjx-sup"];if(this.over===1){s[1]=s[2]}return a.getNode(u,s[t])}});d.mfrac.Augment({toCommonHTML:function(N){N=this.CHTMLdefaultNode(N,{childNodes:["mjx-numerator","mjx-denominator"],childOptions:{autowidth:true},forceChild:true,noBBox:true,minChildren:2});var x=this.getValues("linethickness","displaystyle","numalign","denomalign","bevelled");var O=x.displaystyle;var D=N.firstChild,w=N.lastChild;var y=a.addElement(N,"mjx-box");y.appendChild(D);y.appendChild(w);N.appendChild(y);if(x.numalign!=="center"){D.style.textAlign=x.numalign}if(x.denomalign!=="center"){w.style.textAlign=x.denomalign}var P=this.CHTMLbboxFor(0),B=this.CHTMLbboxFor(1),C=a.BBOX.empty(this.CHTML),F=P.rscale,z=B.rscale;x.linethickness=Math.max(0,a.thickness2em(x.linethickness||"0",C.scale));var M=a.TEX.min_rule_thickness/a.em,T=a.TEX.axis_height;var J=x.linethickness,L,K,I,G;if(x.bevelled){y.className+=" MJXc-bevelled";var S=(O?0.4:0.15);var E=Math.max(F*(P.h+P.d),z*(B.h+B.d))+2*S;var R=a.Element("mjx-bevel");y.insertBefore(R,w);var s=a.createDelimiter(R,47,E);I=F*(P.d-P.h)/2+T+S;G=z*(B.d-B.h)/2+T-S;if(I){D.style.verticalAlign=a.Em(I/F)}if(G){w.style.verticalAlign=a.Em(G/z)}R.style.marginLeft=R.style.marginRight=a.Em(-S/2);C.combine(P,0,I);C.combine(s,F*P.w-S/2,0);C.combine(B,F*P.w+s.w-S,G);C.clean()}else{y.className+=" 
MJXc-stacked";if(O){I=a.TEX.num1;G=a.TEX.denom1}else{I=(J===0?a.TEX.num3:a.TEX.num2);G=a.TEX.denom2}if(J===0){L=Math.max((O?7:3)*a.TEX.rule_thickness,2*M);K=(I-P.d*F)-(B.h*z-G);if(K<L){I+=(L-K)/2;G+=(L-K)/2}}else{L=Math.max((O?2:0)*M+J,J/2+1.5*M);J=Math.max(J,M);K=(I-P.d*F)-(T+J/2);if(K<L){I+=(L-K)}K=(T-J/2)-(B.h*z-G);if(K<L){G+=(L-K)}P.L=P.R=B.L=B.R=0.1;var A=a.addElement(y,"mjx-line",{style:{"border-bottom":a.Px(J*C.scale,1)+" solid",top:a.Em(-J/2-T)}})}C.combine(P,0,I);C.combine(B,0,-G);C.clean();y.style.width=a.Em(C.w);D.style.width=a.Em(C.w/F);w.style.width=a.Em(C.w/z);if(A){A.style.width=y.style.width}D.style.top=a.Em(-C.h/F);w.style.bottom=a.Em(-C.d/z);a.addElement(N,"mjx-vsize",{style:{height:a.Em(C.h+C.d),verticalAlign:a.Em(-C.d)}})}if(!this.texWithDelims){var Q=a.TEX.nulldelimiterspace;y.style.padding="0 "+a.Em(Q);C.l+=Q;C.r+=Q;C.w+=2*Q}return N},CHTMLcanStretch:function(s){return false}});d.msqrt.Augment({toCommonHTML:function(w){w=this.CHTMLdefaultNode(w,{childNodes:["mjx-box","mjx-root"],forceChild:true,noBBox:true});var v=w.firstChild||a.Element("mjx-box");var E=a.addElement(w,"mjx-box");E.appendChild(v);var F=this.CHTMLbboxFor(0),C=a.BBOX.empty(this.CHTML);var G=a.TEX.rule_thickness,y=a.TEX.surd_height,u=G,s,D;if(this.Get("displaystyle")){u=a.TEX.x_height}s=G+u/4;D=F.h+F.d+s+G;var z=a.Element("mjx-surd");E.insertBefore(z,v);var A=a.createDelimiter(z,8730,[D-0.04,D]);if(A.h+A.d>D){s=((A.h+A.d)-(D-G))/2}D=F.h+s+G;var B=this.CHTMLaddRoot(w,A,A.h+A.d-D);v.style.paddingTop=a.Em(s);v.style.borderTop=a.Px(y*F.scale,1)+" solid";E.style.paddingTop=a.Em(2*G-y);F.h+=s+2*G;C.combine(A,B,D-A.h);C.combine(F,B+A.w,0);C.clean();return w},CHTMLaddRoot:function(){return 0},CHTMLhandleBBox:function(s){var t=this.CHTMLbboxFor(0);delete t.pwidth;this.SUPER(arguments).CHTMLhandleBBox.apply(this,arguments)}});d.mroot.Augment({toCommonHTML:d.msqrt.prototype.toCommonHTML,CHTMLhandleBBox:d.msqrt.prototype.CHTMLhandleBBox,CHTMLaddRoot:function(A,u,v){if(!this.data[1]){return}var z=this.CHTML,B=this.data[1].CHTML,x=A.firstChild;var s=B.rscale;var t=this.CHTMLrootHeight(B,u,s)-v;var y=Math.min(B.w,B.r);var C=Math.max(y,u.offset/s);if(t){x.style.verticalAlign=a.Em(t/s)}if(C>y){x.firstChild.style.paddingLeft=a.Em(C-y)}C-=u.offset/s;x.style.width=a.Em(C);z.combine(B,0,t);return C*s},CHTMLrootHeight:function(u,s,t){return 0.45*(s.h+s.d-0.9)+s.offset+Math.max(0,u.d-0.075)}});d.mfenced.Augment({toCommonHTML:function(v){v=this.CHTMLcreateNode(v);this.CHTMLhandleStyle(v);this.CHTMLhandleScale(v);this.CHTMLaddChild(v,"open",{});for(var u=0,s=this.data.length;u<s;u++){this.CHTMLaddChild(v,"sep"+u,{});this.CHTMLaddChild(v,u,{})}this.CHTMLaddChild(v,"close",{});var t=this.CHTML.h,w=this.CHTML.d;this.CHTMLstretchChildV("open",t,w);for(u=0,s=this.data.length;u<s;u++){this.CHTMLstretchChildV("sep"+u,t,w);this.CHTMLstretchChildV(u,t,w)}this.CHTMLstretchChildV("close",t,w);this.CHTMLhandleSpace(v);this.CHTMLhandleBBox(v);this.CHTMLhandleColor(v);return v}});d.mrow.Augment({toCommonHTML:function(w,t){t=t||{};w=this.CHTMLdefaultNode(w);var z=this.CHTML,v=z.h,x=z.d,y;for(var u=0,s=this.data.length;u<s;u++){this.CHTMLstretchChildV(u,v,x);if(this.data[u]&&this.data[u].CHTML&&this.data[u].CHTML.w<0){y=true}}if(this.CHTMLlineBreaks()){this.CHTMLmultiline(w);if(t.autowidth){w.style.width=""}}else{if(y&&z.w){w.style.width=a.Em(Math.max(0,z.w))}if(z.w<0){w.style.marginRight=a.Em(z.w)}}return w},CHTMLlineBreaks:function(){if(!this.parent.linebreakContainer){return 
false}return(c.automatic&&this.CHTML.w>a.linebreakWidth)||this.hasNewline()},CHTMLstretchV:function(s,t){this.CHTMLstretchChildV(this.CoreIndex(),s,t);return this.CHTML},CHTMLstretchH:function(t,s){this.CHTMLstretchChildH(this.CoreIndex(),s,t);return this.CHTML}});d.TeXAtom.Augment({toCommonHTML:function(x,w){if(!w||!w.stretch){x=this.CHTMLdefaultNode(x)}if(this.texClass===d.TEXCLASS.VCENTER){var s=a.TEX.axis_height,u=this.CHTML;var t=s-(u.h+u.d)/2+u.d;if(Math.abs(t)>0.001){x.style.verticalAlign=a.Em(t);u.h+=t;u.t+=t;u.d-=t;u.b-=t}}return x},CHTMLstretchV:function(s,t){this.CHTMLupdateFrom(this.Core().CHTMLstretchV(s,t));this.toCommonHTML(this.CHTMLnodeElement(),{stretch:true});return this.CHTML},CHTMLstretchH:function(t,s){this.CHTMLupdateFrom(this.CHTMLstretchCoreH(t,s));this.toCommonHTML(t,{stretch:true});return this.CHTML}});d.semantics.Augment({toCommonHTML:function(s){s=this.CHTMLcreateNode(s);if(this.data[0]){this.data[0].toCommonHTML(s);this.CHTMLupdateFrom(this.data[0].CHTML);this.CHTMLhandleBBox(s)}return s}});d.annotation.Augment({toCommonHTML:function(s){return this.CHTMLcreateNode(s)}});d["annotation-xml"].Augment({toCommonHTML:d.mbase.CHTMLautoload});d.ms.Augment({toCommonHTML:d.mbase.CHTMLautoload});d.mglyph.Augment({toCommonHTML:d.mbase.CHTMLautoload});d.menclose.Augment({toCommonHTML:d.mbase.CHTMLautoload});d.maction.Augment({toCommonHTML:d.mbase.CHTMLautoload});d.mmultiscripts.Augment({toCommonHTML:d.mbase.CHTMLautoload});d.mtable.Augment({toCommonHTML:d.mbase.CHTMLautoload});MathJax.Hub.Register.StartupHook("onLoad",function(){setTimeout(MathJax.Callback(["loadComplete",a,"jax.js"]),0)})});MathJax.Hub.Register.StartupHook("End Cookie",function(){if(f.config.menuSettings.zoom!=="None"){o.Require("[MathJax]/extensions/MathZoom.js")}})})(MathJax.Ajax,MathJax.Hub,MathJax.HTML,MathJax.OutputJax.CommonHTML);
|
PypiClean
|
/hindemith-0.1.1.tar.gz/hindemith-0.1.1/dev-examples/optical_flow/hs_jacobi_numpy.py
|
import numpy as np
from scipy.ndimage.filters import convolve
from solver import NumpySolver
def update(Ix, Iy, It, u, v, ubar, vbar, denom):
t = (Ix * ubar + Iy * vbar + It) / denom
u_new = ubar - Ix * t
v_new = vbar - Iy * t
err = np.square(u_new - u) + np.square(v_new - v)
return u_new, v_new, err
alpha = 15.0
alpha2 = alpha ** 2
def compute_denom(Ix, Iy):
return Ix * Ix + Iy * Iy + alpha2
jacobi = [[1.0/12.0, 1.0/6.0, 1.0/12.0],
[1.0/6.0, 0.0, 1.0/6.0],
[1.0/12.0, 1.0/6.0, 1.0/12.0]]
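# The kernel above is the weighted neighbourhood average used by the classic
# Horn-Schunck scheme. A minimal sketch of one Jacobi iteration, matching
# update() and compute_denom() above (shapes/dtypes assumed to match solve()):
#
#   ubar = convolve(u, jacobi)                # smoothed neighbour average
#   vbar = convolve(v, jacobi)
#   t = (Ix * ubar + Iy * vbar + It) / (Ix * Ix + Iy * Iy + alpha2)
#   u, v = ubar - Ix * t, vbar - Iy * t
#
# i.e. the per-pixel minimiser of the linearised brightness-constancy term
# plus the alpha-weighted smoothness term.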
class HSJacobiNumpy(NumpySolver):
def solve(self, im0, im1, u, v):
# Ix, Iy, It = dx(im0, im1), dy(im0, im1), dt(im0, im1)
Iy, Ix = np.gradient(im1)
It = im1 - im0
denom = Ix * Ix + Iy * Iy + alpha2
epsilon = (0.0001 ** 2) * np.prod(u.shape)
for _ in range(100):
ubar = convolve(u, jacobi)
vbar = convolve(v, jacobi)
u, v, err = update(Ix, Iy, It, u, v, ubar, vbar, denom)
if np.sum(err) < epsilon:
break
return u, v
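# The variant below is a re-linearisation loop: im1 and its gradients are
# warped towards im0 with the current flow, the data term is re-linearised
# around that warp, and the resulting system is relaxed with a weighted
# (SOR-style) sweep. w is the over-relaxation weight; w = 1 reduces to a
# plain Jacobi step, while values in (1, 2) usually converge faster.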
import cv2
w = 1.9
def get_a_d(im0, i1w, i1wx, u, i1wy, v):
dif = im0 - i1w + i1wx * u + i1wy * v
Au = dif * i1wx
Av = dif * i1wy
Du = i1wx * i1wx + alpha2
Dv = i1wy * i1wy + alpha2
D = i1wy * i1wx
return Au, Av, Du, Dv, D
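# dif is the residual of the linearised brightness-constancy constraint for
# the warped image i1w; Au/Av are the data-term right-hand sides, Du/Dv the
# diagonal terms (squared gradient plus the smoothness weight alpha2), and D
# the u-v coupling term. They feed the weighted update below.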
def update_sor(u, v, Au, D, Du, Av, Dv):
ubar = convolve(u, jacobi)
vbar = convolve(v, jacobi)
u_old = u
v_old = v
u = (1 - w) * u + w * (Au - D * v + alpha * ubar) / Du
v = (1 - w) * v + w * (Av - D * u + alpha * vbar) / Dv
err = np.square(u - u_old) + np.square(v - v_old)
return u, v, err
class HSJacobiNumpyMulti(NumpySolver):
def solve(self, im0, im1, u, v):
# Ix, Iy, It = dx(im0, im1), dy(im0, im1), dt(im0, im1)
Iy, Ix = np.gradient(im1)
epsilon = (0.0001 ** 2) * np.prod(u.shape)
ys, xs = np.indices(im1.shape).astype(np.float32)
for n in range(5):
i1w = cv2.remap(im1, xs + u, ys + v, cv2.INTER_LINEAR)
i1wy = cv2.remap(Iy, xs + u, ys + v, cv2.INTER_LINEAR)
i1wx = cv2.remap(Ix, xs + u, ys + v, cv2.INTER_LINEAR)
Au, Av, Du, Dv, D = get_a_d(im0, i1w, i1wx, u, i1wy, v)
for _ in range(100):
u, v, err = update_sor(u, v, Au, D, Du, Av, Dv)
if np.sum(err) < epsilon:
break
return u, v
if __name__ == '__main__':
frame0 = cv2.imread('images/frame0.png')
frame1 = cv2.imread('images/frame1.png')
frame0 = cv2.resize(frame0, (384, 288))
frame1 = cv2.resize(frame1, (384, 288))
im0 = cv2.cvtColor(frame0, cv2.COLOR_BGR2GRAY)
im1 = cv2.cvtColor(frame1, cv2.COLOR_BGR2GRAY)
hs_jacobi = HSJacobiNumpyMulti(2, 1.0 / 3.0)
from ctree.util import Timer
hs_jacobi(im0, im1)
with Timer() as t:
u = hs_jacobi(im0, im1)
print(t.interval)
mag, ang = cv2.cartToPolar(u[0], u[1])
mag = cv2.normalize(mag, None, 0, 255, cv2.NORM_MINMAX)
ang = ang*180/np.pi/2
hsv = np.zeros_like(frame1)
hsv[..., 1] = 255
hsv[..., 0] = ang
hsv[..., 2] = mag
flow = cv2.cvtColor(hsv, cv2.COLOR_HSV2BGR)
cv2.imshow('flow', flow)
cv2.waitKey()
|
PypiClean
|
/email-commander-0.0.1.tar.gz/email-commander-0.0.1/README.md
|
Email Commander
===============
[](https://pypi.python.org/pypi/email-commander)
[](https://pypi.python.org/pypi/email-commander)
[](https://pypi.python.org/pypi/email-commander)
[](https://pepy.tech/project/email-commander)
[](https://saythanks.io/to/marcin%40urzenia.net)
`Email Commander` allows you to manage any host just by sending emails.
If you like this tool, just [say thanks](https://saythanks.io/to/marcin%40urzenia.net).
Current stable version
----------------------
0.0.1
Features
--------
* It's all Python!
Installation
------------
`Email Commander` should work on any POSIX platform where [Python](http://python.org)
is available, which means Linux, macOS, etc.
The simplest way to install it is with Python's built-in package manager:
python3 -m pip install email-commander
You can also use [pipx](https://pipxproject.github.io/pipx/) if you don't want to
mess with system packages and install `Email Commander` in virtual environment:
pipx install email-commander
Voila!
Python version
--------------
`Email Commander` is tested against Python 3.7+. Older versions of Python may or may not work.
How to use
----------
Help!
-----
I'm a backend developer, not a frontend developer or designer... and this project needs a logo and/or icon.
If you're able to prepare one, do not hesitate to [mail me](mailto:[email protected]) :)
Also, if you have an idea for how to enhance `Email Commander`, [please file a ticket](https://github.com/msztolcman/email-commander/issues).
Every idea and every feature request can help you, me and others!
Authors
-------
* Marcin Sztolcman ([[email protected]](mailto:[email protected]))
Contact
-------
If you like or dislike this software, please do not hesitate to tell me about
it via email ([[email protected]](mailto:[email protected])).
If you find a bug or have an idea to enhance this tool, please use GitHub's
[issues](https://github.com/msztolcman/email-commander/issues).
ChangeLog
---------
### v0.0.1
* initial
|
PypiClean
|
/al-model-trainer-0.3.0.tar.gz/al-model-trainer-0.3.0/bytegain/custom/data_import/clone_redshift_table.py
|
import psycopg2
import argparse
import time
import sys
import boto3
import os
import gzip
from datetime import date
class TableCloner(object):
def __init__(self, table_info, where, client, src_region, s3_id, s3_secret_key, dest_dir = None, use_tablename_column = False):
self._table_info = table_info
self._src_schema = table_info.schema
self._src_full_table = "%s.%s" % (self._src_schema, self._table_info.table)
self._where = where
self._s3_key_id = s3_id
self._s3_secret_key = s3_secret_key
self._client = client
self._now = date.today()
self._src_region = src_region
self._dest_dir = dest_dir
self._use_tablename_column = use_tablename_column
if src_region == "us-east-1":
self._s3_bucket = "bg-incoming-east"
else:
# TODO(jjs): Handle other regions
self._s3_bucket = "bg-incoming-data"
def get_s3_connection(self):
return boto3.client('s3',
aws_access_key_id=self._s3_key_id,
aws_secret_access_key=self._s3_secret_key)
def get_s3_dir(self):
if self._dest_dir:
return self._dest_dir
else:
date = self._now.strftime("%Y%m%d")
return "%s/%s_data/%s" % (self._client, self._src_full_table, date)
def get_where(self):
return self._table_info.get_full_where(("WHERE %s" % self._where) if self._where else None)
def get_select(self):
columns = ['"%s"' % x.name for x in self._table_info.columns]
if self._use_tablename_column:
columns.append("'%s'" % self._table_info.table)
columns = ",".join(columns)
return "select %s from %s %s" % (columns, self._src_full_table, self.get_where())
    def run_select_from_redshift(self, src_conn, use_gzip=True):
src_cur = src_conn.cursor()
start_time = time.time()
statement = self.get_select()
statement = statement.replace("'", "\\'")
dest_csv = "%s/%s." % (self.get_s3_dir(), self._src_full_table)
manifest = dest_csv + "manifest"
        statement = """
            unload('%s') to 's3://%s/%s'
            credentials 'aws_access_key_id=%s;aws_secret_access_key=%s'
            ALLOWOVERWRITE PARALLEL ON %s ESCAPE MANIFEST
            """ % (statement, self._s3_bucket, dest_csv, self._s3_key_id,
                   self._s3_secret_key, "GZIP" if use_gzip else "")
# print ("statement: %s" % statement)
src_cur.execute(statement)
sys.stdout.write(("Unloaded %s in %ds. " % (self._src_full_table, time.time() - start_time)))
sys.stdout.flush()
self._table_info.set_s3_manifest("s3://%s/%s" % (self._s3_bucket, manifest))
return manifest
def copy_from_s3(self, dst_conn, dst_table, manifest):
manifest_url = 's3://%s/%s' % (self._s3_bucket, manifest)
credentials = 'aws_iam_role=arn:aws:iam::087456767550:role/redshift-data-loader'
upload_statement = "copy %s from '%s' credentials '%s' MANIFEST ESCAPE GZIP COMPUPDATE ON EMPTYASNULL REGION '%s'" % (
dst_table, manifest_url, credentials, self._src_region)
upload_start = time.time()
dst_cur = dst_conn.cursor()
dst_cur.execute(upload_statement)
sys.stdout.write(("Uploaded %s in: %ds. " % (dst_table, time.time() - upload_start)))
sys.stdout.flush()
def clone_table(self, src_conn_factory, dst_conn_factory, dst_table_base, sort_key, skip_existing, incremental = False):
sys.stdout.write("Working on: %s.. " % self._table_info.table)
sys.stdout.flush()
total_time = time.time()
# Client is schema name
dst_table = "%s.%s" % (self._client, dst_table_base)
        if skip_existing:
            dst_conn = dst_conn_factory.get_connection()
            dst_cur = dst_conn.cursor()
            try:
                dst_cur.execute("select 1 from %s" % dst_table)
                sys.stdout.write("Already exists... Skipping\n")
                return
            except Exception:
                # Table does not exist yet; reset the aborted transaction.
                dst_conn.rollback()
src_cur = src_conn_factory.get_connection().cursor()
src_schema = self._src_schema
col_formats = []
for column in self._table_info.columns:
col_desc = '%s %s' % (column.name, column.datatype)
col_formats.append(col_desc)
dist_text = ("distkey(%s)" % sort_key) if sort_key else ""
sort_text = ("sortkey(%s)" % sort_key) if sort_key else ""
if not incremental:
dst_cur = dst_conn_factory.get_connection().cursor()
dst_cur.execute("drop table if exists %s" % dst_table)
create_str = "create table %s (%s) %s %s" % (dst_table, ', '.join(col_formats), dist_text, sort_text)
# print ("Executing: %s" % create_str)
dst_cur.execute(create_str)
manifest = self.run_select_from_redshift(src_conn_factory.get_connection())
self.copy_from_s3(dst_conn_factory.get_connection(), dst_table, manifest)
print("Total clone time: %ds" % (time.time() - total_time))
return True
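# Illustrative usage sketch (connection factories, table_info and credentials
# come from the surrounding bytegain tooling; the names below are hypothetical):
#
#   cloner = TableCloner(table_info, where="created_at > '2018-01-01'",
#                        client="acme", src_region="us-east-1",
#                        s3_id=S3_KEY_ID, s3_secret_key=S3_SECRET)
#   cloner.clone_table(src_factory, dst_factory, "events",
#                      sort_key="user_id", skip_existing=False)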
|
PypiClean
|
/ucam-identitylib-2.0.2.tar.gz/ucam-identitylib-2.0.2/identitylib/identifiers.py
|
from typing import Optional, List, Callable, Dict
from collections import namedtuple
from dataclasses import dataclass
from functools import lru_cache
from frozendict import frozendict
@dataclass(frozen=True, eq=True)
class IdentifierScheme:
"""
    A class containing the information needed to represent an identifier scheme.
"""
    identifier: str  # the actual string which identifies this scheme
common_name: str # a common name for this identifier (e.g. CRSid or USN)
# a dict of aliases - each value being a previous representation of this identifier
# keyed on a brief description of that alias
aliases: Dict[str, str]
value_parser: Optional[Callable] = None # a parser for the value of an id with this scheme
def __str__(self):
"""
Always return the scheme identifier as the string representation.
"""
return self.identifier
class IdentifierSchemes:
"""
A holder for all identifier schemes used within identity systems.
"""
"""
Person identifiers.
"""
CRSID = IdentifierScheme(
"v1.person.identifiers.cam.ac.uk",
"CRSid",
frozendict(
{
"deprecated": "person.crs.identifiers.uis.cam.ac.uk",
"deprecated-versioned": "person.v1.crs.identifiers.cam.ac.uk",
}
),
)
USN = IdentifierScheme(
"person.v1.student-records.university.identifiers.cam.ac.uk",
"USN",
frozendict(
{
"deprecated": "person.camsis.identifiers.admin.cam.ac.uk",
"deprecated-versioned": "person.v1.ust.identifiers.cam.ac.uk",
}
),
)
STAFF_NUMBER = IdentifierScheme(
"person.v1.human-resources.university.identifiers.cam.ac.uk",
"Staff Number",
frozendict(
{
"deprecated": "person.chris.identifiers.admin.cam.ac.uk",
"deprecated-versioned": "person.v1.uhr.identifiers.cam.ac.uk",
}
),
)
BOARD_OF_GRADUATE_STUDIES = IdentifierScheme(
"person.v1.board-of-graduate-studies.university.identifiers.cam.ac.uk",
"Board of Graduate Studies Identifier",
frozendict(
{
"deprecated": "person.bgs.identifiers.admin.cam.ac.uk",
}
),
)
LEGACY_CARDHOLDER = IdentifierScheme(
"person.v1.legacy-card.university.identifiers.cam.ac.uk",
"Legacy cardholder Identifier",
frozendict(
{
"deprecated": "person.legacy_card.identifiers.admin.cam.ac.uk",
}
),
)
"""
Institution identifiers.
"""
STUDENT_INSTITUTION = IdentifierScheme(
"institution.v1.student-records.university.identifiers.cam.ac.uk",
"Student Institution",
frozendict(
{
"deprecated": "institution.v1.ust.identifiers.cam.ac.uk",
"deprecated-mapping": "institution.v1.student.university.identifiers.cam.ac.uk",
}
),
)
HR_INSTITUTION = IdentifierScheme(
"institution.v1.human-resources.university.identifiers.cam.ac.uk",
"Human Resources Institution",
frozendict(
{
"deprecated": "institution.v1.uhr.identifiers.cam.ac.uk",
}
),
)
LEGACY_CARD_INSTITUTION = IdentifierScheme(
"institution.v1.legacy-card.university.identifiers.cam.ac.uk",
"Legacy Card Institution",
frozendict(
{
"deprecated": "inst.legacy_card.identifiers.admin.cam.ac.uk",
}
),
)
LOOKUP_INSTITUTION = IdentifierScheme(
"insts.lookup.cam.ac.uk", "Lookup Institution", frozendict({})
)
"""
Misc. identifiers.
"""
STUDENT_ACADEMIC_PLAN = IdentifierScheme(
"academic-plan.v1.student-records.university.identifiers.cam.ac.uk",
"Student Academic Plan",
frozendict(
{
"deprecated": "academicPlan.v1.ust.identifiers.cam.ac.uk",
}
),
)
CARD = IdentifierScheme(
"card.v1.card.university.identifiers.cam.ac.uk",
"Card Identifier",
frozendict(
{
"deprecated": "card.card.identifiers.uis.cam.ac.uk",
}
),
)
LEGACY_TEMP_CARD = IdentifierScheme(
"temporary-card.v1.card.university.identifiers.cam.ac.uk",
"Temporary Card Identifier",
frozendict(
{
"deprecated": "temp_id.card.identifiers.uis.cam.ac.uk",
}
),
)
MIFARE_ID = IdentifierScheme(
"mifare-identifier.v1.card.university.identifiers.cam.ac.uk",
"Mifare Identifier",
frozendict(
{
"deprecated": "mifare_id.card.identifiers.uis.cam.ac.uk",
}
),
value_parser=(
lambda v: v.lstrip("0") or "0" # fallback to '0' to avoid stripping '000' to ''
),
)
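    # Example of the value_parser above: "0000123" becomes "123", while "000"
    # becomes "0", so an all-zero identifier is not collapsed to an empty string.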
MIFARE_NUMBER = IdentifierScheme(
"mifare-number.v1.card.university.identifiers.cam.ac.uk",
"Mifare Number",
frozendict(
{
"deprecated": "mifare_number.card.identifiers.uis.cam.ac.uk",
}
),
)
BARCODE = IdentifierScheme(
"barcode.v1.card.university.identifiers.cam.ac.uk",
"Card Barcode",
frozendict(
{
"deprecated": "barcode.identifiers.lib.cam.ac.uk",
}
),
)
CARD_LOGO = IdentifierScheme(
"card-logo.v1.card.university.identifiers.cam.ac.uk",
"Card Logo Identifier",
frozendict(
{
"deprecated": "card_logo.card.identifiers.uis.cam.ac.uk",
}
),
)
PHOTO = IdentifierScheme(
"photo.v1.photo.university.identifiers.cam.ac.uk",
"Photo Identifier",
frozendict(
{
"deprecated": "photo_id.photo.identifiers.uis.cam.ac.uk",
}
),
)
LEGACY_PHOTO = IdentifierScheme(
"photo.v1.legacy-card.university.identifiers.cam.ac.uk",
"Legacy Photo Identifier",
frozendict(
{
"deprecated": "photo.legacy_card.identifiers.admin.cam.ac.uk",
}
),
)
LEGACY_CARD = IdentifierScheme(
"card.v1.legacy-card.university.identifiers.cam.ac.uk",
"Legacy Card Identifier",
frozendict(
{
"deprecated": "card.legacy_card.identifiers.admin.cam.ac.uk",
}
),
)
LOOKUP_GROUP = IdentifierScheme(
"groups.lookup.cam.ac.uk", "Lookup Group Identifier", frozendict({})
)
API_GATEWAY_APPLICATION = IdentifierScheme(
"application.api.apps.cam.ac.uk",
"Application Gateway Application",
frozendict(
{
"development": "apigee-development.devel.api.gcp.uis.cam.ac.uk",
"staging": "apigee-staging.test.api.gcp.uis.cam.ac.uk",
}
),
)
@staticmethod
@lru_cache()
def get_registered_schemes() -> List[IdentifierScheme]:
"""
Returns the list of registered identifier schemes.
"""
return [
prop
for prop in vars(IdentifierSchemes).values()
if (isinstance(prop, IdentifierScheme))
]
@staticmethod
def from_string(identifier_scheme: str, find_by_alias: bool = False):
"""
Return an instance of an identifier scheme from a string representation.
        If `find_by_alias` is true, identifier schemes will be matched by their
        aliases as well as by their primary identifier.
"""
matching_scheme = next(
(
scheme
for scheme in IdentifierSchemes.get_registered_schemes()
if (
scheme.identifier == identifier_scheme
or find_by_alias
and identifier_scheme in scheme.aliases.values()
)
),
None,
)
if not matching_scheme:
raise ValueError(f"Invalid identifier scheme {identifier_scheme}")
return matching_scheme
class Identifier(namedtuple("Identifier", ["value", "scheme"])):
"""
    A representation of an identifier, in the form of 'value' and 'scheme'.
"""
@staticmethod
def from_string(
value: str,
*,
fallback_scheme: Optional[IdentifierScheme] = None,
find_by_alias: bool = False,
) -> "Identifier":
"""
Parse a `<value>@<scheme>` string into an identifier pair.
"""
parsed_value: Optional[str] = None
scheme: Optional[IdentifierScheme] = None
parts = value.split("@")
if len(parts) == 2:
parsed_value = parts[0]
scheme = IdentifierSchemes.from_string(parts[1], find_by_alias)
elif len(parts) == 1 and fallback_scheme is not None:
parsed_value = value
scheme = fallback_scheme
else:
raise ValueError(f"Invalid identifier {value}")
parsed_value = scheme.value_parser(parsed_value) if scheme.value_parser else parsed_value
if str(scheme).lower() == str(IdentifierSchemes.CRSID).lower():
return Identifier(parsed_value.lower(), scheme)
return Identifier(parsed_value, scheme)
def __str__(self):
"""
Parse an identifier back to string form.
"""
# Always deal with identifiers in lower case
# The case of the identifier used does not matter when calling Lookup or the Card API
# but when using identifiers as keys within dicts we should ensure that we don't
# accidentally create duplicates by having identifiers in mixed cases
return f"{self.value}@{self.scheme}".lower()
CRSID_SCHEME = str(IdentifierSchemes.CRSID)
RETENTION_PERIOD = 2 * 366 # 2 leap years in days
|
PypiClean
|
/cdktf_cdktf_provider_snowflake-9.0.0-py3-none-any.whl/cdktf_cdktf_provider_snowflake/grant_privileges_to_role/__init__.py
|
import abc
import builtins
import datetime
import enum
import typing
import jsii
import publication
import typing_extensions
from typeguard import check_type
from .._jsii import *
import cdktf as _cdktf_9a9027ec
import constructs as _constructs_77d1e7e8
class GrantPrivilegesToRole(
_cdktf_9a9027ec.TerraformResource,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-snowflake.grantPrivilegesToRole.GrantPrivilegesToRole",
):
'''Represents a {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role snowflake_grant_privileges_to_role}.'''
def __init__(
self,
scope: _constructs_77d1e7e8.Construct,
id_: builtins.str,
*,
role_name: builtins.str,
all_privileges: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
id: typing.Optional[builtins.str] = None,
on_account: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
on_account_object: typing.Optional[typing.Union["GrantPrivilegesToRoleOnAccountObject", typing.Dict[builtins.str, typing.Any]]] = None,
on_schema: typing.Optional[typing.Union["GrantPrivilegesToRoleOnSchema", typing.Dict[builtins.str, typing.Any]]] = None,
on_schema_object: typing.Optional[typing.Union["GrantPrivilegesToRoleOnSchemaObject", typing.Dict[builtins.str, typing.Any]]] = None,
privileges: typing.Optional[typing.Sequence[builtins.str]] = None,
with_grant_option: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
connection: typing.Optional[typing.Union[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.WinrmProvisionerConnection, typing.Dict[builtins.str, typing.Any]]]] = None,
count: typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]] = None,
depends_on: typing.Optional[typing.Sequence[_cdktf_9a9027ec.ITerraformDependable]] = None,
for_each: typing.Optional[_cdktf_9a9027ec.ITerraformIterator] = None,
lifecycle: typing.Optional[typing.Union[_cdktf_9a9027ec.TerraformResourceLifecycle, typing.Dict[builtins.str, typing.Any]]] = None,
provider: typing.Optional[_cdktf_9a9027ec.TerraformProvider] = None,
provisioners: typing.Optional[typing.Sequence[typing.Union[typing.Union[_cdktf_9a9027ec.FileProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.LocalExecProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.RemoteExecProvisioner, typing.Dict[builtins.str, typing.Any]]]]] = None,
) -> None:
'''Create a new {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role snowflake_grant_privileges_to_role} Resource.
:param scope: The scope in which to define this construct.
:param id_: The scoped construct ID. Must be unique amongst siblings in the same scope
:param role_name: The fully qualified name of the role to which privileges will be granted. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#role_name GrantPrivilegesToRole#role_name}
:param all_privileges: Grant all privileges on the account role. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#all_privileges GrantPrivilegesToRole#all_privileges}
:param id: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#id GrantPrivilegesToRole#id}. Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2. If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
:param on_account: If true, the privileges will be granted on the account. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#on_account GrantPrivilegesToRole#on_account}
:param on_account_object: on_account_object block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#on_account_object GrantPrivilegesToRole#on_account_object}
:param on_schema: on_schema block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#on_schema GrantPrivilegesToRole#on_schema}
:param on_schema_object: on_schema_object block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#on_schema_object GrantPrivilegesToRole#on_schema_object}
:param privileges: The privileges to grant on the account role. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#privileges GrantPrivilegesToRole#privileges}
:param with_grant_option: Specifies whether the grantee can grant the privileges to other users. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#with_grant_option GrantPrivilegesToRole#with_grant_option}
:param connection:
:param count:
:param depends_on:
:param for_each:
:param lifecycle:
:param provider:
:param provisioners:
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__df9b6bac89d7b7dd746aef9ebbff86facffbdbd9cbbcc1b272c36297b61ff96b)
check_type(argname="argument scope", value=scope, expected_type=type_hints["scope"])
check_type(argname="argument id_", value=id_, expected_type=type_hints["id_"])
config = GrantPrivilegesToRoleConfig(
role_name=role_name,
all_privileges=all_privileges,
id=id,
on_account=on_account,
on_account_object=on_account_object,
on_schema=on_schema,
on_schema_object=on_schema_object,
privileges=privileges,
with_grant_option=with_grant_option,
connection=connection,
count=count,
depends_on=depends_on,
for_each=for_each,
lifecycle=lifecycle,
provider=provider,
provisioners=provisioners,
)
jsii.create(self.__class__, self, [scope, id_, config])
@jsii.member(jsii_name="putOnAccountObject")
def put_on_account_object(
self,
*,
object_name: builtins.str,
object_type: builtins.str,
) -> None:
'''
:param object_name: The fully qualified name of the object on which privileges will be granted. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#object_name GrantPrivilegesToRole#object_name}
:param object_type: The object type of the account object on which privileges will be granted. Valid values are: USER | RESOURCE MONITOR | WAREHOUSE | DATABASE | INTEGRATION | FAILOVER GROUP | REPLICATION GROUP Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#object_type GrantPrivilegesToRole#object_type}
'''
value = GrantPrivilegesToRoleOnAccountObject(
object_name=object_name, object_type=object_type
)
return typing.cast(None, jsii.invoke(self, "putOnAccountObject", [value]))
@jsii.member(jsii_name="putOnSchema")
def put_on_schema(
self,
*,
all_schemas_in_database: typing.Optional[builtins.str] = None,
future_schemas_in_database: typing.Optional[builtins.str] = None,
schema_name: typing.Optional[builtins.str] = None,
) -> None:
'''
:param all_schemas_in_database: The fully qualified name of the database. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#all_schemas_in_database GrantPrivilegesToRole#all_schemas_in_database}
:param future_schemas_in_database: The fully qualified name of the database. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#future_schemas_in_database GrantPrivilegesToRole#future_schemas_in_database}
:param schema_name: The fully qualified name of the schema. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#schema_name GrantPrivilegesToRole#schema_name}
'''
value = GrantPrivilegesToRoleOnSchema(
all_schemas_in_database=all_schemas_in_database,
future_schemas_in_database=future_schemas_in_database,
schema_name=schema_name,
)
return typing.cast(None, jsii.invoke(self, "putOnSchema", [value]))
@jsii.member(jsii_name="putOnSchemaObject")
def put_on_schema_object(
self,
*,
all: typing.Optional[typing.Union["GrantPrivilegesToRoleOnSchemaObjectAll", typing.Dict[builtins.str, typing.Any]]] = None,
future: typing.Optional[typing.Union["GrantPrivilegesToRoleOnSchemaObjectFuture", typing.Dict[builtins.str, typing.Any]]] = None,
object_name: typing.Optional[builtins.str] = None,
object_type: typing.Optional[builtins.str] = None,
) -> None:
'''
:param all: all block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#all GrantPrivilegesToRole#all}
:param future: future block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#future GrantPrivilegesToRole#future}
:param object_name: The fully qualified name of the object on which privileges will be granted. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#object_name GrantPrivilegesToRole#object_name}
:param object_type: The object type of the schema object on which privileges will be granted. Valid values are: ALERT | EVENT TABLE | FILE FORMAT | FUNCTION | PROCEDURE | SECRET | SEQUENCE | PIPE | MASKING POLICY | PASSWORD POLICY | ROW ACCESS POLICY | SESSION POLICY | TAG | STAGE | STREAM | TABLE | EXTERNAL TABLE | TASK | VIEW | MATERIALIZED VIEW Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#object_type GrantPrivilegesToRole#object_type}
'''
value = GrantPrivilegesToRoleOnSchemaObject(
all=all, future=future, object_name=object_name, object_type=object_type
)
return typing.cast(None, jsii.invoke(self, "putOnSchemaObject", [value]))
@jsii.member(jsii_name="resetAllPrivileges")
def reset_all_privileges(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetAllPrivileges", []))
@jsii.member(jsii_name="resetId")
def reset_id(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetId", []))
@jsii.member(jsii_name="resetOnAccount")
def reset_on_account(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetOnAccount", []))
@jsii.member(jsii_name="resetOnAccountObject")
def reset_on_account_object(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetOnAccountObject", []))
@jsii.member(jsii_name="resetOnSchema")
def reset_on_schema(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetOnSchema", []))
@jsii.member(jsii_name="resetOnSchemaObject")
def reset_on_schema_object(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetOnSchemaObject", []))
@jsii.member(jsii_name="resetPrivileges")
def reset_privileges(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetPrivileges", []))
@jsii.member(jsii_name="resetWithGrantOption")
def reset_with_grant_option(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetWithGrantOption", []))
@jsii.member(jsii_name="synthesizeAttributes")
def _synthesize_attributes(self) -> typing.Mapping[builtins.str, typing.Any]:
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "synthesizeAttributes", []))
@jsii.python.classproperty
@jsii.member(jsii_name="tfResourceType")
def TF_RESOURCE_TYPE(cls) -> builtins.str:
return typing.cast(builtins.str, jsii.sget(cls, "tfResourceType"))
@builtins.property
@jsii.member(jsii_name="onAccountObject")
def on_account_object(
self,
) -> "GrantPrivilegesToRoleOnAccountObjectOutputReference":
return typing.cast("GrantPrivilegesToRoleOnAccountObjectOutputReference", jsii.get(self, "onAccountObject"))
@builtins.property
@jsii.member(jsii_name="onSchema")
def on_schema(self) -> "GrantPrivilegesToRoleOnSchemaOutputReference":
return typing.cast("GrantPrivilegesToRoleOnSchemaOutputReference", jsii.get(self, "onSchema"))
@builtins.property
@jsii.member(jsii_name="onSchemaObject")
def on_schema_object(self) -> "GrantPrivilegesToRoleOnSchemaObjectOutputReference":
return typing.cast("GrantPrivilegesToRoleOnSchemaObjectOutputReference", jsii.get(self, "onSchemaObject"))
@builtins.property
@jsii.member(jsii_name="allPrivilegesInput")
def all_privileges_input(
self,
) -> typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]]:
return typing.cast(typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]], jsii.get(self, "allPrivilegesInput"))
@builtins.property
@jsii.member(jsii_name="idInput")
def id_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "idInput"))
@builtins.property
@jsii.member(jsii_name="onAccountInput")
def on_account_input(
self,
) -> typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]]:
return typing.cast(typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]], jsii.get(self, "onAccountInput"))
@builtins.property
@jsii.member(jsii_name="onAccountObjectInput")
def on_account_object_input(
self,
) -> typing.Optional["GrantPrivilegesToRoleOnAccountObject"]:
return typing.cast(typing.Optional["GrantPrivilegesToRoleOnAccountObject"], jsii.get(self, "onAccountObjectInput"))
@builtins.property
@jsii.member(jsii_name="onSchemaInput")
def on_schema_input(self) -> typing.Optional["GrantPrivilegesToRoleOnSchema"]:
return typing.cast(typing.Optional["GrantPrivilegesToRoleOnSchema"], jsii.get(self, "onSchemaInput"))
@builtins.property
@jsii.member(jsii_name="onSchemaObjectInput")
def on_schema_object_input(
self,
) -> typing.Optional["GrantPrivilegesToRoleOnSchemaObject"]:
return typing.cast(typing.Optional["GrantPrivilegesToRoleOnSchemaObject"], jsii.get(self, "onSchemaObjectInput"))
@builtins.property
@jsii.member(jsii_name="privilegesInput")
def privileges_input(self) -> typing.Optional[typing.List[builtins.str]]:
return typing.cast(typing.Optional[typing.List[builtins.str]], jsii.get(self, "privilegesInput"))
@builtins.property
@jsii.member(jsii_name="roleNameInput")
def role_name_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "roleNameInput"))
@builtins.property
@jsii.member(jsii_name="withGrantOptionInput")
def with_grant_option_input(
self,
) -> typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]]:
return typing.cast(typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]], jsii.get(self, "withGrantOptionInput"))
@builtins.property
@jsii.member(jsii_name="allPrivileges")
def all_privileges(
self,
) -> typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]:
return typing.cast(typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable], jsii.get(self, "allPrivileges"))
@all_privileges.setter
def all_privileges(
self,
value: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__ae2d0fb93445e52b92165c6a295ba8250d2313624055eab84a10146d174ba01c)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "allPrivileges", value)
@builtins.property
@jsii.member(jsii_name="id")
def id(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "id"))
@id.setter
def id(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__8501f18eab60fb6ab7783788c55cd870cdc25c8c85764ff33e07efd01e65d2d7)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "id", value)
@builtins.property
@jsii.member(jsii_name="onAccount")
def on_account(self) -> typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]:
return typing.cast(typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable], jsii.get(self, "onAccount"))
@on_account.setter
def on_account(
self,
value: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__ad8485be078d691a540591323a275a507a034eec5afb40342cc2946a4923a190)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "onAccount", value)
@builtins.property
@jsii.member(jsii_name="privileges")
def privileges(self) -> typing.List[builtins.str]:
return typing.cast(typing.List[builtins.str], jsii.get(self, "privileges"))
@privileges.setter
def privileges(self, value: typing.List[builtins.str]) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__99a08e24e5d78c580d7e8493388217245f932fdca73c51ed949542040b227ba3)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "privileges", value)
@builtins.property
@jsii.member(jsii_name="roleName")
def role_name(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "roleName"))
@role_name.setter
def role_name(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__7c3fc6e1b88460e0bcd4b85a1d51f18504928917047070ed7149f014ba7068a0)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "roleName", value)
@builtins.property
@jsii.member(jsii_name="withGrantOption")
def with_grant_option(
self,
) -> typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]:
return typing.cast(typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable], jsii.get(self, "withGrantOption"))
@with_grant_option.setter
def with_grant_option(
self,
value: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__1498c1b7fa3eab727be362fbfdc0bad741b1a50b92c9b990ce2adb708c37565c)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "withGrantOption", value)
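# Illustrative sketch of using this resource from a cdktf stack. The stack,
# provider configuration and Snowflake object names are assumptions for
# demonstration only; a SnowflakeProvider must also be configured on the stack.
#
#   from cdktf import App, TerraformStack
#   from cdktf_cdktf_provider_snowflake.grant_privileges_to_role import (
#       GrantPrivilegesToRole,
#       GrantPrivilegesToRoleOnAccountObject,
#   )
#
#   app = App()
#   stack = TerraformStack(app, "snowflake-grants")
#   GrantPrivilegesToRole(
#       stack, "grant_usage_on_wh",
#       role_name="ANALYST",
#       privileges=["USAGE"],
#       on_account_object=GrantPrivilegesToRoleOnAccountObject(
#           object_name="COMPUTE_WH", object_type="WAREHOUSE",
#       ),
#   )
#   app.synth()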
@jsii.data_type(
jsii_type="@cdktf/provider-snowflake.grantPrivilegesToRole.GrantPrivilegesToRoleConfig",
jsii_struct_bases=[_cdktf_9a9027ec.TerraformMetaArguments],
name_mapping={
"connection": "connection",
"count": "count",
"depends_on": "dependsOn",
"for_each": "forEach",
"lifecycle": "lifecycle",
"provider": "provider",
"provisioners": "provisioners",
"role_name": "roleName",
"all_privileges": "allPrivileges",
"id": "id",
"on_account": "onAccount",
"on_account_object": "onAccountObject",
"on_schema": "onSchema",
"on_schema_object": "onSchemaObject",
"privileges": "privileges",
"with_grant_option": "withGrantOption",
},
)
class GrantPrivilegesToRoleConfig(_cdktf_9a9027ec.TerraformMetaArguments):
def __init__(
self,
*,
connection: typing.Optional[typing.Union[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.WinrmProvisionerConnection, typing.Dict[builtins.str, typing.Any]]]] = None,
count: typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]] = None,
depends_on: typing.Optional[typing.Sequence[_cdktf_9a9027ec.ITerraformDependable]] = None,
for_each: typing.Optional[_cdktf_9a9027ec.ITerraformIterator] = None,
lifecycle: typing.Optional[typing.Union[_cdktf_9a9027ec.TerraformResourceLifecycle, typing.Dict[builtins.str, typing.Any]]] = None,
provider: typing.Optional[_cdktf_9a9027ec.TerraformProvider] = None,
provisioners: typing.Optional[typing.Sequence[typing.Union[typing.Union[_cdktf_9a9027ec.FileProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.LocalExecProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.RemoteExecProvisioner, typing.Dict[builtins.str, typing.Any]]]]] = None,
role_name: builtins.str,
all_privileges: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
id: typing.Optional[builtins.str] = None,
on_account: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
on_account_object: typing.Optional[typing.Union["GrantPrivilegesToRoleOnAccountObject", typing.Dict[builtins.str, typing.Any]]] = None,
on_schema: typing.Optional[typing.Union["GrantPrivilegesToRoleOnSchema", typing.Dict[builtins.str, typing.Any]]] = None,
on_schema_object: typing.Optional[typing.Union["GrantPrivilegesToRoleOnSchemaObject", typing.Dict[builtins.str, typing.Any]]] = None,
privileges: typing.Optional[typing.Sequence[builtins.str]] = None,
with_grant_option: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
) -> None:
'''
:param connection:
:param count:
:param depends_on:
:param for_each:
:param lifecycle:
:param provider:
:param provisioners:
:param role_name: The fully qualified name of the role to which privileges will be granted. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#role_name GrantPrivilegesToRole#role_name}
:param all_privileges: Grant all privileges on the account role. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#all_privileges GrantPrivilegesToRole#all_privileges}
:param id: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#id GrantPrivilegesToRole#id}. Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2. If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
:param on_account: If true, the privileges will be granted on the account. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#on_account GrantPrivilegesToRole#on_account}
:param on_account_object: on_account_object block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#on_account_object GrantPrivilegesToRole#on_account_object}
:param on_schema: on_schema block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#on_schema GrantPrivilegesToRole#on_schema}
:param on_schema_object: on_schema_object block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#on_schema_object GrantPrivilegesToRole#on_schema_object}
:param privileges: The privileges to grant on the account role. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#privileges GrantPrivilegesToRole#privileges}
:param with_grant_option: Specifies whether the grantee can grant the privileges to other users. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#with_grant_option GrantPrivilegesToRole#with_grant_option}
'''
if isinstance(lifecycle, dict):
lifecycle = _cdktf_9a9027ec.TerraformResourceLifecycle(**lifecycle)
if isinstance(on_account_object, dict):
on_account_object = GrantPrivilegesToRoleOnAccountObject(**on_account_object)
if isinstance(on_schema, dict):
on_schema = GrantPrivilegesToRoleOnSchema(**on_schema)
if isinstance(on_schema_object, dict):
on_schema_object = GrantPrivilegesToRoleOnSchemaObject(**on_schema_object)
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__6a5ee16345c8b0c6f3c0384a76deedd8d755a73f999a1c40ba515563077a3514)
check_type(argname="argument connection", value=connection, expected_type=type_hints["connection"])
check_type(argname="argument count", value=count, expected_type=type_hints["count"])
check_type(argname="argument depends_on", value=depends_on, expected_type=type_hints["depends_on"])
check_type(argname="argument for_each", value=for_each, expected_type=type_hints["for_each"])
check_type(argname="argument lifecycle", value=lifecycle, expected_type=type_hints["lifecycle"])
check_type(argname="argument provider", value=provider, expected_type=type_hints["provider"])
check_type(argname="argument provisioners", value=provisioners, expected_type=type_hints["provisioners"])
check_type(argname="argument role_name", value=role_name, expected_type=type_hints["role_name"])
check_type(argname="argument all_privileges", value=all_privileges, expected_type=type_hints["all_privileges"])
check_type(argname="argument id", value=id, expected_type=type_hints["id"])
check_type(argname="argument on_account", value=on_account, expected_type=type_hints["on_account"])
check_type(argname="argument on_account_object", value=on_account_object, expected_type=type_hints["on_account_object"])
check_type(argname="argument on_schema", value=on_schema, expected_type=type_hints["on_schema"])
check_type(argname="argument on_schema_object", value=on_schema_object, expected_type=type_hints["on_schema_object"])
check_type(argname="argument privileges", value=privileges, expected_type=type_hints["privileges"])
check_type(argname="argument with_grant_option", value=with_grant_option, expected_type=type_hints["with_grant_option"])
self._values: typing.Dict[builtins.str, typing.Any] = {
"role_name": role_name,
}
if connection is not None:
self._values["connection"] = connection
if count is not None:
self._values["count"] = count
if depends_on is not None:
self._values["depends_on"] = depends_on
if for_each is not None:
self._values["for_each"] = for_each
if lifecycle is not None:
self._values["lifecycle"] = lifecycle
if provider is not None:
self._values["provider"] = provider
if provisioners is not None:
self._values["provisioners"] = provisioners
if all_privileges is not None:
self._values["all_privileges"] = all_privileges
if id is not None:
self._values["id"] = id
if on_account is not None:
self._values["on_account"] = on_account
if on_account_object is not None:
self._values["on_account_object"] = on_account_object
if on_schema is not None:
self._values["on_schema"] = on_schema
if on_schema_object is not None:
self._values["on_schema_object"] = on_schema_object
if privileges is not None:
self._values["privileges"] = privileges
if with_grant_option is not None:
self._values["with_grant_option"] = with_grant_option
@builtins.property
def connection(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, _cdktf_9a9027ec.WinrmProvisionerConnection]]:
'''
:stability: experimental
'''
result = self._values.get("connection")
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, _cdktf_9a9027ec.WinrmProvisionerConnection]], result)
@builtins.property
def count(
self,
) -> typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]]:
'''
:stability: experimental
'''
result = self._values.get("count")
return typing.cast(typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]], result)
@builtins.property
def depends_on(
self,
) -> typing.Optional[typing.List[_cdktf_9a9027ec.ITerraformDependable]]:
'''
:stability: experimental
'''
result = self._values.get("depends_on")
return typing.cast(typing.Optional[typing.List[_cdktf_9a9027ec.ITerraformDependable]], result)
@builtins.property
def for_each(self) -> typing.Optional[_cdktf_9a9027ec.ITerraformIterator]:
'''
:stability: experimental
'''
result = self._values.get("for_each")
return typing.cast(typing.Optional[_cdktf_9a9027ec.ITerraformIterator], result)
@builtins.property
def lifecycle(self) -> typing.Optional[_cdktf_9a9027ec.TerraformResourceLifecycle]:
'''
:stability: experimental
'''
result = self._values.get("lifecycle")
return typing.cast(typing.Optional[_cdktf_9a9027ec.TerraformResourceLifecycle], result)
@builtins.property
def provider(self) -> typing.Optional[_cdktf_9a9027ec.TerraformProvider]:
'''
:stability: experimental
'''
result = self._values.get("provider")
return typing.cast(typing.Optional[_cdktf_9a9027ec.TerraformProvider], result)
@builtins.property
def provisioners(
self,
) -> typing.Optional[typing.List[typing.Union[_cdktf_9a9027ec.FileProvisioner, _cdktf_9a9027ec.LocalExecProvisioner, _cdktf_9a9027ec.RemoteExecProvisioner]]]:
'''
:stability: experimental
'''
result = self._values.get("provisioners")
return typing.cast(typing.Optional[typing.List[typing.Union[_cdktf_9a9027ec.FileProvisioner, _cdktf_9a9027ec.LocalExecProvisioner, _cdktf_9a9027ec.RemoteExecProvisioner]]], result)
@builtins.property
def role_name(self) -> builtins.str:
'''The fully qualified name of the role to which privileges will be granted.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#role_name GrantPrivilegesToRole#role_name}
'''
result = self._values.get("role_name")
assert result is not None, "Required property 'role_name' is missing"
return typing.cast(builtins.str, result)
@builtins.property
def all_privileges(
self,
) -> typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]]:
'''Grant all privileges on the account role.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#all_privileges GrantPrivilegesToRole#all_privileges}
'''
result = self._values.get("all_privileges")
return typing.cast(typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]], result)
@builtins.property
def id(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#id GrantPrivilegesToRole#id}.
Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2.
If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
'''
result = self._values.get("id")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def on_account(
self,
) -> typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]]:
'''If true, the privileges will be granted on the account.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#on_account GrantPrivilegesToRole#on_account}
'''
result = self._values.get("on_account")
return typing.cast(typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]], result)
@builtins.property
def on_account_object(
self,
) -> typing.Optional["GrantPrivilegesToRoleOnAccountObject"]:
'''on_account_object block.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#on_account_object GrantPrivilegesToRole#on_account_object}
'''
result = self._values.get("on_account_object")
return typing.cast(typing.Optional["GrantPrivilegesToRoleOnAccountObject"], result)
@builtins.property
def on_schema(self) -> typing.Optional["GrantPrivilegesToRoleOnSchema"]:
'''on_schema block.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#on_schema GrantPrivilegesToRole#on_schema}
'''
result = self._values.get("on_schema")
return typing.cast(typing.Optional["GrantPrivilegesToRoleOnSchema"], result)
@builtins.property
def on_schema_object(
self,
) -> typing.Optional["GrantPrivilegesToRoleOnSchemaObject"]:
'''on_schema_object block.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#on_schema_object GrantPrivilegesToRole#on_schema_object}
'''
result = self._values.get("on_schema_object")
return typing.cast(typing.Optional["GrantPrivilegesToRoleOnSchemaObject"], result)
@builtins.property
def privileges(self) -> typing.Optional[typing.List[builtins.str]]:
'''The privileges to grant on the account role.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#privileges GrantPrivilegesToRole#privileges}
'''
result = self._values.get("privileges")
return typing.cast(typing.Optional[typing.List[builtins.str]], result)
@builtins.property
def with_grant_option(
self,
) -> typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]]:
'''Specifies whether the grantee can grant the privileges to other users.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#with_grant_option GrantPrivilegesToRole#with_grant_option}
'''
result = self._values.get("with_grant_option")
return typing.cast(typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "GrantPrivilegesToRoleConfig(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@cdktf/provider-snowflake.grantPrivilegesToRole.GrantPrivilegesToRoleOnAccountObject",
jsii_struct_bases=[],
name_mapping={"object_name": "objectName", "object_type": "objectType"},
)
class GrantPrivilegesToRoleOnAccountObject:
def __init__(self, *, object_name: builtins.str, object_type: builtins.str) -> None:
'''
:param object_name: The fully qualified name of the object on which privileges will be granted. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#object_name GrantPrivilegesToRole#object_name}
:param object_type: The object type of the account object on which privileges will be granted. Valid values are: USER | RESOURCE MONITOR | WAREHOUSE | DATABASE | INTEGRATION | FAILOVER GROUP | REPLICATION GROUP Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#object_type GrantPrivilegesToRole#object_type}
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__7e2b940dca629c72f603127fe6a29409e15e2652a00195bd24f1a71e719057e2)
check_type(argname="argument object_name", value=object_name, expected_type=type_hints["object_name"])
check_type(argname="argument object_type", value=object_type, expected_type=type_hints["object_type"])
self._values: typing.Dict[builtins.str, typing.Any] = {
"object_name": object_name,
"object_type": object_type,
}
@builtins.property
def object_name(self) -> builtins.str:
'''The fully qualified name of the object on which privileges will be granted.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#object_name GrantPrivilegesToRole#object_name}
'''
result = self._values.get("object_name")
assert result is not None, "Required property 'object_name' is missing"
return typing.cast(builtins.str, result)
@builtins.property
def object_type(self) -> builtins.str:
'''The object type of the account object on which privileges will be granted.
Valid values are: USER | RESOURCE MONITOR | WAREHOUSE | DATABASE | INTEGRATION | FAILOVER GROUP | REPLICATION GROUP
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#object_type GrantPrivilegesToRole#object_type}
'''
result = self._values.get("object_type")
assert result is not None, "Required property 'object_type' is missing"
return typing.cast(builtins.str, result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "GrantPrivilegesToRoleOnAccountObject(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
class GrantPrivilegesToRoleOnAccountObjectOutputReference(
_cdktf_9a9027ec.ComplexObject,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-snowflake.grantPrivilegesToRole.GrantPrivilegesToRoleOnAccountObjectOutputReference",
):
def __init__(
self,
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
'''
:param terraform_resource: The parent resource.
:param terraform_attribute: The attribute on the parent resource this class is referencing.
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__04b50230cccb35126081e3572b5f1c597bc59735fa7523942670edb722bb79d9)
check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
jsii.create(self.__class__, self, [terraform_resource, terraform_attribute])
@builtins.property
@jsii.member(jsii_name="objectNameInput")
def object_name_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "objectNameInput"))
@builtins.property
@jsii.member(jsii_name="objectTypeInput")
def object_type_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "objectTypeInput"))
@builtins.property
@jsii.member(jsii_name="objectName")
def object_name(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "objectName"))
@object_name.setter
def object_name(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__3b182b48843436ead4ac2a8e07bfe08b6034b475bb6614d3c9c2362a17f14543)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "objectName", value)
@builtins.property
@jsii.member(jsii_name="objectType")
def object_type(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "objectType"))
@object_type.setter
def object_type(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__76fc8587e6d9b2238cfd63640e9b8e85423aa05f80aad5d7a3a57fd3f1a67fef)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "objectType", value)
@builtins.property
@jsii.member(jsii_name="internalValue")
def internal_value(self) -> typing.Optional[GrantPrivilegesToRoleOnAccountObject]:
return typing.cast(typing.Optional[GrantPrivilegesToRoleOnAccountObject], jsii.get(self, "internalValue"))
@internal_value.setter
def internal_value(
self,
value: typing.Optional[GrantPrivilegesToRoleOnAccountObject],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__e2d74bb9e5fcab7bd1dcc8cf0a91f524a2d695a3f51786e135cc7f4e8e62d313)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "internalValue", value)
@jsii.data_type(
jsii_type="@cdktf/provider-snowflake.grantPrivilegesToRole.GrantPrivilegesToRoleOnSchema",
jsii_struct_bases=[],
name_mapping={
"all_schemas_in_database": "allSchemasInDatabase",
"future_schemas_in_database": "futureSchemasInDatabase",
"schema_name": "schemaName",
},
)
class GrantPrivilegesToRoleOnSchema:
def __init__(
self,
*,
all_schemas_in_database: typing.Optional[builtins.str] = None,
future_schemas_in_database: typing.Optional[builtins.str] = None,
schema_name: typing.Optional[builtins.str] = None,
) -> None:
'''
:param all_schemas_in_database: The fully qualified name of the database. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#all_schemas_in_database GrantPrivilegesToRole#all_schemas_in_database}
:param future_schemas_in_database: The fully qualified name of the database. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#future_schemas_in_database GrantPrivilegesToRole#future_schemas_in_database}
:param schema_name: The fully qualified name of the schema. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#schema_name GrantPrivilegesToRole#schema_name}
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__ce19377181accfbda92f741a0364170e08f650d7dc9ffc12bcf1337d71093c9e)
check_type(argname="argument all_schemas_in_database", value=all_schemas_in_database, expected_type=type_hints["all_schemas_in_database"])
check_type(argname="argument future_schemas_in_database", value=future_schemas_in_database, expected_type=type_hints["future_schemas_in_database"])
check_type(argname="argument schema_name", value=schema_name, expected_type=type_hints["schema_name"])
self._values: typing.Dict[builtins.str, typing.Any] = {}
if all_schemas_in_database is not None:
self._values["all_schemas_in_database"] = all_schemas_in_database
if future_schemas_in_database is not None:
self._values["future_schemas_in_database"] = future_schemas_in_database
if schema_name is not None:
self._values["schema_name"] = schema_name
@builtins.property
def all_schemas_in_database(self) -> typing.Optional[builtins.str]:
'''The fully qualified name of the database.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#all_schemas_in_database GrantPrivilegesToRole#all_schemas_in_database}
'''
result = self._values.get("all_schemas_in_database")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def future_schemas_in_database(self) -> typing.Optional[builtins.str]:
'''The fully qualified name of the database.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#future_schemas_in_database GrantPrivilegesToRole#future_schemas_in_database}
'''
result = self._values.get("future_schemas_in_database")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def schema_name(self) -> typing.Optional[builtins.str]:
'''The fully qualified name of the schema.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#schema_name GrantPrivilegesToRole#schema_name}
'''
result = self._values.get("schema_name")
return typing.cast(typing.Optional[builtins.str], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "GrantPrivilegesToRoleOnSchema(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@cdktf/provider-snowflake.grantPrivilegesToRole.GrantPrivilegesToRoleOnSchemaObject",
jsii_struct_bases=[],
name_mapping={
"all": "all",
"future": "future",
"object_name": "objectName",
"object_type": "objectType",
},
)
class GrantPrivilegesToRoleOnSchemaObject:
def __init__(
self,
*,
all: typing.Optional[typing.Union["GrantPrivilegesToRoleOnSchemaObjectAll", typing.Dict[builtins.str, typing.Any]]] = None,
future: typing.Optional[typing.Union["GrantPrivilegesToRoleOnSchemaObjectFuture", typing.Dict[builtins.str, typing.Any]]] = None,
object_name: typing.Optional[builtins.str] = None,
object_type: typing.Optional[builtins.str] = None,
) -> None:
'''
:param all: all block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#all GrantPrivilegesToRole#all}
:param future: future block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#future GrantPrivilegesToRole#future}
:param object_name: The fully qualified name of the object on which privileges will be granted. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#object_name GrantPrivilegesToRole#object_name}
:param object_type: The object type of the schema object on which privileges will be granted. Valid values are: ALERT | EVENT TABLE | FILE FORMAT | FUNCTION | PROCEDURE | SECRET | SEQUENCE | PIPE | MASKING POLICY | PASSWORD POLICY | ROW ACCESS POLICY | SESSION POLICY | TAG | STAGE | STREAM | TABLE | EXTERNAL TABLE | TASK | VIEW | MATERIALIZED VIEW Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#object_type GrantPrivilegesToRole#object_type}
'''
if isinstance(all, dict):
all = GrantPrivilegesToRoleOnSchemaObjectAll(**all)
if isinstance(future, dict):
future = GrantPrivilegesToRoleOnSchemaObjectFuture(**future)
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__49a2c95a580eaf0ee6738063936b4fef69c97fede091faa3ca0214c9e97b79f8)
check_type(argname="argument all", value=all, expected_type=type_hints["all"])
check_type(argname="argument future", value=future, expected_type=type_hints["future"])
check_type(argname="argument object_name", value=object_name, expected_type=type_hints["object_name"])
check_type(argname="argument object_type", value=object_type, expected_type=type_hints["object_type"])
self._values: typing.Dict[builtins.str, typing.Any] = {}
if all is not None:
self._values["all"] = all
if future is not None:
self._values["future"] = future
if object_name is not None:
self._values["object_name"] = object_name
if object_type is not None:
self._values["object_type"] = object_type
@builtins.property
def all(self) -> typing.Optional["GrantPrivilegesToRoleOnSchemaObjectAll"]:
'''all block.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#all GrantPrivilegesToRole#all}
'''
result = self._values.get("all")
return typing.cast(typing.Optional["GrantPrivilegesToRoleOnSchemaObjectAll"], result)
@builtins.property
def future(self) -> typing.Optional["GrantPrivilegesToRoleOnSchemaObjectFuture"]:
'''future block.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#future GrantPrivilegesToRole#future}
'''
result = self._values.get("future")
return typing.cast(typing.Optional["GrantPrivilegesToRoleOnSchemaObjectFuture"], result)
@builtins.property
def object_name(self) -> typing.Optional[builtins.str]:
'''The fully qualified name of the object on which privileges will be granted.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#object_name GrantPrivilegesToRole#object_name}
'''
result = self._values.get("object_name")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def object_type(self) -> typing.Optional[builtins.str]:
'''The object type of the schema object on which privileges will be granted.
Valid values are: ALERT | EVENT TABLE | FILE FORMAT | FUNCTION | PROCEDURE | SECRET | SEQUENCE | PIPE | MASKING POLICY | PASSWORD POLICY | ROW ACCESS POLICY | SESSION POLICY | TAG | STAGE | STREAM | TABLE | EXTERNAL TABLE | TASK | VIEW | MATERIALIZED VIEW
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#object_type GrantPrivilegesToRole#object_type}
'''
result = self._values.get("object_type")
return typing.cast(typing.Optional[builtins.str], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "GrantPrivilegesToRoleOnSchemaObject(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@cdktf/provider-snowflake.grantPrivilegesToRole.GrantPrivilegesToRoleOnSchemaObjectAll",
jsii_struct_bases=[],
name_mapping={
"object_type_plural": "objectTypePlural",
"in_database": "inDatabase",
"in_schema": "inSchema",
},
)
class GrantPrivilegesToRoleOnSchemaObjectAll:
def __init__(
self,
*,
object_type_plural: builtins.str,
in_database: typing.Optional[builtins.str] = None,
in_schema: typing.Optional[builtins.str] = None,
) -> None:
'''
:param object_type_plural: The plural object type of the schema object on which privileges will be granted. Valid values are: ALERTS | EVENT TABLES | FILE FORMATS | FUNCTIONS | PROCEDURES | SECRETS | SEQUENCES | PIPES | MASKING POLICIES | PASSWORD POLICIES | ROW ACCESS POLICIES | SESSION POLICIES | TAGS | STAGES | STREAMS | TABLES | EXTERNAL TABLES | TASKS | VIEWS | MATERIALIZED VIEWS Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#object_type_plural GrantPrivilegesToRole#object_type_plural}
:param in_database: The fully qualified name of the database. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#in_database GrantPrivilegesToRole#in_database}
:param in_schema: The fully qualified name of the schema. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#in_schema GrantPrivilegesToRole#in_schema}
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__5a78345955dd2a438762ae789a5161393c47155a724d3f7031b9d187b7a38aca)
check_type(argname="argument object_type_plural", value=object_type_plural, expected_type=type_hints["object_type_plural"])
check_type(argname="argument in_database", value=in_database, expected_type=type_hints["in_database"])
check_type(argname="argument in_schema", value=in_schema, expected_type=type_hints["in_schema"])
self._values: typing.Dict[builtins.str, typing.Any] = {
"object_type_plural": object_type_plural,
}
if in_database is not None:
self._values["in_database"] = in_database
if in_schema is not None:
self._values["in_schema"] = in_schema
@builtins.property
def object_type_plural(self) -> builtins.str:
'''The plural object type of the schema object on which privileges will be granted.
Valid values are: ALERTS | EVENT TABLES | FILE FORMATS | FUNCTIONS | PROCEDURES | SECRETS | SEQUENCES | PIPES | MASKING POLICIES | PASSWORD POLICIES | ROW ACCESS POLICIES | SESSION POLICIES | TAGS | STAGES | STREAMS | TABLES | EXTERNAL TABLES | TASKS | VIEWS | MATERIALIZED VIEWS
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#object_type_plural GrantPrivilegesToRole#object_type_plural}
'''
result = self._values.get("object_type_plural")
assert result is not None, "Required property 'object_type_plural' is missing"
return typing.cast(builtins.str, result)
@builtins.property
def in_database(self) -> typing.Optional[builtins.str]:
'''The fully qualified name of the database.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#in_database GrantPrivilegesToRole#in_database}
'''
result = self._values.get("in_database")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def in_schema(self) -> typing.Optional[builtins.str]:
'''The fully qualified name of the schema.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#in_schema GrantPrivilegesToRole#in_schema}
'''
result = self._values.get("in_schema")
return typing.cast(typing.Optional[builtins.str], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "GrantPrivilegesToRoleOnSchemaObjectAll(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
class GrantPrivilegesToRoleOnSchemaObjectAllOutputReference(
_cdktf_9a9027ec.ComplexObject,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-snowflake.grantPrivilegesToRole.GrantPrivilegesToRoleOnSchemaObjectAllOutputReference",
):
def __init__(
self,
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
'''
:param terraform_resource: The parent resource.
:param terraform_attribute: The attribute on the parent resource this class is referencing.
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__91b52dbcb08941435a4d64a4078887926b61df6a885912b999aa2dba57c4ba3a)
check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
jsii.create(self.__class__, self, [terraform_resource, terraform_attribute])
@jsii.member(jsii_name="resetInDatabase")
def reset_in_database(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetInDatabase", []))
@jsii.member(jsii_name="resetInSchema")
def reset_in_schema(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetInSchema", []))
@builtins.property
@jsii.member(jsii_name="inDatabaseInput")
def in_database_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "inDatabaseInput"))
@builtins.property
@jsii.member(jsii_name="inSchemaInput")
def in_schema_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "inSchemaInput"))
@builtins.property
@jsii.member(jsii_name="objectTypePluralInput")
def object_type_plural_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "objectTypePluralInput"))
@builtins.property
@jsii.member(jsii_name="inDatabase")
def in_database(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "inDatabase"))
@in_database.setter
def in_database(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__5988d10133134091f19761ea487cd1306ef84b23a0573e386806c8e3b970e26d)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "inDatabase", value)
@builtins.property
@jsii.member(jsii_name="inSchema")
def in_schema(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "inSchema"))
@in_schema.setter
def in_schema(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__ffd6066af8b0c1195500de025d1940ced1ac7890d7b836d0ab8f7c6f0ebe0e09)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "inSchema", value)
@builtins.property
@jsii.member(jsii_name="objectTypePlural")
def object_type_plural(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "objectTypePlural"))
@object_type_plural.setter
def object_type_plural(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__06ab70d0b240379a72a9d0a97ea2644edf5bfc1a9dfb0539d6fe9cc72d5152ea)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "objectTypePlural", value)
@builtins.property
@jsii.member(jsii_name="internalValue")
def internal_value(self) -> typing.Optional[GrantPrivilegesToRoleOnSchemaObjectAll]:
return typing.cast(typing.Optional[GrantPrivilegesToRoleOnSchemaObjectAll], jsii.get(self, "internalValue"))
@internal_value.setter
def internal_value(
self,
value: typing.Optional[GrantPrivilegesToRoleOnSchemaObjectAll],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__c919a2915e109109fc271c4aae63b898a7ddc76852ca1b4622f035831274ce3f)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "internalValue", value)
@jsii.data_type(
jsii_type="@cdktf/provider-snowflake.grantPrivilegesToRole.GrantPrivilegesToRoleOnSchemaObjectFuture",
jsii_struct_bases=[],
name_mapping={
"object_type_plural": "objectTypePlural",
"in_database": "inDatabase",
"in_schema": "inSchema",
},
)
class GrantPrivilegesToRoleOnSchemaObjectFuture:
def __init__(
self,
*,
object_type_plural: builtins.str,
in_database: typing.Optional[builtins.str] = None,
in_schema: typing.Optional[builtins.str] = None,
) -> None:
'''
:param object_type_plural: The plural object type of the schema object on which privileges will be granted. Valid values are: ALERTS | EVENT TABLES | FILE FORMATS | FUNCTIONS | PROCEDURES | SECRETS | SEQUENCES | PIPES | MASKING POLICIES | PASSWORD POLICIES | ROW ACCESS POLICIES | SESSION POLICIES | TAGS | STAGES | STREAMS | TABLES | EXTERNAL TABLES | TASKS | VIEWS | MATERIALIZED VIEWS Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#object_type_plural GrantPrivilegesToRole#object_type_plural}
:param in_database: The fully qualified name of the database. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#in_database GrantPrivilegesToRole#in_database}
:param in_schema: The fully qualified name of the schema. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#in_schema GrantPrivilegesToRole#in_schema}
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__bebb066b98cbf145907bc2aad286413d5a9af2e5923bf69482efed25e1265763)
check_type(argname="argument object_type_plural", value=object_type_plural, expected_type=type_hints["object_type_plural"])
check_type(argname="argument in_database", value=in_database, expected_type=type_hints["in_database"])
check_type(argname="argument in_schema", value=in_schema, expected_type=type_hints["in_schema"])
self._values: typing.Dict[builtins.str, typing.Any] = {
"object_type_plural": object_type_plural,
}
if in_database is not None:
self._values["in_database"] = in_database
if in_schema is not None:
self._values["in_schema"] = in_schema
@builtins.property
def object_type_plural(self) -> builtins.str:
'''The plural object type of the schema object on which privileges will be granted.
Valid values are: ALERTS | EVENT TABLES | FILE FORMATS | FUNCTIONS | PROCEDURES | SECRETS | SEQUENCES | PIPES | MASKING POLICIES | PASSWORD POLICIES | ROW ACCESS POLICIES | SESSION POLICIES | TAGS | STAGES | STREAMS | TABLES | EXTERNAL TABLES | TASKS | VIEWS | MATERIALIZED VIEWS
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#object_type_plural GrantPrivilegesToRole#object_type_plural}
'''
result = self._values.get("object_type_plural")
assert result is not None, "Required property 'object_type_plural' is missing"
return typing.cast(builtins.str, result)
@builtins.property
def in_database(self) -> typing.Optional[builtins.str]:
'''The fully qualified name of the database.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#in_database GrantPrivilegesToRole#in_database}
'''
result = self._values.get("in_database")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def in_schema(self) -> typing.Optional[builtins.str]:
'''The fully qualified name of the schema.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#in_schema GrantPrivilegesToRole#in_schema}
'''
result = self._values.get("in_schema")
return typing.cast(typing.Optional[builtins.str], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "GrantPrivilegesToRoleOnSchemaObjectFuture(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
class GrantPrivilegesToRoleOnSchemaObjectFutureOutputReference(
_cdktf_9a9027ec.ComplexObject,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-snowflake.grantPrivilegesToRole.GrantPrivilegesToRoleOnSchemaObjectFutureOutputReference",
):
def __init__(
self,
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
'''
:param terraform_resource: The parent resource.
:param terraform_attribute: The attribute on the parent resource this class is referencing.
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__94d27973acbbfc48efc6278f590b2d0f6fd2c662d6cc3dfe68b0a552498b33df)
check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
jsii.create(self.__class__, self, [terraform_resource, terraform_attribute])
@jsii.member(jsii_name="resetInDatabase")
def reset_in_database(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetInDatabase", []))
@jsii.member(jsii_name="resetInSchema")
def reset_in_schema(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetInSchema", []))
@builtins.property
@jsii.member(jsii_name="inDatabaseInput")
def in_database_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "inDatabaseInput"))
@builtins.property
@jsii.member(jsii_name="inSchemaInput")
def in_schema_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "inSchemaInput"))
@builtins.property
@jsii.member(jsii_name="objectTypePluralInput")
def object_type_plural_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "objectTypePluralInput"))
@builtins.property
@jsii.member(jsii_name="inDatabase")
def in_database(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "inDatabase"))
@in_database.setter
def in_database(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__4cf29d2a89db7765549ca4a2f735982395dfd731b2c7da88be3f5f23da2e544f)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "inDatabase", value)
@builtins.property
@jsii.member(jsii_name="inSchema")
def in_schema(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "inSchema"))
@in_schema.setter
def in_schema(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__a2242ae018689f76b3afe88d3ab2e8833f11b087de69e05de9ddef722ba34074)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "inSchema", value)
@builtins.property
@jsii.member(jsii_name="objectTypePlural")
def object_type_plural(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "objectTypePlural"))
@object_type_plural.setter
def object_type_plural(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__0a1784a67a66ed16fd66a7fb2ba1117c858f57dabdf4d4ed68d00a8f2db84567)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "objectTypePlural", value)
@builtins.property
@jsii.member(jsii_name="internalValue")
def internal_value(
self,
) -> typing.Optional[GrantPrivilegesToRoleOnSchemaObjectFuture]:
return typing.cast(typing.Optional[GrantPrivilegesToRoleOnSchemaObjectFuture], jsii.get(self, "internalValue"))
@internal_value.setter
def internal_value(
self,
value: typing.Optional[GrantPrivilegesToRoleOnSchemaObjectFuture],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__972ae2e364890acdbadafbf89145bf5ad9c87c2cfd97a054f403bd8eb865e913)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "internalValue", value)
class GrantPrivilegesToRoleOnSchemaObjectOutputReference(
_cdktf_9a9027ec.ComplexObject,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-snowflake.grantPrivilegesToRole.GrantPrivilegesToRoleOnSchemaObjectOutputReference",
):
def __init__(
self,
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
'''
:param terraform_resource: The parent resource.
:param terraform_attribute: The attribute on the parent resource this class is referencing.
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__c8e83d6cd40736f9207e84cc72c752ed7018db58eb74c116f38759914ce89588)
check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
jsii.create(self.__class__, self, [terraform_resource, terraform_attribute])
@jsii.member(jsii_name="putAll")
def put_all(
self,
*,
object_type_plural: builtins.str,
in_database: typing.Optional[builtins.str] = None,
in_schema: typing.Optional[builtins.str] = None,
) -> None:
'''
:param object_type_plural: The plural object type of the schema object on which privileges will be granted. Valid values are: ALERTS | EVENT TABLES | FILE FORMATS | FUNCTIONS | PROCEDURES | SECRETS | SEQUENCES | PIPES | MASKING POLICIES | PASSWORD POLICIES | ROW ACCESS POLICIES | SESSION POLICIES | TAGS | STAGES | STREAMS | TABLES | EXTERNAL TABLES | TASKS | VIEWS | MATERIALIZED VIEWS Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#object_type_plural GrantPrivilegesToRole#object_type_plural}
:param in_database: The fully qualified name of the database. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#in_database GrantPrivilegesToRole#in_database}
:param in_schema: The fully qualified name of the schema. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#in_schema GrantPrivilegesToRole#in_schema}
'''
value = GrantPrivilegesToRoleOnSchemaObjectAll(
object_type_plural=object_type_plural,
in_database=in_database,
in_schema=in_schema,
)
return typing.cast(None, jsii.invoke(self, "putAll", [value]))
@jsii.member(jsii_name="putFuture")
def put_future(
self,
*,
object_type_plural: builtins.str,
in_database: typing.Optional[builtins.str] = None,
in_schema: typing.Optional[builtins.str] = None,
) -> None:
'''
:param object_type_plural: The plural object type of the schema object on which privileges will be granted. Valid values are: ALERTS | EVENT TABLES | FILE FORMATS | FUNCTIONS | PROCEDURES | SECRETS | SEQUENCES | PIPES | MASKING POLICIES | PASSWORD POLICIES | ROW ACCESS POLICIES | SESSION POLICIES | TAGS | STAGES | STREAMS | TABLES | EXTERNAL TABLES | TASKS | VIEWS | MATERIALIZED VIEWS Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#object_type_plural GrantPrivilegesToRole#object_type_plural}
:param in_database: The fully qualified name of the database. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#in_database GrantPrivilegesToRole#in_database}
:param in_schema: The fully qualified name of the schema. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/snowflake-labs/snowflake/0.70.0/docs/resources/grant_privileges_to_role#in_schema GrantPrivilegesToRole#in_schema}
'''
value = GrantPrivilegesToRoleOnSchemaObjectFuture(
object_type_plural=object_type_plural,
in_database=in_database,
in_schema=in_schema,
)
return typing.cast(None, jsii.invoke(self, "putFuture", [value]))
@jsii.member(jsii_name="resetAll")
def reset_all(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetAll", []))
@jsii.member(jsii_name="resetFuture")
def reset_future(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetFuture", []))
@jsii.member(jsii_name="resetObjectName")
def reset_object_name(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetObjectName", []))
@jsii.member(jsii_name="resetObjectType")
def reset_object_type(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetObjectType", []))
@builtins.property
@jsii.member(jsii_name="all")
def all(self) -> GrantPrivilegesToRoleOnSchemaObjectAllOutputReference:
return typing.cast(GrantPrivilegesToRoleOnSchemaObjectAllOutputReference, jsii.get(self, "all"))
@builtins.property
@jsii.member(jsii_name="future")
def future(self) -> GrantPrivilegesToRoleOnSchemaObjectFutureOutputReference:
return typing.cast(GrantPrivilegesToRoleOnSchemaObjectFutureOutputReference, jsii.get(self, "future"))
@builtins.property
@jsii.member(jsii_name="allInput")
def all_input(self) -> typing.Optional[GrantPrivilegesToRoleOnSchemaObjectAll]:
return typing.cast(typing.Optional[GrantPrivilegesToRoleOnSchemaObjectAll], jsii.get(self, "allInput"))
@builtins.property
@jsii.member(jsii_name="futureInput")
def future_input(
self,
) -> typing.Optional[GrantPrivilegesToRoleOnSchemaObjectFuture]:
return typing.cast(typing.Optional[GrantPrivilegesToRoleOnSchemaObjectFuture], jsii.get(self, "futureInput"))
@builtins.property
@jsii.member(jsii_name="objectNameInput")
def object_name_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "objectNameInput"))
@builtins.property
@jsii.member(jsii_name="objectTypeInput")
def object_type_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "objectTypeInput"))
@builtins.property
@jsii.member(jsii_name="objectName")
def object_name(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "objectName"))
@object_name.setter
def object_name(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__b35e3c5337bed8f57f685a1ba20743248e5012fe6cad62ad9084994b349e934d)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "objectName", value)
@builtins.property
@jsii.member(jsii_name="objectType")
def object_type(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "objectType"))
@object_type.setter
def object_type(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__d2f0810f0321e585ddc691b620d8457d63489c59904dc0ae7e0f51f2a37af191)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "objectType", value)
@builtins.property
@jsii.member(jsii_name="internalValue")
def internal_value(self) -> typing.Optional[GrantPrivilegesToRoleOnSchemaObject]:
return typing.cast(typing.Optional[GrantPrivilegesToRoleOnSchemaObject], jsii.get(self, "internalValue"))
@internal_value.setter
def internal_value(
self,
value: typing.Optional[GrantPrivilegesToRoleOnSchemaObject],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__ab012cbcbfb24d462cc9243a13719d703264fe6703385a7453575606e73fd794)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "internalValue", value)
class GrantPrivilegesToRoleOnSchemaOutputReference(
_cdktf_9a9027ec.ComplexObject,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-snowflake.grantPrivilegesToRole.GrantPrivilegesToRoleOnSchemaOutputReference",
):
def __init__(
self,
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
'''
:param terraform_resource: The parent resource.
:param terraform_attribute: The attribute on the parent resource this class is referencing.
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__ee31efba5e60af4723ca7667d32503ae34df8d3791464bf32e6d27cdb357ff03)
check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
jsii.create(self.__class__, self, [terraform_resource, terraform_attribute])
@jsii.member(jsii_name="resetAllSchemasInDatabase")
def reset_all_schemas_in_database(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetAllSchemasInDatabase", []))
@jsii.member(jsii_name="resetFutureSchemasInDatabase")
def reset_future_schemas_in_database(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetFutureSchemasInDatabase", []))
@jsii.member(jsii_name="resetSchemaName")
def reset_schema_name(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetSchemaName", []))
@builtins.property
@jsii.member(jsii_name="allSchemasInDatabaseInput")
def all_schemas_in_database_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "allSchemasInDatabaseInput"))
@builtins.property
@jsii.member(jsii_name="futureSchemasInDatabaseInput")
def future_schemas_in_database_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "futureSchemasInDatabaseInput"))
@builtins.property
@jsii.member(jsii_name="schemaNameInput")
def schema_name_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "schemaNameInput"))
@builtins.property
@jsii.member(jsii_name="allSchemasInDatabase")
def all_schemas_in_database(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "allSchemasInDatabase"))
@all_schemas_in_database.setter
def all_schemas_in_database(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__882de92b3793c0bb0d2e7563fa4f3746f3b451700d4492e6c8516911234cd285)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "allSchemasInDatabase", value)
@builtins.property
@jsii.member(jsii_name="futureSchemasInDatabase")
def future_schemas_in_database(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "futureSchemasInDatabase"))
@future_schemas_in_database.setter
def future_schemas_in_database(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__3576a871f8f87e60aede310ea2dadfc47a3cc0028bb7b9187855f6659e499a25)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "futureSchemasInDatabase", value)
@builtins.property
@jsii.member(jsii_name="schemaName")
def schema_name(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "schemaName"))
@schema_name.setter
def schema_name(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__cf341a0afaa542dd30aca7ec4e443f71099bf8b86a477bfdbf8acc02284b4038)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "schemaName", value)
@builtins.property
@jsii.member(jsii_name="internalValue")
def internal_value(self) -> typing.Optional[GrantPrivilegesToRoleOnSchema]:
return typing.cast(typing.Optional[GrantPrivilegesToRoleOnSchema], jsii.get(self, "internalValue"))
@internal_value.setter
def internal_value(
self,
value: typing.Optional[GrantPrivilegesToRoleOnSchema],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__19f1218636c7b861b319928bfdcfd6d7fe778ba8bedad7480e8c611cafc06449)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "internalValue", value)
__all__ = [
"GrantPrivilegesToRole",
"GrantPrivilegesToRoleConfig",
"GrantPrivilegesToRoleOnAccountObject",
"GrantPrivilegesToRoleOnAccountObjectOutputReference",
"GrantPrivilegesToRoleOnSchema",
"GrantPrivilegesToRoleOnSchemaObject",
"GrantPrivilegesToRoleOnSchemaObjectAll",
"GrantPrivilegesToRoleOnSchemaObjectAllOutputReference",
"GrantPrivilegesToRoleOnSchemaObjectFuture",
"GrantPrivilegesToRoleOnSchemaObjectFutureOutputReference",
"GrantPrivilegesToRoleOnSchemaObjectOutputReference",
"GrantPrivilegesToRoleOnSchemaOutputReference",
]
publication.publish()
def _typecheckingstub__df9b6bac89d7b7dd746aef9ebbff86facffbdbd9cbbcc1b272c36297b61ff96b(
scope: _constructs_77d1e7e8.Construct,
id_: builtins.str,
*,
role_name: builtins.str,
all_privileges: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
id: typing.Optional[builtins.str] = None,
on_account: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
on_account_object: typing.Optional[typing.Union[GrantPrivilegesToRoleOnAccountObject, typing.Dict[builtins.str, typing.Any]]] = None,
on_schema: typing.Optional[typing.Union[GrantPrivilegesToRoleOnSchema, typing.Dict[builtins.str, typing.Any]]] = None,
on_schema_object: typing.Optional[typing.Union[GrantPrivilegesToRoleOnSchemaObject, typing.Dict[builtins.str, typing.Any]]] = None,
privileges: typing.Optional[typing.Sequence[builtins.str]] = None,
with_grant_option: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
connection: typing.Optional[typing.Union[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.WinrmProvisionerConnection, typing.Dict[builtins.str, typing.Any]]]] = None,
count: typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]] = None,
depends_on: typing.Optional[typing.Sequence[_cdktf_9a9027ec.ITerraformDependable]] = None,
for_each: typing.Optional[_cdktf_9a9027ec.ITerraformIterator] = None,
lifecycle: typing.Optional[typing.Union[_cdktf_9a9027ec.TerraformResourceLifecycle, typing.Dict[builtins.str, typing.Any]]] = None,
provider: typing.Optional[_cdktf_9a9027ec.TerraformProvider] = None,
provisioners: typing.Optional[typing.Sequence[typing.Union[typing.Union[_cdktf_9a9027ec.FileProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.LocalExecProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.RemoteExecProvisioner, typing.Dict[builtins.str, typing.Any]]]]] = None,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__ae2d0fb93445e52b92165c6a295ba8250d2313624055eab84a10146d174ba01c(
value: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__8501f18eab60fb6ab7783788c55cd870cdc25c8c85764ff33e07efd01e65d2d7(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__ad8485be078d691a540591323a275a507a034eec5afb40342cc2946a4923a190(
value: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__99a08e24e5d78c580d7e8493388217245f932fdca73c51ed949542040b227ba3(
value: typing.List[builtins.str],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__7c3fc6e1b88460e0bcd4b85a1d51f18504928917047070ed7149f014ba7068a0(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__1498c1b7fa3eab727be362fbfdc0bad741b1a50b92c9b990ce2adb708c37565c(
value: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__6a5ee16345c8b0c6f3c0384a76deedd8d755a73f999a1c40ba515563077a3514(
*,
connection: typing.Optional[typing.Union[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.WinrmProvisionerConnection, typing.Dict[builtins.str, typing.Any]]]] = None,
count: typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]] = None,
depends_on: typing.Optional[typing.Sequence[_cdktf_9a9027ec.ITerraformDependable]] = None,
for_each: typing.Optional[_cdktf_9a9027ec.ITerraformIterator] = None,
lifecycle: typing.Optional[typing.Union[_cdktf_9a9027ec.TerraformResourceLifecycle, typing.Dict[builtins.str, typing.Any]]] = None,
provider: typing.Optional[_cdktf_9a9027ec.TerraformProvider] = None,
provisioners: typing.Optional[typing.Sequence[typing.Union[typing.Union[_cdktf_9a9027ec.FileProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.LocalExecProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.RemoteExecProvisioner, typing.Dict[builtins.str, typing.Any]]]]] = None,
role_name: builtins.str,
all_privileges: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
id: typing.Optional[builtins.str] = None,
on_account: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
on_account_object: typing.Optional[typing.Union[GrantPrivilegesToRoleOnAccountObject, typing.Dict[builtins.str, typing.Any]]] = None,
on_schema: typing.Optional[typing.Union[GrantPrivilegesToRoleOnSchema, typing.Dict[builtins.str, typing.Any]]] = None,
on_schema_object: typing.Optional[typing.Union[GrantPrivilegesToRoleOnSchemaObject, typing.Dict[builtins.str, typing.Any]]] = None,
privileges: typing.Optional[typing.Sequence[builtins.str]] = None,
with_grant_option: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__7e2b940dca629c72f603127fe6a29409e15e2652a00195bd24f1a71e719057e2(
*,
object_name: builtins.str,
object_type: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__04b50230cccb35126081e3572b5f1c597bc59735fa7523942670edb722bb79d9(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__3b182b48843436ead4ac2a8e07bfe08b6034b475bb6614d3c9c2362a17f14543(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__76fc8587e6d9b2238cfd63640e9b8e85423aa05f80aad5d7a3a57fd3f1a67fef(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__e2d74bb9e5fcab7bd1dcc8cf0a91f524a2d695a3f51786e135cc7f4e8e62d313(
value: typing.Optional[GrantPrivilegesToRoleOnAccountObject],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__ce19377181accfbda92f741a0364170e08f650d7dc9ffc12bcf1337d71093c9e(
*,
all_schemas_in_database: typing.Optional[builtins.str] = None,
future_schemas_in_database: typing.Optional[builtins.str] = None,
schema_name: typing.Optional[builtins.str] = None,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__49a2c95a580eaf0ee6738063936b4fef69c97fede091faa3ca0214c9e97b79f8(
*,
all: typing.Optional[typing.Union[GrantPrivilegesToRoleOnSchemaObjectAll, typing.Dict[builtins.str, typing.Any]]] = None,
future: typing.Optional[typing.Union[GrantPrivilegesToRoleOnSchemaObjectFuture, typing.Dict[builtins.str, typing.Any]]] = None,
object_name: typing.Optional[builtins.str] = None,
object_type: typing.Optional[builtins.str] = None,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__5a78345955dd2a438762ae789a5161393c47155a724d3f7031b9d187b7a38aca(
*,
object_type_plural: builtins.str,
in_database: typing.Optional[builtins.str] = None,
in_schema: typing.Optional[builtins.str] = None,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__91b52dbcb08941435a4d64a4078887926b61df6a885912b999aa2dba57c4ba3a(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__5988d10133134091f19761ea487cd1306ef84b23a0573e386806c8e3b970e26d(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__ffd6066af8b0c1195500de025d1940ced1ac7890d7b836d0ab8f7c6f0ebe0e09(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__06ab70d0b240379a72a9d0a97ea2644edf5bfc1a9dfb0539d6fe9cc72d5152ea(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__c919a2915e109109fc271c4aae63b898a7ddc76852ca1b4622f035831274ce3f(
value: typing.Optional[GrantPrivilegesToRoleOnSchemaObjectAll],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__bebb066b98cbf145907bc2aad286413d5a9af2e5923bf69482efed25e1265763(
*,
object_type_plural: builtins.str,
in_database: typing.Optional[builtins.str] = None,
in_schema: typing.Optional[builtins.str] = None,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__94d27973acbbfc48efc6278f590b2d0f6fd2c662d6cc3dfe68b0a552498b33df(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__4cf29d2a89db7765549ca4a2f735982395dfd731b2c7da88be3f5f23da2e544f(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__a2242ae018689f76b3afe88d3ab2e8833f11b087de69e05de9ddef722ba34074(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__0a1784a67a66ed16fd66a7fb2ba1117c858f57dabdf4d4ed68d00a8f2db84567(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__972ae2e364890acdbadafbf89145bf5ad9c87c2cfd97a054f403bd8eb865e913(
value: typing.Optional[GrantPrivilegesToRoleOnSchemaObjectFuture],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__c8e83d6cd40736f9207e84cc72c752ed7018db58eb74c116f38759914ce89588(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__b35e3c5337bed8f57f685a1ba20743248e5012fe6cad62ad9084994b349e934d(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__d2f0810f0321e585ddc691b620d8457d63489c59904dc0ae7e0f51f2a37af191(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__ab012cbcbfb24d462cc9243a13719d703264fe6703385a7453575606e73fd794(
value: typing.Optional[GrantPrivilegesToRoleOnSchemaObject],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__ee31efba5e60af4723ca7667d32503ae34df8d3791464bf32e6d27cdb357ff03(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__882de92b3793c0bb0d2e7563fa4f3746f3b451700d4492e6c8516911234cd285(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__3576a871f8f87e60aede310ea2dadfc47a3cc0028bb7b9187855f6659e499a25(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__cf341a0afaa542dd30aca7ec4e443f71099bf8b86a477bfdbf8acc02284b4038(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__19f1218636c7b861b319928bfdcfd6d7fe778ba8bedad7480e8c611cafc06449(
value: typing.Optional[GrantPrivilegesToRoleOnSchema],
) -> None:
"""Type checking stubs"""
pass
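# Hedged usage sketch, not part of the generated bindings above: it shows one way
# the data classes in this module could be combined when declaring a
# grant_privileges_to_role resource from a cdktf stack. The role name, database
# name, construct id and the helper function itself are illustrative assumptions;
# the constructor arguments follow the signatures defined earlier in this module.
def _example_grant_select_on_all_tables(
    scope: _constructs_77d1e7e8.Construct,
) -> GrantPrivilegesToRole:
    return GrantPrivilegesToRole(
        scope,
        "grant_select_on_all_tables",  # hypothetical construct id
        role_name="ANALYST",  # hypothetical role
        privileges=["SELECT"],
        on_schema_object=GrantPrivilegesToRoleOnSchemaObject(
            all=GrantPrivilegesToRoleOnSchemaObjectAll(
                object_type_plural="TABLES",
                in_database="ANALYTICS_DB",  # hypothetical database
            ),
        ),
    )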
/cryptofeed-tks-2.3.0.tar.gz/cryptofeed-tks-2.3.0/cryptofeed/exchanges/independent_reserve.py
import asyncio
from decimal import Decimal
import logging
from typing import Dict, Tuple
from collections import defaultdict
from time import time
from yapic import json
from cryptofeed.connection import AsyncConnection, RestEndpoint, Routes, WebsocketEndpoint
from cryptofeed.defines import BID, BUY, ASK, INDEPENDENT_RESERVE, L3_BOOK, SELL, TRADES
from cryptofeed.feed import Feed
from cryptofeed.symbols import Symbol
from cryptofeed.exceptions import MissingSequenceNumber
from cryptofeed.types import Trade, OrderBook
LOG = logging.getLogger('feedhandler')
class IndependentReserve(Feed):
id = INDEPENDENT_RESERVE
websocket_endpoints = [WebsocketEndpoint('wss://websockets.independentreserve.com')]
rest_endpoints = [RestEndpoint('https://api.independentreserve.com', routes=Routes(['/Public/GetValidPrimaryCurrencyCodes', '/Public/GetValidSecondaryCurrencyCodes'], l3book='/Public/GetAllOrders?primaryCurrencyCode={}&secondaryCurrencyCode={}'))]
websocket_channels = {
L3_BOOK: 'orderbook-{}',
TRADES: 'ticker-{}',
}
request_limit = 1
@classmethod
def _parse_symbol_data(cls, data: list) -> Tuple[Dict, Dict]:
ret = {}
info = defaultdict(dict)
bases, quotes = data
for base in bases:
for quote in quotes:
sym = Symbol(base.upper().replace('XBT', 'BTC'), quote.upper())
info['instrument_type'][sym.normalized] = sym.type
ret[sym.normalized] = f"{base.lower()}-{quote.lower()}"
return ret, info
def __reset(self):
self._l3_book = {}
self._order_ids = defaultdict(dict)
self._sequence_no = {}
async def _trade(self, msg: dict, timestamp: float):
'''
{
'Channel': 'ticker-eth-aud',
'Nonce': 78,
'Data': {
'TradeGuid': '6d1c2e90-592a-409c-a8d8-58b2d25e0b0b',
'Pair': 'eth-aud',
'TradeDate': datetime.datetime(2022, 1, 31, 8, 28, 26, 552573, tzinfo=datetime.timezone(datetime.timedelta(seconds=39600))),
'Price': Decimal('3650.81'),
'Volume': Decimal('0.543'),
'BidGuid': '0430e003-c35e-410e-85f5-f0bb5c40193b',
'OfferGuid': '559c1dd2-e681-4efc-b49b-14a07c069de4',
'Side': 'Sell'
},
'Time': 1643578106584,
'Event': 'Trade'
}
'''
t = Trade(
self.id,
self.exchange_symbol_to_std_symbol(msg['Data']['Pair']),
SELL if msg['Data']['Side'] == 'Sell' else BUY,
Decimal(msg['Data']['Volume']),
Decimal(msg['Data']['Price']),
self.timestamp_normalize(msg['Data']['TradeDate']),
id=msg['Data']['TradeGuid'],
raw=msg
)
await self.callback(TRADES, t, timestamp)
async def _book(self, msg: dict, timestamp: float):
'''
{
'Channel': 'orderbook-xbt',
'Nonce': 65605,
'Data': {
'OrderType': 'LimitBid',
'OrderGuid': 'fee7094c-1921-44b7-8d8d-8b6e1cedb270'
},
'Time': 1643931382903,
'Event': 'OrderCanceled'
}
{
'Channel': 'orderbook-xbt',
'Nonce': 65606,
'Data': {
'OrderType': 'LimitOffer',
'OrderGuid': '22a72137-9829-4e6c-b265-a38714256877',
'Price': {
'aud': Decimal('51833.41'),
'usd': Decimal('37191.23'),
'nzd': Decimal('55836.92'),
'sgd': Decimal('49892.59')
},
'Volume': Decimal('0.09')
},
'Time': 1643931382903,
'Event': 'NewOrder'
}
'''
seq_no = msg['Nonce']
base = msg['Channel'].split('-')[-1]
delta = {BID: [], ASK: []}
for symbol in self.subscription[self.std_channel_to_exchange(L3_BOOK)]:
if symbol.startswith(base):
quote = symbol.split('-')[-1]
instrument = self.exchange_symbol_to_std_symbol(f"{base}-{quote}")
if instrument in self._sequence_no and self._sequence_no[instrument] + 1 != seq_no:
raise MissingSequenceNumber
self._sequence_no[instrument] = seq_no
if instrument not in self._l3_book:
await self._snapshot(base, quote)
if msg['Event'] == 'OrderCanceled':
uuid = msg['Data']['OrderGuid']
if uuid in self._order_ids[instrument]:
price, side = self._order_ids[instrument][uuid]
if price in self._l3_book[instrument].book[side] and uuid in self._l3_book[instrument].book[side][price]:
del self._l3_book[instrument].book[side][price][uuid]
if len(self._l3_book[instrument].book[side][price]) == 0:
del self._l3_book[instrument].book[side][price]
delta[side].append((uuid, price, 0))
del self._order_ids[instrument][uuid]
else:
                        # During snapshot processing we may receive cancellation messages for orders
                        # that were already removed from the snapshot; there is nothing to update, so
                        # skip the client callback for this symbol.
continue
elif msg['Event'] == 'NewOrder':
uuid = msg['Data']['OrderGuid']
price = msg['Data']['Price'][quote]
size = msg['Data']['Volume']
side = BID if msg['Data']['OrderType'].endswith('Bid') else ASK
self._order_ids[instrument][uuid] = (price, side)
if price in self._l3_book[instrument].book[side]:
self._l3_book[instrument].book[side][price][uuid] = size
else:
self._l3_book[instrument].book[side][price] = {uuid: size}
delta[side].append((uuid, price, size))
                elif msg['Event'] == 'OrderChanged':
uuid = msg['Data']['OrderGuid']
size = msg['Data']['Volume']
side = BID if msg['Data']['OrderType'].endswith('Bid') else ASK
if uuid in self._order_ids[instrument]:
price, side = self._order_ids[instrument][uuid]
if size == 0:
del self._l3_book[instrument][side][price][uuid]
if len(self._l3_book[instrument][side][price]) == 0:
del self._l3_book[instrument][side][price]
else:
self._l3_book[instrument][side][price][uuid] = size
del self._order_ids[instrument][uuid]
delta[side].append((uuid, price, size))
else:
continue
else:
                    raise ValueError(f"{self.id}: Invalid OrderBook event message of type {msg}")
await self.book_callback(L3_BOOK, self._l3_book[instrument], timestamp, raw=msg, sequence_number=seq_no, delta=delta, timestamp=msg['Time'] / 1000)
async def _snapshot(self, base: str, quote: str):
url = self.rest_endpoints[0].route('l3book', self.sandbox).format(base, quote)
timestamp = time()
ret = await self.http_conn.read(url)
await asyncio.sleep(1 / self.request_limit)
ret = json.loads(ret, parse_float=Decimal)
normalized = self.exchange_symbol_to_std_symbol(f"{base}-{quote}")
self._l3_book[normalized] = OrderBook(self.id, normalized, max_depth=self.max_depth)
for side, key in [(BID, 'BuyOrders'), (ASK, 'SellOrders')]:
for order in ret[key]:
price = Decimal(order['Price'])
size = Decimal(order['Volume'])
uuid = order['Guid']
self._order_ids[normalized][uuid] = (price, side)
if price in self._l3_book[normalized].book[side]:
self._l3_book[normalized].book[side][price][uuid] = size
else:
self._l3_book[normalized].book[side][price] = {uuid: size}
await self.book_callback(L3_BOOK, self._l3_book[normalized], timestamp, raw=ret)
async def message_handler(self, msg: str, conn: AsyncConnection, timestamp: float):
msg = json.loads(msg, parse_float=Decimal)
if msg['Event'] == 'Trade':
await self._trade(msg, timestamp)
elif msg['Event'] in ('OrderCanceled', 'OrderChanged', 'NewOrder'):
await self._book(msg, timestamp)
elif msg['Event'] in ('Subscriptions', 'Heartbeat', 'Unsubscribe'):
return
else:
LOG.warning("%s: Invalid message type %s", self.id, msg)
async def subscribe(self, conn: AsyncConnection):
self.__reset()
subs = []
for chan, symbols in conn.subscription.items():
if self.exchange_channel_to_std(chan) == L3_BOOK:
subs.extend([chan.format(s) for s in set([sym.split("-")[0] for sym in symbols])])
else:
subs.extend([chan.format(s) for s in symbols])
await conn.write(json.dumps({"Event": "Subscribe", "Data": subs}))
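# Hedged usage sketch, not part of the adapter above: wiring the feed into a
# FeedHandler with simple print callbacks. The 'BTC-AUD' symbol and the callback
# bodies are illustrative assumptions; L3_BOOK and TRADES are the channel
# constants imported at the top of this module.
if __name__ == '__main__':
    from cryptofeed import FeedHandler

    async def _print_trade(trade, receipt_timestamp):
        print(trade)

    async def _print_book(book, receipt_timestamp):
        print(book.symbol, receipt_timestamp)

    fh = FeedHandler()
    fh.add_feed(
        IndependentReserve(
            symbols=['BTC-AUD'],
            channels=[L3_BOOK, TRADES],
            callbacks={TRADES: _print_trade, L3_BOOK: _print_book},
        )
    )
    fh.run()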
/retro_data_structures-0.23.0-py3-none-any.whl/retro_data_structures/properties/dkc_returns/archetypes/StunnedByGroundPoundBehaviorData.py
import dataclasses
import struct
import typing
from retro_data_structures.game_check import Game
from retro_data_structures.properties.base_property import BaseProperty
@dataclasses.dataclass()
class StunnedByGroundPoundBehaviorData(BaseProperty):
ground_pound_distance_vertical_multiplier: float = dataclasses.field(default=0.5)
stun_duration: float = dataclasses.field(default=3.0)
can_un_stun: bool = dataclasses.field(default=False)
minimum_stunned_time: float = dataclasses.field(default=0.33000001311302185)
re_stun_delay: float = dataclasses.field(default=0.0)
apply_boost_after_un_stun: bool = dataclasses.field(default=False)
apply_boost_after_stun: bool = dataclasses.field(default=False)
boost_duration: float = dataclasses.field(default=5.0)
boost_speed_modifier: float = dataclasses.field(default=1.0)
stun_only_when_on_ground: bool = dataclasses.field(default=False)
knockback_instead_of_stun: bool = dataclasses.field(default=False)
@classmethod
def game(cls) -> Game:
return Game.DKC_RETURNS
@classmethod
def from_stream(cls, data: typing.BinaryIO, size: typing.Optional[int] = None, default_override: typing.Optional[dict] = None):
property_count = struct.unpack(">H", data.read(2))[0]
if default_override is None and (result := _fast_decode(data, property_count)) is not None:
return result
present_fields = default_override or {}
for _ in range(property_count):
property_id, property_size = struct.unpack(">LH", data.read(6))
start = data.tell()
try:
property_name, decoder = _property_decoder[property_id]
present_fields[property_name] = decoder(data, property_size)
except KeyError:
raise RuntimeError(f"Unknown property: 0x{property_id:08x}")
assert data.tell() - start == property_size
return cls(**present_fields)
def to_stream(self, data: typing.BinaryIO, default_override: typing.Optional[dict] = None):
default_override = default_override or {}
data.write(b'\x00\x0b') # 11 properties
data.write(b'b\xf0\xcf\xfc') # 0x62f0cffc
data.write(b'\x00\x04') # size
data.write(struct.pack('>f', self.ground_pound_distance_vertical_multiplier))
data.write(b'-\x8d\xb3\x1d') # 0x2d8db31d
data.write(b'\x00\x04') # size
data.write(struct.pack('>f', self.stun_duration))
data.write(b'@\xa3r\xb6') # 0x40a372b6
data.write(b'\x00\x01') # size
data.write(struct.pack('>?', self.can_un_stun))
data.write(b'\x1a\xc8\x13\xb9') # 0x1ac813b9
data.write(b'\x00\x04') # size
data.write(struct.pack('>f', self.minimum_stunned_time))
data.write(b'\xba\xe2u^') # 0xbae2755e
data.write(b'\x00\x04') # size
data.write(struct.pack('>f', self.re_stun_delay))
data.write(b'\x0cC\x13D') # 0xc431344
data.write(b'\x00\x01') # size
data.write(struct.pack('>?', self.apply_boost_after_un_stun))
data.write(b'\x19wy\xd1') # 0x197779d1
data.write(b'\x00\x01') # size
data.write(struct.pack('>?', self.apply_boost_after_stun))
data.write(b'\xbaIr\xfe') # 0xba4972fe
data.write(b'\x00\x04') # size
data.write(struct.pack('>f', self.boost_duration))
data.write(b'\x85\xd3\x9c\xb7') # 0x85d39cb7
data.write(b'\x00\x04') # size
data.write(struct.pack('>f', self.boost_speed_modifier))
data.write(b'8q\x13%') # 0x38711325
data.write(b'\x00\x01') # size
data.write(struct.pack('>?', self.stun_only_when_on_ground))
data.write(b'\xe3\x8ar=') # 0xe38a723d
data.write(b'\x00\x01') # size
data.write(struct.pack('>?', self.knockback_instead_of_stun))
@classmethod
def from_json(cls, data: dict):
return cls(
ground_pound_distance_vertical_multiplier=data['ground_pound_distance_vertical_multiplier'],
stun_duration=data['stun_duration'],
can_un_stun=data['can_un_stun'],
minimum_stunned_time=data['minimum_stunned_time'],
re_stun_delay=data['re_stun_delay'],
apply_boost_after_un_stun=data['apply_boost_after_un_stun'],
apply_boost_after_stun=data['apply_boost_after_stun'],
boost_duration=data['boost_duration'],
boost_speed_modifier=data['boost_speed_modifier'],
stun_only_when_on_ground=data['stun_only_when_on_ground'],
knockback_instead_of_stun=data['knockback_instead_of_stun'],
)
def to_json(self) -> dict:
return {
'ground_pound_distance_vertical_multiplier': self.ground_pound_distance_vertical_multiplier,
'stun_duration': self.stun_duration,
'can_un_stun': self.can_un_stun,
'minimum_stunned_time': self.minimum_stunned_time,
're_stun_delay': self.re_stun_delay,
'apply_boost_after_un_stun': self.apply_boost_after_un_stun,
'apply_boost_after_stun': self.apply_boost_after_stun,
'boost_duration': self.boost_duration,
'boost_speed_modifier': self.boost_speed_modifier,
'stun_only_when_on_ground': self.stun_only_when_on_ground,
'knockback_instead_of_stun': self.knockback_instead_of_stun,
}
_FAST_FORMAT = None
_FAST_IDS = (0x62f0cffc, 0x2d8db31d, 0x40a372b6, 0x1ac813b9, 0xbae2755e, 0xc431344, 0x197779d1, 0xba4972fe, 0x85d39cb7, 0x38711325, 0xe38a723d)
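# Fast path note (added for clarity): when a stream contains exactly these 11
# property IDs in this canonical order, _fast_decode below unpacks them with a
# single struct call; otherwise it rewinds the stream and returns None so that
# from_stream falls back to the per-property decoders in _property_decoder.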
def _fast_decode(data: typing.BinaryIO, property_count: int) -> typing.Optional[StunnedByGroundPoundBehaviorData]:
if property_count != 11:
return None
global _FAST_FORMAT
if _FAST_FORMAT is None:
_FAST_FORMAT = struct.Struct('>LHfLHfLH?LHfLHfLH?LH?LHfLHfLH?LH?')
before = data.tell()
dec = _FAST_FORMAT.unpack(data.read(95))
if (dec[0], dec[3], dec[6], dec[9], dec[12], dec[15], dec[18], dec[21], dec[24], dec[27], dec[30]) != _FAST_IDS:
data.seek(before)
return None
return StunnedByGroundPoundBehaviorData(
dec[2],
dec[5],
dec[8],
dec[11],
dec[14],
dec[17],
dec[20],
dec[23],
dec[26],
dec[29],
dec[32],
)
def _decode_ground_pound_distance_vertical_multiplier(data: typing.BinaryIO, property_size: int):
return struct.unpack('>f', data.read(4))[0]
def _decode_stun_duration(data: typing.BinaryIO, property_size: int):
return struct.unpack('>f', data.read(4))[0]
def _decode_can_un_stun(data: typing.BinaryIO, property_size: int):
return struct.unpack('>?', data.read(1))[0]
def _decode_minimum_stunned_time(data: typing.BinaryIO, property_size: int):
return struct.unpack('>f', data.read(4))[0]
def _decode_re_stun_delay(data: typing.BinaryIO, property_size: int):
return struct.unpack('>f', data.read(4))[0]
def _decode_apply_boost_after_un_stun(data: typing.BinaryIO, property_size: int):
return struct.unpack('>?', data.read(1))[0]
def _decode_apply_boost_after_stun(data: typing.BinaryIO, property_size: int):
return struct.unpack('>?', data.read(1))[0]
def _decode_boost_duration(data: typing.BinaryIO, property_size: int):
return struct.unpack('>f', data.read(4))[0]
def _decode_boost_speed_modifier(data: typing.BinaryIO, property_size: int):
return struct.unpack('>f', data.read(4))[0]
def _decode_stun_only_when_on_ground(data: typing.BinaryIO, property_size: int):
return struct.unpack('>?', data.read(1))[0]
def _decode_knockback_instead_of_stun(data: typing.BinaryIO, property_size: int):
return struct.unpack('>?', data.read(1))[0]
_property_decoder: typing.Dict[int, typing.Tuple[str, typing.Callable[[typing.BinaryIO, int], typing.Any]]] = {
0x62f0cffc: ('ground_pound_distance_vertical_multiplier', _decode_ground_pound_distance_vertical_multiplier),
0x2d8db31d: ('stun_duration', _decode_stun_duration),
0x40a372b6: ('can_un_stun', _decode_can_un_stun),
0x1ac813b9: ('minimum_stunned_time', _decode_minimum_stunned_time),
0xbae2755e: ('re_stun_delay', _decode_re_stun_delay),
0xc431344: ('apply_boost_after_un_stun', _decode_apply_boost_after_un_stun),
0x197779d1: ('apply_boost_after_stun', _decode_apply_boost_after_stun),
0xba4972fe: ('boost_duration', _decode_boost_duration),
0x85d39cb7: ('boost_speed_modifier', _decode_boost_speed_modifier),
0x38711325: ('stun_only_when_on_ground', _decode_stun_only_when_on_ground),
0xe38a723d: ('knockback_instead_of_stun', _decode_knockback_instead_of_stun),
}
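if __name__ == "__main__":
    # Illustrative round-trip sketch, not part of the original module: serialize
    # an instance with to_stream and decode it again (here via the fast path).
    # The field values are made up and chosen to be exactly representable as
    # 32-bit floats so the default dataclass equality holds after the round trip.
    import io
    _original = StunnedByGroundPoundBehaviorData(
        ground_pound_distance_vertical_multiplier=1.5,
        stun_duration=2.0,
        can_un_stun=True,
        minimum_stunned_time=0.5,
        re_stun_delay=0.25,
        apply_boost_after_un_stun=False,
        apply_boost_after_stun=True,
        boost_duration=5.0,
        boost_speed_modifier=1.0,
        stun_only_when_on_ground=False,
        knockback_instead_of_stun=False,
    )
    _buffer = io.BytesIO()
    _original.to_stream(_buffer)
    _buffer.seek(0)
    print(StunnedByGroundPoundBehaviorData.from_stream(_buffer) == _original)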
|
PypiClean
|
/tw.dojo-0.9.181.tar.gz/tw.dojo-0.9.181/tw/dojo/static/1.8.1/debug/dojox/editor/plugins/nls/ru/latinEntities.js.uncompressed.js
|
define(
"dojox/editor/plugins/nls/ru/latinEntities", ({
/* These are already handled in the default RTE
amp:"ampersand",lt:"less-than sign",
gt:"greater-than sign",
nbsp:"no-break space\nnon-breaking space",
quot:"quote",
*/
iexcl:"инвертированный восклицательный знак",
cent:"символ цента",
pound:"символ фунта стерлингов",
curren:"символ денежной единицы",
yen:"символ иены\nсимвол юаня",
brvbar:"прерывистая черта\nвертикальная прерывистая черта",
sect:"символ раздела",
uml:"трема\nтрема с интервалом",
copy:"символ авторских прав",
ordf:"индикатор женского рода",
laquo:"левые двойный угловые кавычки\nлевые кавычки",
not:"знак отрицания",
shy:"мягкий перенос\nвозможный перенос",
reg:"символ Зарегистрирован\nсимвол зарегистрированного товарного знака",
macr:"знак долготы\nзнак долготы с интервалом\nверхняя черта\nнадчеркивание APL",
deg:"символ градусов",
plusmn:"символ плюс-минус\nсимвол плюса или минуса",
sup2:"верхний индекс два\nверхний индекс с цифрой два\nвозведение в квадрат",
sup3:"верхний индекс три\nверхний индекс с цифрой три\nвозведение в куб",
acute:"знак акут\nакут с интервалом",
micro:"знак микро",
para:"символ абзаца\nсимвол параграфа",
middot:"средняя точка\nзапятая в грузинском\nсредняя точка в греческом",
cedil:"седиль\nседиль с интервалом",
sup1:"верхний индекс один\nверхний индекс с цифрой один",
ordm:"индикатор мужского рода",
raquo:"правая двойная угловая кавычка\nправая кавычка",
frac14:"дробь одна четвертая\nодна четверть",
frac12:"дробь одна вторая\nодна вторая",
frac34:"дробь три четверти\nтри четверти",
iquest:"инвертированный знак вопроса\nперевернутый знак вопроса",
Agrave:"латинская прописная буква A с грависом",
Aacute:"латинская прописная буква A с акутом",
Acirc:"латинская прописная буква A с циркумфлексом",
Atilde:"латинская прописная буква A с тильдой",
Auml:"латинская прописная буква A с тремой",
Aring:"латинская прописная буква A с кружком сверху\nлатинская прописная буква A с кружком",
AElig:"латинская прописная буква AE\nлатинская прописная лигатура AE",
Ccedil:"латинская прописная буква C с седилью",
Egrave:"латинская прописная буква E с грависом",
Eacute:"латинская прописная буква E с акутом",
Ecirc:"латинская прописная буква E с циркумфлексом",
Euml:"латинская прописная буква E с тремой",
Igrave:"латинская прописная буква I с грависом",
Iacute:"латинская прописная буква I с акутом",
Icirc:"латинская прописная буква I с циркумфлексом",
Iuml:"латинская прописная буква I с тремой",
ETH:"латинская прописная буква ETH",
Ntilde:"латинская прописная буква N с тильдой",
Ograve:"латинская прописная буква O с грависом",
Oacute:"латинская прописная буква O с акутом",
Ocirc:"латинская прописная буква O с циркумфлексом",
Otilde:"латинская прописная буква O с тильдой",
Ouml:"латинская прописная буква O с тремой",
times:"знак умножения",
Oslash:"латинская прописная буква O перечеркнутая\nлатинская прописная буква O с вертикальной чертой",
Ugrave:"латинская прописная буква U с грависом",
Uacute:"латинская прописная буква U с акутом",
Ucirc:"латинская прописная буква U с циркумфлексом",
Uuml:"латинская прописная буква U с тремой",
Yacute:"латинская прописная буква Y с акутом",
THORN:"латинская прописная буква THORN",
szlig:"латинская строчная заостренная s\nэсцэт",
agrave:"латинская строчная буква a с грависом\nлатинская строчная a с грависом",
aacute:"латинская строчная буква a с акутом",
acirc:"латинская строчная буква a с циркумфлексом",
atilde:"латинская строчная буква a с тильдой",
auml:"латинская строчная буква a с тремой",
aring:"латинская строчная буква a с кружком сверху\nлатинская строчная a с кружком",
aelig:"латинская строчная буква ae\nлатинская строчная лигатура ae",
ccedil:"латинская строчная буква c с седилью",
egrave:"латинская строчная буква e с грависом",
eacute:"латинская строчная буква e с акутом",
ecirc:"латинская строчная буква e с циркумфлексом",
euml:"латинская строчная буква e с тремой",
igrave:"латинская строчная буква i с грависом",
iacute:"латинская строчная буква i с акутом",
icirc:"латинская строчная буква i с циркумфлексом",
iuml:"латинская строчная буква i с тремой",
eth:"латинская строчная буква eth",
ntilde:"латинская строчная буква n с тильдой",
ograve:"латинская строчная буква o с грависом",
oacute:"латинская строчная буква o с акутом",
ocirc:"латинская строчная буква o с циркумфлексом",
otilde:"латинская строчная буква o с тильдой",
ouml:"латинская строчная буква o с тремой",
divide:"знак деления",
oslash:"латинская строчная буква o перечеркнутая\nлатинская строчная буква o с вертикальной чертой",
ugrave:"латинская строчная буква u с грависом",
uacute:"латинская строчная буква u с акутом",
ucirc:"латинская строчная буква u с циркумфлексом",
uuml:"латинская строчная буква u с тремой",
yacute:"латинская строчная буква y с акутом",
thorn:"латинская строчная буква thorn",
yuml:"латинская строчная буква y с тремой",
// Greek Characters and Symbols
fnof:"латинская строчная буква f с хвостиком\nфункция\nфлорин",
Alpha:"греческая прописная буква альфа",
Beta:"греческая прописная буква бета",
Gamma:"греческая прописная буква гамма",
Delta:"греческая прописная буква дельта",
Epsilon:"греческая прописная буква эпсилон",
Zeta:"греческая прописная буква дзета",
Eta:"греческая прописная буква эта",
Theta:"греческая прописная буква тета",
Iota:"греческая прописная буква йота",
Kappa:"греческая прописная буква каппа",
Lambda:"греческая прописная буква лямбда",
Mu:"греческая прописная буква мю",
Nu:"греческая прописная буква ню",
Xi:"греческая прописная буква кси",
Omicron:"греческая прописная буква омикрон",
Pi:"греческая прописная буква пи",
Rho:"греческая прописная буква ро",
Sigma:"греческая прописная буква сигма",
Tau:"греческая прописная буква тау",
Upsilon:"греческая прописная буква ипсилон",
Phi:"греческая прописная буква фи",
Chi:"греческая прописная буква хи",
Psi:"греческая прописная буква пси",
Omega:"греческая прописная буква омега",
alpha:"греческая строчная буква альфа",
beta:"греческая строчная буква бета",
gamma:"греческая строчная буква гамма",
delta:"греческая строчная буква дельта",
epsilon:"греческая строчная буква эпсилон",
zeta:"греческая строчная буква дзета",
eta:"греческая строчная буква эта",
theta:"греческая строчная буква тета",
iota:"греческая строчная буква йота",
kappa:"греческая строчная буква каппа",
lambda:"греческая строчная буква лямбда",
mu:"греческая строчная буква мю",
nu:"греческая строчная буква ню",
xi:"греческая строчная буква хи",
omicron:"греческая строчная буква омикрон",
pi:"греческая строчная буква пи",
rho:"греческая строчная буква ро",
sigmaf:"греческая строчная конечная сигма",
sigma:"греческая строчная буква сигма",
tau:"греческая строчная буква тау",
upsilon:"греческая строчная буква ипсилон",
phi:"греческая строчная буква фи",
chi:"греческая строчная буква хи",
psi:"греческая строчная буква пси",
omega:"греческая строчная буква омега",
thetasym:"греческая символьная тета",
upsih:"греческий ипсилон с хвостиком",
piv:"греческая символьная пи",
bull:"маркер списка\nчерный маленький кружок",
hellip:"многоточие\nтри точки",
prime:"штрих\nминуты\nфуты",
Prime:"двойной штрих\nсекунды\nдюймы",
oline:"верхняя черта\nнадчеркивание с интервалом",
frasl:"косая черта",
weierp:"рукописная прописная P\nстепенное множество\nфункции Вейерштрасса",
image:"черная прописная I\nмнимая часть",
real:"черная прописная R\nвещественная часть",
trade:"символ товарного знака",
alefsym:"буква Алеф\nкардинальное число",
larr:"стрелка влево",
uarr:"стрелка вверх",
rarr:"стрелка вправо",
darr:"стрелка вниз",
harr:"стрелка влево-вправо",
crarr:"стрелка вниз с углом вправо\nвозврат каретки",
lArr:"двойная стрелка влево",
uArr:"двойная стрелка вверх",
rArr:"двойная стрелка вправо",
dArr:"двойная стрелка вниз",
hArr:"двойная стрелка влево-вправо",
forall:"для всех",
part:"частичный дифференциал",
exist:"существует",
empty:"пустой набор\nпустое множество\nдиаметр",
nabla:"оператор набла\nразностное отношение назад",
isin:"является элементом",
notin:"не является элементом",
ni:"содержит в качестве элемента",
prod:"n-арное произведение\nсимвол произведения",
sum:"n-арное суммирование",
minus:"знак минуса",
lowast:"оператор звездочка",
radic:"квадратный корень\nзнак корня",
prop:"пропорционально",
infin:"бесконечность",
ang:"угол",
and:"логическое И\nклин вверх",
or:"логическое ИЛИ\nклин вниз",
cap:"пересечение\nшапочка",
cup:"объединение\nчашечка","int":"интеграл",
there4:"следовательно",
sim:"оператор тильда\nизменяться с\nподобно",
cong:"приблизительно равно",
asymp:"почти равно\nасимптотично",
ne:"не равно",
equiv:"идентично",
le:"меньше или равно",
ge:"больше или равно",
sub:"подмножество",
sup:"включает в себя",
nsub:"не является подмножеством",
sube:"является подмножеством или эквивалентно",
supe:"включает в себя или эквивалентно",
oplus:"плюс в круге\nпрямая сумма",
otimes:"умножение в круге\nвекторное произведение",
perp:"перевернутый гвоздь\nортогонально к\nперпендикулярно",
sdot:"оператор точка",
lceil:"левая скобка округления вверх\nAPL upstile",
rceil:"правая скобка округления вверх",
lfloor:"левая скобка округления вниз\nAPL downstile",
rfloor:"правая скобка округления вниз",
lang:"левая угловая скобка",
rang:"правая угловая скобка",
loz:"ромб",
spades:"пики",
clubs:"крести\nтрилистник",
hearts:"червы\nвалентинка",
diams:"бубны",
OElig:"латинская прописная лигатура OE",
oelig:"латинская строчная лигатура oe",
Scaron:"латинская прописная буква S с галочкой",
scaron:"латинская строчная буква s с галочкой",
Yuml:"латинская прописная буква Y с тремой",
circ:"надстрочный знак циркумфлекс",
tilde:"малая тильда",
ensp:"пробел длины N",
emsp:"пробел длины M",
thinsp:"узкий пробел",
zwnj:"разделитель нулевой ширины",
zwj:"соединитель нулевой ширины",
lrm:"знак слева-направо",
rlm:"знак справа-налево",
ndash:"тире длины N",
mdash:"тире длины M",
lsquo:"левая одинарная кавычка",
rsquo:"правая одинарная кавычка",
sbquo:"одиночная нижняя кавычка",
ldquo:"левая двойная кавычка",
rdquo:"правая двойная кавычка",
bdquo:"двойная нижняя кавычка",
dagger:"крест",
Dagger:"двойной крест",
permil:"знак промилле",
lsaquo:"одинарная левая угловая кавычка",
rsaquo:"одинарная правая угловая кавычка",
euro:"символ евро"
})
);
|
PypiClean
|
/hdk_pkg_cri-0.0.8-py3-none-any.whl/hdk_pkg/process/func_BarPS.py
|
import numpy as np
import pandas as pd
from statsmodels.distributions.empirical_distribution import ECDF
#HA_Open = HA_Open_lb
#HA_Close = HA_Close_1b
def func_PS_Level(HA_Open, HA_Close, PS_pct_level=[0.35, 0.5, 0.95, 0.97], combine=False):
"""
0. This function calculates the HA bars' bar size level, also called Price Status (PS).
1. This function has 4 arguments (two optional) and returns 2 arrays as output.
2. Input arguments include:
(1) HA_Open: DataFrame.
(2) HA_Close: DataFrame.
(3) PS_pct_level: list, optional, default value is [0.35, 0.5, 0.95, 0.97]
(4) combine: boolean, optional, default value is False, which calculates the up bars and down bars separately,
while combine=True calculates them combined.
3. Output is 2 arrays, each containing 4 level values:
(1) HA_PS_positive_level
(2) HA_PS_negative_level
"""
# Initialize:
HA_num = len(HA_Close)
HA_level_num = len(PS_pct_level)
HA_bar_size = np.zeros((HA_num,1))
HA_bar_positive_size = np.zeros((HA_num,1))
HA_bar_negative_size = np.zeros((HA_num,1))
HA_PS_positive_level = np.zeros((HA_level_num,1))
HA_PS_negative_level = np.zeros((HA_level_num,1))
HA_positive_count = 0
HA_negative_count = 0
# HA_size & HA_ECDF
if combine == True:
HA_bar_size = abs(HA_Close - HA_Open)
HA_bar_positive_size = HA_bar_size
HA_bar_negative_size = -HA_bar_size
if combine == False:
# HA_size & HA_ECDF
for i in range(0, HA_num):
HA_bar_size[i, 0] = HA_Close[i] - HA_Open[i]
if HA_bar_size[i, 0] > 0:
HA_positive_count += 1
HA_bar_positive_size[HA_positive_count-1, 0] = HA_bar_size[i, 0]
if HA_bar_size[i, 0] < 0:
HA_negative_count += 1
HA_bar_negative_size[HA_negative_count-1, 0] = HA_bar_size[i, 0]
if HA_positive_count == 0:
HA_bar_positive_size = HA_bar_positive_size[0:HA_negative_count, 0]
else:
HA_bar_positive_size = HA_bar_positive_size[0:HA_positive_count, 0]
if HA_negative_count == 0:
HA_bar_negative_size = HA_bar_negative_size[0:HA_positive_count, 0]
else:
HA_bar_negative_size = HA_bar_negative_size[0:HA_negative_count, 0]
HA_positive_size = ECDF(HA_bar_positive_size).x
HA_positive_ecdf = ECDF(HA_bar_positive_size).y
HA_negative_size = ECDF(-HA_bar_negative_size).x
HA_negative_ecdf = ECDF(-HA_bar_negative_size).y
# Match ecdf with HA_bar_pct_level
for n in range(0, HA_level_num):
HA_PS_positive_level_idx = np.where(HA_positive_ecdf <= PS_pct_level[n])[0][-1]
HA_PS_positive_level[n] = HA_positive_size[HA_PS_positive_level_idx]
HA_PS_negative_level_idx = np.where(HA_negative_ecdf <= PS_pct_level[n])[0][-1]
HA_PS_negative_level[n] = -HA_negative_size[HA_PS_negative_level_idx]
return HA_PS_positive_level, HA_PS_negative_level
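# Note (added for clarity, not in the original module): with the default
# PS_pct_level, the two returned arrays hold (approximately) the 35th, 50th,
# 95th and 97th empirical percentiles of the positive bar sizes and of the
# negative bar sizes, the latter reported as negative numbers.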
#HA_Open = data_HA.HA_Open
#HA_Close = data_HA.HA_Close
#HA_PS_Lookback=PS_window
def func_BarPS(HA_Open, HA_Close, HA_PS_Lookback, PS_pct_level=[0.35, 0.5, 0.95, 0.97], combine=False):
"""
0. This function calculates the price trend number of an HA bar by looking back HA_PS_Lookback HA bars and,
according to the previous bars' distribution, finding the level (i.e. -4,-3,-2,-1,0,1,2,3,4) of the current bar.
1. This function has 5 arguments (two optional) and returns 1 DataFrame as output.
2. Input arguments include:
(1) HA_Open: DataFrame
(2) HA_Close: DataFrame
(3) HA_PS_Lookback: int, number of bars to look back.
(4) PS_pct_level: list, optional, default value is [0.35, 0.5, 0.95, 0.97]
(5) combine: boolean, optional, default value is False, which calculates the up bars and down bars separately,
while combine=True calculates them combined.
3. Output is 1 DataFrame:
(1) HA_PS: Shown as -4,-3,-2,-1,0,1,2,3,4, indicating the size level of HA bars.
"""
# Initialize:
HA_num = len(HA_Open)
HA_PS = np.zeros_like(HA_Open)
HA_Open = HA_Open.values
HA_Close = HA_Close.values
# Main:
for i in range(HA_PS_Lookback, HA_num):
HA_Open_lb = HA_Open [i-HA_PS_Lookback:i]
HA_Close_1b = HA_Close[i-HA_PS_Lookback:i]
HA_PS_positive_level, HA_PS_negative_level = func_PS_Level(HA_Open_lb, HA_Close_1b, PS_pct_level, combine)
HA_range = HA_Close[i] - HA_Open[i]
if HA_range > 0:
HA_PS_temp = np.where(HA_range <= HA_PS_positive_level)[0] + 1
if len(HA_PS_temp) != 0:
HA_PS[i] = HA_PS_temp[0] - 1
else:
HA_PS[i] = len(HA_PS_positive_level) # -1
if HA_range < 0:
HA_PS_temp = np.where(HA_range >= HA_PS_negative_level)[0] + 1
if len(HA_PS_temp) != 0:
HA_PS[i] = -HA_PS_temp[0] + 1
else:
HA_PS[i] = -len(HA_PS_negative_level) # +1
HA_PS_df = pd.DataFrame(HA_PS, columns=['PS'])
return HA_PS_df
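if __name__ == "__main__":
    # Illustrative usage sketch, not part of the original module: score 300
    # synthetic Heikin-Ashi bars with a 100-bar lookback window. The series
    # below are made up for demonstration only.
    _rng = np.random.default_rng(0)
    _close = pd.Series(np.cumsum(_rng.normal(size=300)) + 100.0)
    _open = _close.shift(1).fillna(_close.iloc[0])
    _ps = func_BarPS(_open, _close, HA_PS_Lookback=100)
    print(_ps['PS'].value_counts())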
|
PypiClean
|
/jupyterlab_remote_contents-0.1.1.tar.gz/jupyterlab_remote_contents-0.1.1/node_modules/cacache/README.md
|
# cacache
[`cacache`](https://github.com/npm/cacache) is a Node.js library for managing
local key and content address caches. It's really fast, really good at
concurrency, and it will never give you corrupted data, even if cache files
get corrupted or manipulated.
On systems that support user and group settings on files, cacache will
match the `uid` and `gid` values to the folder where the cache lives, even
when running as `root`.
It was written to be used as [npm](https://npm.im)'s local cache, but can
just as easily be used on its own.
## Install
`$ npm install --save cacache`
## Table of Contents
* [Example](#example)
* [Features](#features)
* [Contributing](#contributing)
* [API](#api)
* [Using localized APIs](#localized-api)
* Reading
* [`ls`](#ls)
* [`ls.stream`](#ls-stream)
* [`get`](#get-data)
* [`get.stream`](#get-stream)
* [`get.info`](#get-info)
* [`get.hasContent`](#get-hasContent)
* Writing
* [`put`](#put-data)
* [`put.stream`](#put-stream)
* [`rm.all`](#rm-all)
* [`rm.entry`](#rm-entry)
* [`rm.content`](#rm-content)
* [`index.compact`](#index-compact)
* [`index.insert`](#index-insert)
* Utilities
* [`clearMemoized`](#clear-memoized)
* [`tmp.mkdir`](#tmp-mkdir)
* [`tmp.withTmp`](#with-tmp)
* Integrity
* [Subresource Integrity](#integrity)
* [`verify`](#verify)
* [`verify.lastRun`](#verify-last-run)
### Example
```javascript
const cacache = require('cacache')
const fs = require('fs')
const tarball = '/path/to/mytar.tgz'
const cachePath = '/tmp/my-toy-cache'
const key = 'my-unique-key-1234'
// Cache it! Use `cachePath` as the root of the content cache
cacache.put(cachePath, key, '10293801983029384').then(integrity => {
console.log(`Saved content to ${cachePath}.`)
})
const destination = '/tmp/mytar.tgz'
// Copy the contents out of the cache and into their destination!
// But this time, use stream instead!
cacache.get.stream(
cachePath, key
).pipe(
fs.createWriteStream(destination)
).on('finish', () => {
console.log('done extracting!')
})
// The same thing, but skip the key index.
cacache.get.byDigest(cachePath, integrityHash).then(data => {
fs.writeFile(destination, data, err => {
console.log('tarball data fetched based on its sha512sum and written out!')
})
})
```
### Features
* Extraction by key or by content address (shasum, etc)
* [Subresource Integrity](#integrity) web standard support
* Multi-hash support - safely host sha1, sha512, etc, in a single cache
* Automatic content deduplication
* Fault tolerance (immune to corruption, partial writes, process races, etc)
* Consistency guarantees on read and write (full data verification)
* Lockless, high-concurrency cache access
* Streaming support
* Promise support
* Fast -- sub-millisecond reads and writes including verification
* Arbitrary metadata storage
* Garbage collection and additional offline verification
* Thorough test coverage
* There's probably a bloom filter in there somewhere. Those are cool, right? 🤔
### Contributing
The cacache team enthusiastically welcomes contributions and project participation! There's a bunch of things you can do if you want to contribute! The [Contributor Guide](CONTRIBUTING.md) has all the information you need for everything from reporting bugs to contributing entire new features. Please don't hesitate to jump in if you'd like to, or even ask us questions if something isn't clear.
All participants and maintainers in this project are expected to follow [Code of Conduct](CODE_OF_CONDUCT.md), and just generally be excellent to each other.
Please refer to the [Changelog](CHANGELOG.md) for project history details, too.
Happy hacking!
### API
#### <a name="ls"></a> `> cacache.ls(cache) -> Promise<Object>`
Lists info for all entries currently in the cache as a single large object. Each
entry in the object will be keyed by the unique index key, with corresponding
[`get.info`](#get-info) objects as the values.
##### Example
```javascript
cacache.ls(cachePath).then(console.log)
// Output
{
'my-thing': {
key: 'my-thing',
integrity: 'sha512-BaSe64/EnCoDED+HAsh=='
path: '.testcache/content/deadbeef', // joined with `cachePath`
time: 12345698490,
size: 4023948,
metadata: {
name: 'blah',
version: '1.2.3',
description: 'this was once a package but now it is my-thing'
}
},
'other-thing': {
key: 'other-thing',
integrity: 'sha1-ANothER+hasH=',
path: '.testcache/content/bada55',
time: 11992309289,
size: 111112
}
}
```
#### <a name="ls-stream"></a> `> cacache.ls.stream(cache) -> Readable`
Lists info for all entries currently in the cache as a single large object.
This works just like [`ls`](#ls), except [`get.info`](#get-info) entries are
returned as `'data'` events on the returned stream.
##### Example
```javascript
cacache.ls.stream(cachePath).on('data', console.log)
// Output
{
key: 'my-thing',
integrity: 'sha512-BaSe64HaSh',
path: '.testcache/content/deadbeef', // joined with `cachePath`
time: 12345698490,
size: 13423,
metadata: {
name: 'blah',
version: '1.2.3',
description: 'this was once a package but now it is my-thing'
}
}
{
key: 'other-thing',
integrity: 'whirlpool-WoWSoMuchSupport',
path: '.testcache/content/bada55',
time: 11992309289,
size: 498023984029
}
{
...
}
```
#### <a name="get-data"></a> `> cacache.get(cache, key, [opts]) -> Promise({data, metadata, integrity})`
Returns an object with the cached data, digest, and metadata identified by
`key`. The `data` property of this object will be a `Buffer` instance that
presumably holds some data that means something to you. I'm sure you know what
to do with it! cacache just won't care.
`integrity` is a [Subresource
Integrity](#integrity)
string. That is, a string that can be used to verify `data`, which looks like
`<hash-algorithm>-<base64-integrity-hash>`.
If there is no content identified by `key`, or if the locally-stored data does
not pass the validity checksum, the promise will be rejected.
A sub-function, `get.byDigest` may be used for identical behavior, except lookup
will happen by integrity hash, bypassing the index entirely. This version of the
function *only* returns `data` itself, without any wrapper.
See: [options](#get-options)
##### Note
This function loads the entire cache entry into memory before returning it. If
you're dealing with Very Large data, consider using [`get.stream`](#get-stream)
instead.
##### Example
```javascript
// Look up by key
cache.get(cachePath, 'my-thing').then(console.log)
// Output:
{
metadata: {
thingName: 'my'
},
integrity: 'sha512-BaSe64HaSh',
data: Buffer#<deadbeef>,
size: 9320
}
// Look up by digest
cache.get.byDigest(cachePath, 'sha512-BaSe64HaSh').then(console.log)
// Output:
Buffer#<deadbeef>
```
#### <a name="get-stream"></a> `> cacache.get.stream(cache, key, [opts]) -> Readable`
Returns a [Readable Stream](https://nodejs.org/api/stream.html#stream_readable_streams) of the cached data identified by `key`.
If there is no content identified by `key`, or if the locally-stored data does
not pass the validity checksum, an error will be emitted.
`metadata` and `integrity` events will be emitted before the stream closes, if
you need to collect that extra data about the cached entry.
A sub-function, `get.stream.byDigest` may be used for identical behavior,
except lookup will happen by integrity hash, bypassing the index entirely. This
version does not emit the `metadata` and `integrity` events at all.
See: [options](#get-options)
##### Example
```javascript
// Look up by key
cache.get.stream(
cachePath, 'my-thing'
).on('metadata', metadata => {
console.log('metadata:', metadata)
}).on('integrity', integrity => {
console.log('integrity:', integrity)
}).pipe(
fs.createWriteStream('./x.tgz')
)
// Outputs:
metadata: { ... }
integrity: 'sha512-SoMeDIGest+64=='
// Look up by digest
cache.get.stream.byDigest(
cachePath, 'sha512-SoMeDIGest+64=='
).pipe(
fs.createWriteStream('./x.tgz')
)
```
#### <a name="get-info"></a> `> cacache.get.info(cache, key) -> Promise`
Looks up `key` in the cache index, returning information about the entry if
one exists.
##### Fields
* `key` - Key the entry was looked up under. Matches the `key` argument.
* `integrity` - [Subresource Integrity hash](#integrity) for the content this entry refers to.
* `path` - Filesystem path where content is stored, joined with `cache` argument.
* `time` - Timestamp the entry was first added on.
* `metadata` - User-assigned metadata associated with the entry/content.
##### Example
```javascript
cacache.get.info(cachePath, 'my-thing').then(console.log)
// Output
{
key: 'my-thing',
integrity: 'sha256-MUSTVERIFY+ALL/THINGS=='
path: '.testcache/content/deadbeef',
time: 12345698490,
size: 849234,
metadata: {
name: 'blah',
version: '1.2.3',
description: 'this was once a package but now it is my-thing'
}
}
```
#### <a name="get-hasContent"></a> `> cacache.get.hasContent(cache, integrity) -> Promise`
Looks up a [Subresource Integrity hash](#integrity) in the cache. If content
exists for this `integrity`, it will return an object with the specific single integrity hash
that was found under the `sri` key, and the size of the found content as `size`. If no content exists for this integrity, it will return `false`.
##### Example
```javascript
cacache.get.hasContent(cachePath, 'sha256-MUSTVERIFY+ALL/THINGS==').then(console.log)
// Output
{
sri: {
source: 'sha256-MUSTVERIFY+ALL/THINGS==',
algorithm: 'sha256',
digest: 'MUSTVERIFY+ALL/THINGS==',
options: []
},
size: 9001
}
cacache.get.hasContent(cachePath, 'sha521-NOT+IN/CACHE==').then(console.log)
// Output
false
```
##### <a name="get-options"></a> Options
##### `opts.integrity`
If present, the pre-calculated digest for the inserted content. If this option
is provided and does not match the post-insertion digest, insertion will fail
with an `EINTEGRITY` error.
##### `opts.memoize`
Default: null
If explicitly truthy, cacache will read from memory and memoize data on bulk read. If `false`, cacache will read from disk data. Reader functions by default read from in-memory cache.
##### `opts.size`
If provided, the data stream will be verified to check that enough data was
passed through. If there's more or less data than expected, insertion will fail
with an `EBADSIZE` error.
#### <a name="put-data"></a> `> cacache.put(cache, key, data, [opts]) -> Promise`
Inserts data passed to it into the cache. The returned Promise resolves with a
digest (generated according to [`opts.algorithms`](#optsalgorithms)) after the
cache entry has been successfully written.
See: [options](#put-options)
##### Example
```javascript
fetch(
'https://registry.npmjs.org/cacache/-/cacache-1.0.0.tgz'
).then(data => {
return cacache.put(cachePath, 'registry.npmjs.org|[email protected]', data)
}).then(integrity => {
console.log('integrity hash is', integrity)
})
```
#### <a name="put-stream"></a> `> cacache.put.stream(cache, key, [opts]) -> Writable`
Returns a [Writable
Stream](https://nodejs.org/api/stream.html#stream_writable_streams) that inserts
data written to it into the cache. Emits an `integrity` event with the digest of
written contents when it succeeds.
See: [options](#put-options)
##### Example
```javascript
request.get(
'https://registry.npmjs.org/cacache/-/cacache-1.0.0.tgz'
).pipe(
cacache.put.stream(
cachePath, 'registry.npmjs.org|[email protected]'
).on('integrity', d => console.log(`integrity digest is ${d}`))
)
```
##### <a name="put-options"></a> Options
##### `opts.metadata`
Arbitrary metadata to be attached to the inserted key.
##### `opts.size`
If provided, the data stream will be verified to check that enough data was
passed through. If there's more or less data than expected, insertion will fail
with an `EBADSIZE` error.
##### `opts.integrity`
If present, the pre-calculated digest for the inserted content. If this option
is provided and does not match the post-insertion digest, insertion will fail
with an `EINTEGRITY` error.
`algorithms` has no effect if this option is present.
##### `opts.algorithms`
Default: ['sha512']
Hashing algorithms to use when calculating the [subresource integrity
digest](#integrity)
for inserted data. Can use any algorithm listed in `crypto.getHashes()` or
`'omakase'`/`'お任せします'` to pick a random hash algorithm on each insertion. You
may also use any anagram of `'modnar'` to use this feature.
Currently only supports one algorithm at a time (i.e., an array length of
exactly `1`). Has no effect if `opts.integrity` is present.
##### `opts.memoize`
Default: null
If provided, cacache will memoize the given cache insertion in memory, bypassing
any filesystem checks for that key or digest in future cache fetches. Nothing
will be written to the in-memory cache unless this option is explicitly truthy.
If `opts.memoize` is an object or a `Map`-like (that is, an object with `get`
and `set` methods), it will be written to instead of the global memoization
cache.
Reading from disk data can be forced by explicitly passing `memoize: false` to
the reader functions, but their default will be to read from memory.
##### `opts.tmpPrefix`
Default: null
Prefix to append on the temporary directory name inside the cache's tmp dir.
#### <a name="rm-all"></a> `> cacache.rm.all(cache) -> Promise`
Clears the entire cache. Mainly by blowing away the cache directory itself.
##### Example
```javascript
cacache.rm.all(cachePath).then(() => {
console.log('THE APOCALYPSE IS UPON US 😱')
})
```
#### <a name="rm-entry"></a> `> cacache.rm.entry(cache, key, [opts]) -> Promise`
Alias: `cacache.rm`
Removes the index entry for `key`. Content will still be accessible if
requested directly by content address ([`get.stream.byDigest`](#get-stream)).
By default, this appends a new entry to the index with an integrity of `null`.
If `opts.removeFully` is set to `true` then the index file itself will be
physically deleted rather than appending a `null`.
To remove the content itself (which might still be used by other entries), use
[`rm.content`](#rm-content). Or, to safely vacuum any unused content, use
[`verify`](#verify).
##### Example
```javascript
cacache.rm.entry(cachePath, 'my-thing').then(() => {
console.log('I did not like it anyway')
})
```
#### <a name="rm-content"></a> `> cacache.rm.content(cache, integrity) -> Promise`
Removes the content identified by `integrity`. Any index entries referring to it
will not be usable again until the content is re-added to the cache with an
identical digest.
##### Example
```javascript
cacache.rm.content(cachePath, 'sha512-SoMeDIGest/IN+BaSE64==').then(() => {
console.log('data for my-thing is gone!')
})
```
#### <a name="index-compact"></a> `> cacache.index.compact(cache, key, matchFn, [opts]) -> Promise`
Uses `matchFn`, which must be a synchronous function that accepts two entries
and returns a boolean indicating whether or not the two entries match, to
deduplicate all entries in the cache for the given `key`.
If `opts.validateEntry` is provided, it will be called as a function with the
only parameter being a single index entry. The function must return a Boolean,
if it returns `true` the entry is considered valid and will be kept in the index,
if it returns `false` the entry will be removed from the index.
If `opts.validateEntry` is not provided, however, every entry in the index will
be deduplicated and kept until the first `null` integrity is reached, removing
all entries that were written before the `null`.
The deduplicated list of entries is both written to the index, replacing the
existing content, and returned in the Promise.
#### <a name="index-insert"></a> `> cacache.index.insert(cache, key, integrity, opts) -> Promise`
Writes an index entry to the cache for the given `key` without writing content.
It is assumed if you are using this method, you have already stored the content
some other way and you only wish to add a new index to that content. The `metadata`
and `size` properties are read from `opts` and used as part of the index entry.
Returns a Promise resolving to the newly added entry.
#### <a name="clear-memoized"></a> `> cacache.clearMemoized()`
Completely resets the in-memory entry cache.
#### <a name="tmp-mkdir"></a> `> tmp.mkdir(cache, opts) -> Promise<Path>`
Returns a unique temporary directory inside the cache's `tmp` dir. This
directory will use the same safe user assignment that all the other stuff use.
Once the directory is made, it's the user's responsibility that all files
within are given the appropriate `gid`/`uid` ownership settings to match
the rest of the cache. If not, you can ask cacache to do it for you by
calling [`tmp.fix()`](#tmp-fix), which will fix all tmp directory
permissions.
If you want automatic cleanup of this directory, use
[`tmp.withTmp()`](#with-tmp)
See: [options](#tmp-options)
##### Example
```javascript
cacache.tmp.mkdir(cache).then(dir => {
fs.writeFile(path.join(dir, 'blablabla'), Buffer#<1234>, ...)
})
```
#### <a name="tmp-fix"></a> `> tmp.fix(cache) -> Promise`
Sets the `uid` and `gid` properties on all files and folders within the tmp
folder to match the rest of the cache.
Use this after manually writing files into [`tmp.mkdir`](#tmp-mkdir) or
[`tmp.withTmp`](#with-tmp).
##### Example
```javascript
cacache.tmp.mkdir(cache).then(dir => {
writeFile(path.join(dir, 'file'), someData).then(() => {
// make sure we didn't just put a root-owned file in the cache
cacache.tmp.fix().then(() => {
// all uids and gids match now
})
})
})
```
#### <a name="with-tmp"></a> `> tmp.withTmp(cache, opts, cb) -> Promise`
Creates a temporary directory with [`tmp.mkdir()`](#tmp-mkdir) and calls `cb`
with it. The created temporary directory is removed automatically once the
promise returned by `cb()` resolves.
The same caveats apply when it comes to managing permissions for the tmp dir's
contents.
See: [options](#tmp-options)
##### Example
```javascript
cacache.tmp.withTmp(cache, dir => {
return fs.writeFileAsync(path.join(dir, 'blablabla'), Buffer#<1234>, ...)
}).then(() => {
// `dir` no longer exists
})
```
##### <a name="tmp-options"></a> Options
##### `opts.tmpPrefix`
Default: null
Prefix to append on the temporary directory name inside the cache's tmp dir.
#### <a name="integrity"></a> Subresource Integrity Digests
For content verification and addressing, cacache uses strings following the
[Subresource
Integrity spec](https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity).
That is, any time cacache expects an `integrity` argument or option, it
should be in the format `<hashAlgorithm>-<base64-hash>`.
One deviation from the current spec is that cacache will support any hash
algorithms supported by the underlying Node.js process. You can use
`crypto.getHashes()` to see which ones you can use.
##### Generating Digests Yourself
If you have an existing content shasum, they are generally formatted as a
hexadecimal string (that is, a sha1 would look like:
`5f5513f8822fdbe5145af33b64d8d970dcf95c6e`). In order to be compatible with
cacache, you'll need to convert this to an equivalent subresource integrity
string. For this example, the corresponding hash would be:
`sha1-X1UT+IIv2+UUWvM7ZNjZcNz5XG4=`.
If you want to generate an integrity string yourself for existing data, you can
use something like this:
```javascript
const crypto = require('crypto')
const hashAlgorithm = 'sha512'
const data = 'foobarbaz'
const integrity = (
hashAlgorithm +
'-' +
crypto.createHash(hashAlgorithm).update(data).digest('base64')
)
```
You can also use [`ssri`](https://npm.im/ssri) to have a richer set of functionality
around SRI strings, including generation, parsing, and translating from existing
hex-formatted strings.
#### <a name="verify"></a> `> cacache.verify(cache, opts) -> Promise`
Checks out and fixes up your cache:
* Cleans up corrupted or invalid index entries.
* Custom entry filtering options.
* Garbage collects any content entries not referenced by the index.
* Checks integrity for all content entries and removes invalid content.
* Fixes cache ownership.
* Removes the `tmp` directory in the cache and all its contents.
When it's done, it'll return an object with various stats about the verification
process, including amount of storage reclaimed, number of valid entries, number
of entries removed, etc.
##### <a name="verify-options"></a> Options
##### `opts.concurrency`
Default: 20
Number of concurrently read files in the filesystem while doing clean up.
##### `opts.filter`
Receives a formatted entry. Return false to remove it.
Note: might be called more than once on the same entry.
##### `opts.log`
Custom logger function:
```
log: { silly () {} }
log.silly('verify', 'verifying cache at', cache)
```
##### Example
```sh
echo somegarbage >> $CACHEPATH/content/deadbeef
```
```javascript
cacache.verify(cachePath).then(stats => {
// deadbeef collected, because of invalid checksum.
console.log('cache is much nicer now! stats:', stats)
})
```
#### <a name="verify-last-run"></a> `> cacache.verify.lastRun(cache) -> Promise`
Returns a `Date` representing the last time `cacache.verify` was run on `cache`.
##### Example
```javascript
cacache.verify(cachePath).then(() => {
cacache.verify.lastRun(cachePath).then(lastTime => {
console.log('cacache.verify was last called on' + lastTime)
})
})
```
|
PypiClean
|
/black-it-0.2.1.tar.gz/black-it-0.2.1/black_it/samplers/particle_swarm.py
|
"""Implementation of the particle swarm sampler."""
from typing import Optional, cast
import numpy as np
from numpy.typing import NDArray
from black_it.samplers.base import BaseSampler
from black_it.search_space import SearchSpace
from black_it.utils.base import _assert, digitize_data, positive_float
class ParticleSwarmSampler(
BaseSampler
): # pylint: disable=(too-many-instance-attributes)
"""
Implementation of a particle swarm sampler.
This sampler implements the particle swarm sampling method commonly used in particle swarm optimization (PSO),
introduced in:
Eberhart, Russell, and James Kennedy. "A new optimizer using particle swarm theory."
MHS'95. Proceedings of the sixth international symposium on micro machine and human science. IEEE, 1995.
In a particle swarm optimizer, there is a set of particles that are "evolved" by cooperation and competition
among the individuals themselves through generations. Each particle adjusts its flying according to its own
flying experience and its companions’ flying experience. Each particle, in fact, represents a potential solution
to a problem. Each particle is treated as a point in a D-dimensional space. The ith particle is represented as
Xi = (x_{i1},...,x_{iD}). The best previous position (the position giving the best fitness value) of any particle
is recorded and represented as Pi = (p_{i1},...,p_{iD}). The index of the best particle among all the particles
in the population is represented by the symbol g. The rate of the position change (velocity) for particle i is
represented as Vi = (v_{i1},...,v_{iD}). The particles are manipulated according to the following equation:
v_{id} = (ω * v_{id}) + (c1 * r1 * (p_{id} - x_{id})) + (c2 * r2 * (p_{gd} - x_{id}))
x_{id} = x_{id} + v_{id}
Where:
- ω is the inertia weight to control the influence of the previous velocity;
- c1 and c2 are positive values that represent the acceleration constants;
- r1 and r2 are two random numbers uniformly distributed in the range of (0, 1).
Note that p_{gd}, the global best position found across the dynamics, can optionally be computed by also
considering the sampling performed by other samplers in order to let them interfere constructively with the
Particle Swarm Sampler.
"""
def __init__(
self,
batch_size: int,
random_state: Optional[int] = None,
inertia: float = 0.9,
c1: float = 0.1,
c2: float = 0.1,
global_minimum_across_samplers: bool = False,
) -> None:
"""
Initialize the sampler.
Args:
batch_size: the number of points sampled every time the sampler is called
random_state: the random state of the sampler, fixing this number the sampler behaves deterministically
inertia: the inertia of the particles' motion
c1: first acceleration constant
c2: second acceleration constant
global_minimum_across_samplers: if True, the global minimum attractor of the particles' dynamics is computed
taking into account the parameters sampled by other samplers as well; default is False
"""
# max_deduplication_passes must be zero because the sampler is stateful
super().__init__(
batch_size, random_state=random_state, max_deduplication_passes=0
)
# The batch size is the number of sampled parameters per iteration. In a Black-it sampler, each call to
# sample_batch represents an iteration of the particle swarm sampler, so it seems natural to set the number of
# particles to the batch size, as at each iteration sample_batch returns the current positions of the
# particles.
self.nb_particles = batch_size
self._inertia = positive_float(inertia)
self._c1 = positive_float(c1)
self._c2 = positive_float(c2)
self._global_minimum_across_samplers = global_minimum_across_samplers
# all current particle positions; shape=(nb_particles, space dimensions)
self._curr_particle_positions: Optional[NDArray] = None
# all current particle velocities; shape=(nb_particles, space dimensions)
self._curr_particle_velocities: Optional[NDArray] = None
# best positions visited by each particle so far; shape=(nb_particles, space dimensions)
self._best_particle_positions: Optional[NDArray] = None
# losses of the best positions
self._best_position_losses: Optional[NDArray] = None
# particle id of the global best particle position
self._global_best_particle_id: Optional[int] = None
# best point in parameter space - could be the best across samplers
self._best_point: Optional[NDArray] = None
self._previous_batch_index_start: Optional[int] = None
@property
def is_set_up(self) -> bool:
"""Return true iff the sampler is already set up."""
return self._curr_particle_positions is not None
@property
def inertia(self) -> float:
"""Get the inertia weight."""
return self._inertia
@property
def c1(self) -> float:
"""Get the c1 constant."""
return self._c1
@property
def c2(self) -> float:
"""Get the c2 constant."""
return self._c2
def _set_up(self, dims: int) -> None:
"""Set up the sampler."""
self._curr_particle_positions = self.random_generator.random(
size=(self.batch_size, dims)
)
self._curr_particle_velocities = (
self.random_generator.random(
size=cast(NDArray, self._curr_particle_positions).shape
)
- 0.5
)
self._best_particle_positions = self._curr_particle_positions
# set losses to inf as we are interested to the min
self._best_position_losses = np.full(self.nb_particles, np.inf)
# we don't know yet which is the best index - initialize to 0
self._global_best_particle_id = 0
def _get_best_position(self) -> NDArray[np.float64]:
"""
Get the position corresponding to the global optimum the particles should converge to.
If _global_minimum_across_samplers is False, then this method returns the current position
of the particle that in its history has sampled, so far, the best set of parameters.
Else, if _global_minimum_across_samplers is True, then this method returns the point
in parameter space that achieved the minimum loss. Note that this point could have been
sampled by a different sampler than "self".
Returns:
a Numpy array
"""
if not self._global_minimum_across_samplers:
best_particle_positions = cast(NDArray, self._best_particle_positions)
return best_particle_positions[self._global_best_particle_id]
return cast(NDArray, self._best_point)
def reset(self) -> None:
"""Reset the sampler."""
self._curr_particle_positions = None
self._curr_particle_velocities = None
self._best_particle_positions = None
self._best_position_losses = None
self._global_best_particle_id = None
self._previous_batch_index_start = None
_assert(
not self.is_set_up,
error_message="reset call did not work, sampler still set up",
exception_class=RuntimeError,
)
def sample_batch(
self,
batch_size: int,
search_space: SearchSpace,
existing_points: NDArray[np.float64],
existing_losses: NDArray[np.float64],
) -> NDArray[np.float64]:
"""Sample a batch of parameters."""
if not self.is_set_up:
self._set_up(search_space.dims)
self._previous_batch_index_start = len(existing_points)
return digitize_data(
cast(NDArray[np.float64], self._best_particle_positions),
search_space.param_grid,
)
self._update_best(existing_points, existing_losses)
self._do_step()
p_bounds: NDArray[np.float64] = search_space.parameters_bounds
sampled_points = p_bounds[0] + self._curr_particle_positions * (
p_bounds[1] - p_bounds[0]
)
self._previous_batch_index_start = len(existing_points)
return digitize_data(sampled_points, search_space.param_grid)
def _update_best(
self, existing_points: NDArray[np.float64], existing_losses: NDArray[np.float64]
) -> None:
"""Update the best local and global positions."""
_assert(
self._previous_batch_index_start is not None,
exception_class=AssertionError,
error_message="should have been set",
)
# set best loss and best point
best_point_index = np.argmin(existing_losses)
self._best_point = existing_points[best_point_index]
# set best particle position
batch_index_start = cast(int, self._previous_batch_index_start)
batch_index_stop = batch_index_start + self.batch_size
previous_points = existing_points[batch_index_start:batch_index_stop]
previous_losses = existing_losses[batch_index_start:batch_index_stop]
for particle_id, (point, loss) in enumerate(
zip(previous_points, previous_losses)
):
best_particle_positions = cast(NDArray, self._best_particle_positions)
best_position_losses = cast(NDArray, self._best_position_losses)
if best_position_losses[particle_id] > loss:
best_particle_positions[particle_id] = point
best_position_losses[particle_id] = loss
# check if also the global best should be updated
best_global_loss = best_position_losses[self._global_best_particle_id]
if loss < best_global_loss:
self._global_best_particle_id = particle_id
def _do_step(self) -> None:
"""Do a step by updating particle positions and velocities."""
curr_particle_positions = cast(NDArray, self._curr_particle_positions)
curr_particle_velocities = cast(NDArray, self._curr_particle_velocities)
best_particle_positions = cast(NDArray, self._best_particle_positions)
r1_vec = self.random_generator.random(size=curr_particle_positions.shape)
r2_vec = self.random_generator.random(size=curr_particle_positions.shape)
new_particle_velocities = (
self.inertia * curr_particle_velocities
+ self.c1
* r1_vec
* (best_particle_positions - self._curr_particle_positions)
+ self.c2
* r2_vec
* (self._get_best_position() - self._curr_particle_positions) # type: ignore
)
self._curr_particle_positions = np.clip(
self._curr_particle_positions + new_particle_velocities,
a_min=0.0,
a_max=1.0,
)
self._curr_particle_velocities = new_particle_velocities
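# Illustrative sketch (not part of the original module): the velocity/position
# update performed in `_do_step`, isolated for two particles in one dimension.
# All names below are local to this example.
#
#   import numpy as np
#   rng = np.random.default_rng(0)
#   x = np.array([[0.2], [0.8]])        # current positions, scaled to [0, 1]
#   v = np.array([[0.05], [-0.03]])     # current velocities
#   p_best = np.array([[0.3], [0.7]])   # best position seen by each particle
#   g_best = np.array([0.5])            # global best position
#   inertia, c1, c2 = 0.9, 0.1, 0.1
#   r1, r2 = rng.random(x.shape), rng.random(x.shape)
#   v_new = inertia * v + c1 * r1 * (p_best - x) + c2 * r2 * (g_best - x)
#   x_new = np.clip(x + v_new, 0.0, 1.0)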
|
PypiClean
|
/finam-0.4.0-py3-none-any.whl/finam-0.4.0.dist-info/licenses/LICENSE.md
|
# SOFTWARE LICENCE
FINAM is an open-source component-based model coupling framework for environmental models.
## Copyright Notice
Copyright © 2023, the FINAM developers from Helmholtz-Zentrum für Umweltforschung GmbH - UFZ. All rights reserved.
***The code is a property of:***
> Helmholtz-Zentrum für Umweltforschung GmbH - UFZ<br/>
> Registered Office: Leipzig<br/>
> Registration Office: Amtsgericht Leipzig<br/>
> Trade Register Nr. B 4703<br/>
The list of FINAM developers is provided in the AUTHORS.md file.
***Contact:***
- FINAM Admins (E-mail: <[email protected]>)
- Martin Lange (E-mail: <[email protected]>)
- Sebastian Müller (E-mail: <[email protected]>)
- Stephan Thober (E-mail: <[email protected]>)
- Sabine Attinger (E-mail: <[email protected]>)
> Department Computational Hydrosystems (CHS)<br/>
> Helmholtz Centre for Environmental Research - UFZ<br/>
> Permoserstr. 15<br/>
> 04318 Leipzig, Germany
This program is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License along with this program.
It can be found in the files `COPYING` and `COPYING.LESSER` provided with this software.
The complete GNU license text can also be found at < https://www.gnu.org/licenses/>.
## Redistribution
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
- Redistributions of source code must retain the above copyright notice, the list of conditions for redistribution and modification as well as the following GNU Lesser General Public License.
- Redistributions in binary form must reproduce the above copyright notice, this list of conditions, the following GNU Lesser General Public License and the modification conditions in the documentation and/or other materials provided with the distribution.
- Neither the name of Helmholtz-Zentrum für Umweltforschung GmbH - UFZ, nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
## Modification
If software is modified to produce derivative works, such modified software should be clearly marked, so as not to confuse it with the version available from Helmholtz-Zentrum für Umweltforschung GmbH - UFZ.
|
PypiClean
|
/maykin_json_logic_py-0.11.0-py3-none-any.whl/json_logic/meta/expressions.py
|
from dataclasses import dataclass
from typing import cast
from ..typing import JSON, Primitive
from .base import Operation
NormalizedExpression = dict[str, list[JSON]]
JSONLogicExpressionTree = Operation | Primitive | list
def destructure(expression: NormalizedExpression) -> tuple[str, list[JSON]]:
"""
Decompose a normalized expression into the operator and arguments.
"""
operator_keys = [key for key in expression.keys() if not key.startswith("_")]
assert len(operator_keys) == 1, "Logic expression must have only one operator"
operator = operator_keys[0]
values = expression[operator]
return (operator, values)
@dataclass
class JSONLogicExpression:
expression: NormalizedExpression | Primitive | list[JSON]
@staticmethod
def normalize(expression: JSON) -> NormalizedExpression | Primitive | list[JSON]:
"""
Remove syntactic sugar for unary operators.
Normalization happens only on the provided expression, not on nested
expressions inside.
"""
# we only normalize one level at a time
if isinstance(expression, (list, Primitive)):
return cast(Primitive | list, expression)
assert isinstance(expression, dict)
operator, values = destructure(cast(dict, expression))
if not isinstance(values, list):
values = [values]
# make sure to keep any additional extension keys
return cast(NormalizedExpression, {**expression, operator: values})
@classmethod
def from_expression(cls, expression: JSON) -> "JSONLogicExpression":
normalized = cls.normalize(expression)
return cls(normalized)
def as_tree(self) -> JSONLogicExpressionTree:
"""
Convert the JSON expression into a tree with Operation nodes.
"""
if isinstance(self.expression, Primitive):
return self.expression
if isinstance(self.expression, list):
return [self.from_expression(child).as_tree() for child in self.expression]
assert isinstance(self.expression, dict)
operator, values = destructure(self.expression)
arguments = [
JSONLogicExpression.from_expression(value).as_tree() for value in values
]
return Operation.for_operator(
operator,
arguments=arguments,
source_expression=self.expression,
)
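# Illustrative sketch (not part of the original module): building an Operation
# tree from a plain JSON Logic expression. Unary syntactic sugar is normalized
# first, so {"var": "age"} becomes {"var": ["age"]} before the tree is built.
#
#   expression = {"==": [{"var": "age"}, 18]}
#   tree = JSONLogicExpression.from_expression(expression).as_tree()
#   # -> an Operation node for "==" whose first argument is an Operation node
#   #    for "var" and whose second argument is the primitive 18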
|
PypiClean
|
/ai_flow-0.3.1.tar.gz/ai_flow-0.3.1/airflow/models/connection.py
|
import json
import warnings
from json import JSONDecodeError
from typing import Dict, Optional
from urllib.parse import parse_qsl, quote, unquote, urlencode, urlparse
from sqlalchemy import Boolean, Column, Integer, String, Text
from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy.orm import synonym
from airflow.configuration import ensure_secrets_loaded
from airflow.exceptions import AirflowException, AirflowNotFoundException
from airflow.models.base import ID_LEN, Base
from airflow.models.crypto import get_fernet
from airflow.providers_manager import ProvidersManager
from airflow.utils.log.logging_mixin import LoggingMixin
from airflow.utils.module_loading import import_string
def parse_netloc_to_hostname(*args, **kwargs):
"""This method is deprecated."""
warnings.warn("This method is deprecated.", DeprecationWarning)
return _parse_netloc_to_hostname(*args, **kwargs)
# Python automatically converts all letters to lowercase in hostname
# See: https://issues.apache.org/jira/browse/AIRFLOW-3615
def _parse_netloc_to_hostname(uri_parts):
"""Parse a URI string to get correct Hostname."""
hostname = unquote(uri_parts.hostname or '')
if '/' in hostname:
hostname = uri_parts.netloc
if "@" in hostname:
hostname = hostname.rsplit("@", 1)[1]
if ":" in hostname:
hostname = hostname.split(":", 1)[0]
hostname = unquote(hostname)
return hostname
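# For example (illustrative): _parse_netloc_to_hostname(urlparse("postgres://user:pwd@localhost:5432/db"))
# returns "localhost". A percent-encoded host such as "some%2Fhost" decodes to a
# hostname containing "/", so the netloc branch above is taken and the function
# returns "some/host".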
class Connection(Base, LoggingMixin): # pylint: disable=too-many-instance-attributes
"""
Placeholder to store information about different database instances
connection information. The idea here is that scripts use references to
database instances (conn_id) instead of hard coding hostname, logins and
passwords when using operators or hooks.
.. seealso::
For more information on how to use this class, see: :doc:`/howto/connection`
:param conn_id: The connection ID.
:type conn_id: str
:param conn_type: The connection type.
:type conn_type: str
:param description: The connection description.
:type description: str
:param host: The host.
:type host: str
:param login: The login.
:type login: str
:param password: The password.
:type password: str
:param schema: The schema.
:type schema: str
:param port: The port number.
:type port: int
:param extra: Extra metadata. Non-standard data such as private/SSH keys can be saved here. JSON
encoded object.
:type extra: str
:param uri: URI address describing connection parameters.
:type uri: str
"""
__tablename__ = "connection"
id = Column(Integer(), primary_key=True)
conn_id = Column(String(ID_LEN), unique=True, nullable=False)
conn_type = Column(String(500), nullable=False)
description = Column(Text(5000))
host = Column(String(500))
schema = Column(String(500))
login = Column(String(500))
_password = Column('password', String(5000))
port = Column(Integer())
is_encrypted = Column(Boolean, unique=False, default=False)
is_extra_encrypted = Column(Boolean, unique=False, default=False)
_extra = Column('extra', String(5000))
def __init__( # pylint: disable=too-many-arguments
self,
conn_id: Optional[str] = None,
conn_type: Optional[str] = None,
description: Optional[str] = None,
host: Optional[str] = None,
login: Optional[str] = None,
password: Optional[str] = None,
schema: Optional[str] = None,
port: Optional[int] = None,
extra: Optional[str] = None,
uri: Optional[str] = None,
):
super().__init__()
self.conn_id = conn_id
self.description = description
if uri and ( # pylint: disable=too-many-boolean-expressions
conn_type or host or login or password or schema or port or extra
):
raise AirflowException(
"You must create an object using the URI or individual values "
"(conn_type, host, login, password, schema, port or extra)."
"You can't mix these two ways to create this object."
)
if uri:
self._parse_from_uri(uri)
else:
self.conn_type = conn_type
self.host = host
self.login = login
self.password = password
self.schema = schema
self.port = port
self.extra = extra
def parse_from_uri(self, **uri):
"""This method is deprecated. Please use uri parameter in constructor."""
warnings.warn(
"This method is deprecated. Please use uri parameter in constructor.", DeprecationWarning
)
self._parse_from_uri(**uri)
def _parse_from_uri(self, uri: str):
uri_parts = urlparse(uri)
conn_type = uri_parts.scheme
if conn_type == 'postgresql':
conn_type = 'postgres'
elif '-' in conn_type:
conn_type = conn_type.replace('-', '_')
self.conn_type = conn_type
self.host = _parse_netloc_to_hostname(uri_parts)
quoted_schema = uri_parts.path[1:]
self.schema = unquote(quoted_schema) if quoted_schema else quoted_schema
self.login = unquote(uri_parts.username) if uri_parts.username else uri_parts.username
self.password = unquote(uri_parts.password) if uri_parts.password else uri_parts.password
self.port = uri_parts.port
if uri_parts.query:
self.extra = json.dumps(dict(parse_qsl(uri_parts.query, keep_blank_values=True)))
def get_uri(self) -> str:
"""Return connection in URI format"""
uri = '{}://'.format(str(self.conn_type).lower().replace('_', '-'))
authority_block = ''
if self.login is not None:
authority_block += quote(self.login, safe='')
if self.password is not None:
authority_block += ':' + quote(self.password, safe='')
if authority_block > '':
authority_block += '@'
uri += authority_block
host_block = ''
if self.host:
host_block += quote(self.host, safe='')
if self.port:
if host_block > '':
host_block += f':{self.port}'
else:
host_block += f'@:{self.port}'
if self.schema:
host_block += '/{}'.format(quote(self.schema, safe=''))
uri += host_block
if self.extra_dejson:
uri += '?{}'.format(urlencode(self.extra_dejson))
return uri
def get_password(self) -> Optional[str]:
"""Return encrypted password."""
if self._password and self.is_encrypted:
fernet = get_fernet()
if not fernet.is_encrypted:
raise AirflowException(
"Can't decrypt encrypted password for login={}, \
FERNET_KEY configuration is missing".format(
self.login
)
)
return fernet.decrypt(bytes(self._password, 'utf-8')).decode()
else:
return self._password
def set_password(self, value: Optional[str]):
"""Encrypt password and set in object attribute."""
if value:
fernet = get_fernet()
self._password = fernet.encrypt(bytes(value, 'utf-8')).decode()
self.is_encrypted = fernet.is_encrypted
@declared_attr
def password(cls): # pylint: disable=no-self-argument
"""Password. The value is decrypted/encrypted when reading/setting the value."""
return synonym('_password', descriptor=property(cls.get_password, cls.set_password))
def get_extra(self) -> Optional[str]:
"""Return the stored extra field, decrypted if it was encrypted."""
if self._extra and self.is_extra_encrypted:
fernet = get_fernet()
if not fernet.is_encrypted:
raise AirflowException(
"Can't decrypt `extra` params for login={},\
FERNET_KEY configuration is missing".format(
self.login
)
)
return fernet.decrypt(bytes(self._extra, 'utf-8')).decode()
else:
return self._extra
def set_extra(self, value: str):
"""Encrypt extra-data and save in object attribute to object."""
if value:
fernet = get_fernet()
self._extra = fernet.encrypt(bytes(value, 'utf-8')).decode()
self.is_extra_encrypted = fernet.is_encrypted
else:
self._extra = value
self.is_extra_encrypted = False
@declared_attr
def extra(cls): # pylint: disable=no-self-argument
"""Extra data. The value is decrypted/encrypted when reading/setting the value."""
return synonym('_extra', descriptor=property(cls.get_extra, cls.set_extra))
def rotate_fernet_key(self):
"""Encrypts data with a new key. See: :ref:`security/fernet`"""
fernet = get_fernet()
if self._password and self.is_encrypted:
self._password = fernet.rotate(self._password.encode('utf-8')).decode()
if self._extra and self.is_extra_encrypted:
self._extra = fernet.rotate(self._extra.encode('utf-8')).decode()
def get_hook(self):
"""Return hook based on conn_type."""
hook_class_name, conn_id_param, package_name, hook_name = ProvidersManager().hooks.get(
self.conn_type, (None, None, None, None)
)
if not hook_class_name:
raise AirflowException(f'Unknown hook type "{self.conn_type}"')
try:
hook_class = import_string(hook_class_name)
except ImportError:
warnings.warn(
f"Could not import {hook_class_name} when discovering {hook_name} {package_name}"
)
raise
return hook_class(**{conn_id_param: self.conn_id})
def __repr__(self):
return self.conn_id
def log_info(self):
"""
This method is deprecated. You can read each field individually or use the
default representation (`__repr__`).
"""
warnings.warn(
"This method is deprecated. You can read each field individually or "
"use the default representation (__repr__).",
DeprecationWarning,
stacklevel=2,
)
return "id: {}. Host: {}, Port: {}, Schema: {}, Login: {}, Password: {}, extra: {}".format(
self.conn_id,
self.host,
self.port,
self.schema,
self.login,
"XXXXXXXX" if self.password else None,
"XXXXXXXX" if self.extra_dejson else None,
)
def debug_info(self):
"""
This method is deprecated. You can read each field individually or use the
default representation (`__repr__`).
"""
warnings.warn(
"This method is deprecated. You can read each field individually or "
"use the default representation (__repr__).",
DeprecationWarning,
stacklevel=2,
)
return "id: {}. Host: {}, Port: {}, Schema: {}, Login: {}, Password: {}, extra: {}".format(
self.conn_id,
self.host,
self.port,
self.schema,
self.login,
"XXXXXXXX" if self.password else None,
self.extra_dejson,
)
@property
def extra_dejson(self) -> Dict:
"""Returns the extra property by deserializing json."""
obj = {}
if self.extra:
try:
obj = json.loads(self.extra)
except JSONDecodeError as e:
self.log.exception(e)
self.log.error("Failed parsing the json for conn_id %s", self.conn_id)
return obj
@classmethod
def get_connection_from_secrets(cls, conn_id: str) -> 'Connection':
"""
Get connection by conn_id.
:param conn_id: connection id
:return: connection
"""
for secrets_backend in ensure_secrets_loaded():
conn = secrets_backend.get_connection(conn_id=conn_id)
if conn:
return conn
raise AirflowNotFoundException(f"The conn_id `{conn_id}` isn't defined")
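# --- Illustrative usage (editor's sketch, not part of the original module) ---
# How the URI round-trip implemented by _parse_from_uri/get_uri is expected to
# behave. The connection values are made up, and actually running this needs a
# configured Airflow environment (the password property goes through Fernet).
#
#   conn = Connection(
#       conn_id="my_postgres",
#       uri="postgresql://user:p%40ss@db.example.com:5432/mydb?sslmode=require",
#   )
#   conn.conn_type        # "postgres"  (scheme normalized by _parse_from_uri)
#   conn.password         # "p@ss"      (percent-decoded on parse)
#   conn.extra_dejson     # {"sslmode": "require"}
#   conn.get_uri()        # "postgres://user:p%40ss@db.example.com:5432/mydb?sslmode=require"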
|
PypiClean
|
/azure-mgmt-cognitiveservices-13.5.0.zip/azure-mgmt-cognitiveservices-13.5.0/azure/mgmt/cognitiveservices/operations/_models_operations.py
|
from typing import Any, Callable, Dict, Iterable, Optional, TypeVar
import urllib.parse
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
from .._serialization import Serializer
from .._vendor import CognitiveServicesManagementClientMixinABC, _convert_request, _format_url_section
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_list_request(location: str, subscription_id: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-05-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/providers/Microsoft.CognitiveServices/locations/{location}/models",
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
"location": _SERIALIZER.url("location", location, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
class ModelsOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.cognitiveservices.CognitiveServicesManagementClient`'s
:attr:`models` attribute.
"""
models = _models
def __init__(self, *args, **kwargs):
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@distributed_trace
def list(self, location: str, **kwargs: Any) -> Iterable["_models.Model"]:
"""List Models.
:param location: Resource location. Required.
:type location: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either Model or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.cognitiveservices.models.Model]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ModelListResult] = kwargs.pop("cls", None)
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_list_request(
location=location,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
# make call to next link with the client's api-version
_parsed_next_link = urllib.parse.urlparse(next_link)
_next_request_params = case_insensitive_dict(
{
key: [urllib.parse.quote(v) for v in value]
for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
}
)
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("ModelListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem) # type: ignore
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(get_next, extract_data)
list.metadata = {
"url": "/subscriptions/{subscriptionId}/providers/Microsoft.CognitiveServices/locations/{location}/models"
}
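# --- Illustrative usage (editor's sketch, not part of the original module) ---
# How the paged `list` operation above is typically consumed from the
# management client; the credential type, subscription id and location are
# placeholders.
#
#   from azure.identity import DefaultAzureCredential
#   from azure.mgmt.cognitiveservices import CognitiveServicesManagementClient
#
#   client = CognitiveServicesManagementClient(DefaultAzureCredential(), "<subscription-id>")
#   for model in client.models.list(location="westus"):   # ItemPaged follows next_link paging
#       print(model)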
|
PypiClean
|
/django-rest-framework-paginations-0.0.1.tar.gz/django-rest-framework-paginations-0.0.1/README.rst
|
django-rest-framework-paginations
======================================
|build-status-image| |pypi-version|
Overview
--------
Extra paginations for the rest framework
Requirements
------------
- Python (2.7, 3.3, 3.4)
- Django (1.6, 1.7, 1.8)
- Django REST Framework (2.4, 3.0, 3.1)
Installation
------------
Install using ``pip``\ …
.. code:: bash
$ pip install django-rest-framework-paginations
Example
-------
TODO: Write example.
Testing
-------
Install testing requirements.
.. code:: bash
$ pip install -r requirements.txt
Run the tests with ``runtests.py``.
.. code:: bash
$ ./runtests.py
You can also use the excellent `tox`_ testing tool to run the tests
against all supported versions of Python and Django. Install tox
globally, and then simply run:
.. code:: bash
$ tox
Documentation
-------------
To build the documentation, you’ll need to install ``mkdocs``.
.. code:: bash
$ pip install mkdocs
To preview the documentation:
.. code:: bash
$ mkdocs serve
Running at: http://127.0.0.1:8000/
To build the documentation:
.. code:: bash
$ mkdocs build
.. _tox: http://tox.readthedocs.org/en/latest/
.. |build-status-image| image:: https://secure.travis-ci.org/variable/django-rest-framework-paginations.svg?branch=master
:target: http://travis-ci.org/variable/django-rest-framework-paginations?branch=master
.. |pypi-version| image:: https://img.shields.io/pypi/v/django-rest-framework-paginations.svg
:target: https://pypi.python.org/pypi/django-rest-framework-paginations
|
PypiClean
|
/i3-layouts-fixed-0.13.3.tar.gz/i3-layouts-fixed-0.13.3/i3l/handlers.py
|
from i3ipc import Connection, TickEvent
from i3ipc.events import WorkspaceEvent, WindowEvent
import logging
from i3l.options import LayoutName
from i3l.splitter import Mark
from i3l.state import State, RebuildCause, is_layout_container, is_floating_container
from i3l.layouts import Layouts
from i3l.ticks import Tick
logger = logging.getLogger(__name__)
def on_tick(layouts: Layouts, state: State):
def _on_tick(i3l: Connection, e: TickEvent):
logger.debug(f'[ipc] tick event - payload:{e.payload}')
if not e.payload.startswith('i3-layouts'):
return
context = state.sync_context(i3l)
tokens = e.payload.split(' ')
action_name = tokens[1]
action_params = tokens[2:]
tick = Tick.create(layouts, state, action_name)
if tick is not None:
tick.do(context, action_params)
return _on_tick
def on_workspace_focus(layouts: Layouts, state: State):
def _on_workspace_focus(i3l: Connection, e: WorkspaceEvent):
logger.debug(f'[ipc] workspace focus event - workspace:{e.current.name}, old:{e.old.name if e.old else "none"}')
context = state.sync_context(i3l)
if layouts.exists_for(e.current.name):
logger.debug(f' [ipc] workspace layouts exists for {e.current.name}')
sequence = state.add_workspace_sequence(e.current.name)
if state.prev_workspace_name != e.current.name and sequence.is_stale:
layout = layouts.get(context.workspace.name)
con_id = sequence.stale_con_id
state.start_rebuild(RebuildCause.WORKSPACE_FOCUS, context,
layout.mark_main(), layout.mark_last(), con_id)
sequence.set_stale(False)
elif state.prev_workspace_name != e.current.name:
state.end_rebuild(context, RebuildCause.WORKSPACE_FOCUS)
else:
logger.debug(f' [ipc] no workspace layouts exists for {e.current.name}')
state.end_rebuild(context, RebuildCause.WORKSPACE_FOCUS)
state.prev_workspace_name = e.current.name
if e.old:
state.old_workspace_name = e.old.name
if layouts.exists_for(e.old.name):
state.add_workspace_sequence(e.old.name)
return _on_workspace_focus
def on_window_close(layouts: Layouts, state: State):
def _on_window_close(i3l: Connection, e: WindowEvent):
logger.debug(f'[ipc] window close event - container:{e.container.id}')
context = state.sync_context(i3l)
if not layouts.exists_for(context.workspace.name):
logger.debug(' [ipc] window close event - no workspace layout')
return
if not state.rebuild_closed_container(e.container.window):
layout = layouts.get(context.workspace.name)
state.start_rebuild(RebuildCause.WINDOW_CLOSE, context,
layout.mark_main(), layout.mark_last(), e.container.id)
return _on_window_close
def on_window_move(layouts: Layouts, state: State):
def _on_window_move(i3l: Connection, e: WindowEvent):
logger.debug(f'[ipc] window move event - container:{e.container.id}')
context = state.sync_context(i3l)
if context.contains_container(e.container.id) or e.container.type != 'con':
logger.debug(' [ipc] window move event - inside workspace')
return
if layouts.exists_for(state.old_workspace_name):
logger.debug(' [ipc] window move event - to another workspace')
sequence = state.get_workspace_sequence(state.old_workspace_name)
sequence.set_order(e.container)
sequence.set_stale(True, e.container.id)
if layouts.exists_for(context.workspace.name):
layout = layouts.get(context.workspace.name)
state.start_rebuild(RebuildCause.WINDOW_MOVE, context,
layout.mark_main(), layout.mark_last(), e.container.id)
return _on_window_move
def on_window_new(layouts: Layouts, state: State):
def _on_window_new(i3l: Connection, e: WindowEvent):
logger.debug(f'[ipc] window new event - container:{e.container.id}:{e.container.window}')
context = state.sync_context(i3l)
if not layouts.exists_for(context.workspace.name) or context.workspace_sequence is None:
logger.debug(' [ipc] window new event - no workspace layout')
return
if not is_layout_container(e.container):
logger.debug(' [ipc] window new event - not a layout container')
return
if len(context.containers) == 0:
logger.debug(' [ipc] window new event - no container to handle')
return
context.workspace_sequence.set_order(e.container)
logger.debug(' [ipc] window new event - update layout')
layout = layouts.get(context.workspace.name)
layout.update(context, e.container)
state.handle_rebuild(context, e.container)
return _on_window_new
def on_window_floating(layouts: Layouts, state: State):
def _on_window_floating(i3l: Connection, e: WindowEvent):
logger.debug(f'[ipc] window floating event - container:{e.container.id}:{e.container.window}')
if is_floating_container(e.container):
if not state.is_last_container_rebuilt(e.container):
state.rebuild_action.container_id_to_focus = e.container.id
on_window_close(layouts, state)(i3l, e)
else:
state.rebuild_action.last_container_rebuilt = None
context = state.sync_context(i3l)
context.exec(f'[con_id={e.container.id}] floating disable')
else:
on_window_new(layouts, state)(i3l, e)
return _on_window_floating
def on_window_focus(layouts: Layouts, state: State):
def _on_window_focus(i3l: Connection, e: WindowEvent):
logger.debug(f'[ipc] window focus event - container:{e.container.id}:{e.container.window}')
context = state.sync_context(i3l)
layout = layouts.get(context.workspace.name)
focused_container = i3l.get_tree().find_focused()
if not is_layout_container(focused_container):
logger.debug(' [ipc] window focus event - not a layout container')
return
previous_mark = Mark.previous()
current_mark = Mark.current()
i3l.command(f'[con_mark="{current_mark}"] mark --add {previous_mark}')
i3l.command(f'[con_id="{focused_container.id}"] mark --add {current_mark}')
if layout is None:
logger.debug(' [ipc] window focus event - no workspace layout')
return
if layout.name != LayoutName.AUTOSPLIT:
logger.debug(' [ipc] window focus event - workspace layout not autosplit')
return
logger.debug(' [ipc] window focus event - update layout')
layout.update(context, focused_container)
return _on_window_focus
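# --- Illustrative wiring (editor's sketch, not part of the original module) ---
# How these handler factories are typically attached to an i3ipc Connection.
# The Layouts([]) and State() constructor calls are simplified placeholders and
# may not match the package's real entry point.
#
#   i3 = Connection()
#   layouts, state = Layouts([]), State()
#   i3.on('tick', on_tick(layouts, state))
#   i3.on('workspace::focus', on_workspace_focus(layouts, state))
#   i3.on('window::new', on_window_new(layouts, state))
#   i3.on('window::close', on_window_close(layouts, state))
#   i3.on('window::move', on_window_move(layouts, state))
#   i3.on('window::floating', on_window_floating(layouts, state))
#   i3.on('window::focus', on_window_focus(layouts, state))
#   i3.main()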
|
PypiClean
|
/mliamlib-1.0.7.tar.gz/mliamlib-1.0.7/mlib/memory.py
|
import os
from six.moves import cPickle
import resource
import mlib.formatting
import numpy as N
# Report memory footprint. This is an over-estimate, as Python always requests more memory than is currently being used.
def report_current_memory_usage():
return mlib.formatting.SI(resource.getrusage(resource.RUSAGE_SELF).ru_maxrss * 1e3) + "B"
class TotalMemory(object):
""" Takes a snapshot of free host memory, then differences future values to figure out how much memory was taken up.
Don't expect reliability if other people are on the same system. """
def __init__(self):
self.baseline = TotalMemory.get_free_memory_gb()
@staticmethod
def get_free_memory_gb():
data = {}
with open('/proc/meminfo', 'r') as f:
for line in f:
key, value = line.strip().split(':')
if key == 'MemFree':
return float(value.strip().split(' ')[0]) / (1024 * 1024)
return N.nan
def __str__(self):
current = TotalMemory.get_free_memory_gb()
return '%.2f GB' % (self.baseline - current)
# Run a function in its own fork, let it do its work, return its result, and let the fork exit to fully
# release its memory.
def run_and_reclaim_memory(func, *args, **kwds):
pread, pwrite = os.pipe()
pid = os.fork()
if pid > 0:
os.close(pwrite)
with os.fdopen(pread, 'rb') as f:
status, result = cPickle.load(f)
os.waitpid(pid, 0)
if status == 0:
return result
else:
raise result
else:
os.close(pread)
try:
result = func(*args, **kwds)
status = 0
except Exception as exc:
result = exc
status = 1
with os.fdopen(pwrite, 'wb') as f:
try:
cPickle.dump((status, result), f, cPickle.HIGHEST_PROTOCOL)
except cPickle.PicklingError as exc:
cPickle.dump((2, exc), f, cPickle.HIGHEST_PROTOCOL)
os._exit(0)
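# --- Illustrative usage (editor's sketch, not part of the original module) ---
# Running an allocation-heavy function through run_and_reclaim_memory so the
# memory is released when the forked child exits; only the pickled result
# crosses the pipe back to the parent. The array size is arbitrary.
#
#   def allocate_and_sum(n):
#       data = N.ones(n)               # large temporary lives only in the fork
#       return float(data.sum())
#
#   before = report_current_memory_usage()
#   total = run_and_reclaim_memory(allocate_and_sum, 10000000)
#   after = report_current_memory_usage()
#   print(total, before, after)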
|
PypiClean
|
/alipay-sdk-python-pycryptodome-3.3.202.tar.gz/alipay-sdk-python-pycryptodome-3.3.202/alipay/aop/api/domain/AlipayEcoMycarMaintainOrderstatusUpdateModel.py
|
import json
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.MaintainOrderStatusExtParams import MaintainOrderStatusExtParams
class AlipayEcoMycarMaintainOrderstatusUpdateModel(object):
def __init__(self):
self._ext_param = None
self._industry_code = None
self._order_no = None
self._order_status = None
self._type = None
@property
def ext_param(self):
return self._ext_param
@ext_param.setter
def ext_param(self, value):
if isinstance(value, MaintainOrderStatusExtParams):
self._ext_param = value
else:
self._ext_param = MaintainOrderStatusExtParams.from_alipay_dict(value)
@property
def industry_code(self):
return self._industry_code
@industry_code.setter
def industry_code(self, value):
self._industry_code = value
@property
def order_no(self):
return self._order_no
@order_no.setter
def order_no(self, value):
self._order_no = value
@property
def order_status(self):
return self._order_status
@order_status.setter
def order_status(self, value):
self._order_status = value
@property
def type(self):
return self._type
@type.setter
def type(self, value):
self._type = value
def to_alipay_dict(self):
params = dict()
if self.ext_param:
if hasattr(self.ext_param, 'to_alipay_dict'):
params['ext_param'] = self.ext_param.to_alipay_dict()
else:
params['ext_param'] = self.ext_param
if self.industry_code:
if hasattr(self.industry_code, 'to_alipay_dict'):
params['industry_code'] = self.industry_code.to_alipay_dict()
else:
params['industry_code'] = self.industry_code
if self.order_no:
if hasattr(self.order_no, 'to_alipay_dict'):
params['order_no'] = self.order_no.to_alipay_dict()
else:
params['order_no'] = self.order_no
if self.order_status:
if hasattr(self.order_status, 'to_alipay_dict'):
params['order_status'] = self.order_status.to_alipay_dict()
else:
params['order_status'] = self.order_status
if self.type:
if hasattr(self.type, 'to_alipay_dict'):
params['type'] = self.type.to_alipay_dict()
else:
params['type'] = self.type
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = AlipayEcoMycarMaintainOrderstatusUpdateModel()
if 'ext_param' in d:
o.ext_param = d['ext_param']
if 'industry_code' in d:
o.industry_code = d['industry_code']
if 'order_no' in d:
o.order_no = d['order_no']
if 'order_status' in d:
o.order_status = d['order_status']
if 'type' in d:
o.type = d['type']
return o
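# --- Illustrative usage (editor's sketch, not part of the original module) ---
# Round-tripping the model through its dict representation, which is what the
# SDK serializes into the API request; the field values are made up.
#
#   model = AlipayEcoMycarMaintainOrderstatusUpdateModel()
#   model.order_no = "20230101000001"
#   model.order_status = 2
#   model.industry_code = "MAINTAIN"
#   payload = model.to_alipay_dict()
#   print(json.dumps(payload))
#   restored = AlipayEcoMycarMaintainOrderstatusUpdateModel.from_alipay_dict(payload)
#   print(restored.order_no, restored.order_status)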
|
PypiClean
|
/referredby-0.1.4.tar.gz/referredby-0.1.4/README.rst
|
referredby
==========
.. image:: https://travis-ci.org/larsyencken/referredby.png
A Python module for parsing referrer URLs, in particular for common search engines.
Its main entry point is the ``referredby.who`` method::
>>> import referredby
>>> referredby.who('http://id.search.yahoo.com/search?fr=mkg030&p=friendly%20cat')
SearchEngine(name='Yahoo! Indonesia', domain='id.search.yahoo.com', keywords=['friendly', 'cat'])
The list of search engines that it matches is borrowed from Spiros Denaxas's `URI::ParseSearchString <https://github.com/spiros/URI-ParseSearchString>`_ project.
Release notes
=============
0.1.4
-----
- Add a bunch of new Google country domains
- Support Python 3
0.1.3
-----
- Add ``googleadservices.com`` domain
0.1.2
-----
- Improve matching of Yahoo mail
- Add mail engine matching to the ``who()`` method
0.1.1
-----
- Add more flexible matching of Yahoo search domains
.. image:: https://d2weczhvl823v0.cloudfront.net/larsyencken/referredby/trend.png
:alt: Bitdeli badge
:target: https://bitdeli.com/free
|
PypiClean
|
/xicam.SAXS-2.2.2.tar.gz/xicam.SAXS-2.2.2/xicam/SAXS/operations/qbackgroundfit.py
|
import numpy as np
from astropy.modeling import fitting, models
from astropy.modeling import Fittable1DModel
from xicam.plugins.operationplugin import OperationPlugin
from typing import Tuple
from enum import Enum
from xicam.plugins import manager as pluginmanager
class QBackgroundFit(OperationPlugin):
name = "Q Background Fit"
def __init__(self):
super(QBackgroundFit, self).__init__()
self.peakranges = []
def wireup_parameter(self, parameter):
parameter.sigValueChanged.connect(self.value_changed)
def value_changed(self, *args, **kwargs):
print(f"QBackgroundFit.value_changed: {args} {kwargs}")
self.find_peak_ranges()
def find_peak_ranges(self):
# FIXME: operation plugin has no reference to workflow..., how do we find peak ranges?
print("find_peak_ranges")
...
# must be a late import to avoid being picked up first by plugin manager
# from xicam.SAXS.operations.astropyfit import AstropyQSpectraFit
# thisindex = self._workflow.processes.index(self)
# self.peakranges = [(process.domainmin.value, process.domainmax.value)
# for process in self._workflow.processes[thisindex + 1:]
# if isinstance(process, AstropyQSpectraFit)]
def _func(self,
q: np.ndarray,
iq: np.ndarray,
model: Enum,
domain_min: float,
domain_max: float,
degree: int = 4):
model = models.Polynomial1D(degree=degree)
norange = domain_min == domain_max
if domain_min is None and q is not None or norange: # truncate the q and I arrays with limits
domain_min = q.min()
if domain_max is None and q is not None or norange: # truncate the q and I arrays with limits
domain_max = q.max()
filter = np.logical_and(domain_min <= q, q <= domain_max)
for peakrange in self.peakranges:
print('applying peak range:', peakrange)
filter &= np.logical_or(peakrange[0] >= q, q >= peakrange[1])
q = q[filter]
iq = iq[filter]
background_model = fitting.LinearLSQFitter()(model, q, iq)
background_profile = background_model(q)  # evaluate the fitted astropy model over q
raw_iq = iq.copy()
iq = iq - background_profile
return q, iq, background_model, background_profile, raw_iq
# class QBackgroundFit(ProcessingPlugin):
# name = 'Q Background Fit'
#
# q = InOut(description='Q bin center positions',
# type=np.array)
# Iq = InOut(description='Q spectra bin intensities', type=np.array)
# # model = Input(description='Fittable model class in the style of Astropy', type=Enum)
# domainmin = Input(description='Min bound on the domain of the input data', type=float)
# domainmax = Input(description='Max bound on the domain of the input data', type=float)
# degree = Input(name='Polynomial Degree', description='Polynomial degree number', type=int, min=1, default=4)
# # fitter = Input(description='Fitting algorithm', default=fitting.LevMarLSQFitter(), type=Enum, limits={'Linear LSQ':fitting.LinearLSQFitter(), 'Levenberg-Marquardt LSQ':fitting.LevMarLSQFitter(), 'SLSQP LSQ':fitting.SLSQPLSQFitter(), 'Simplex LSQ':fitting.SimplexLSQFitter()})
# domainfilter = Input(description='Domain limits where peaks will be fitted; auto-populated by ')
#
# backgroundmodel = Output(description='A new model with the fitted parameters; behaves as parameterized function',
# type=Fittable1DModel)
# backgroundprofile = Output(description='The fitted profile from the evaluation of the '
# 'resulting model over the input range.')
# rawIq = Output(description='The spectra data before subtraction.')
#
# hints = [PlotHint(q, Iq), PlotHint(q, backgroundprofile), PlotHint(q, rawIq)]
#
# modelvars = {}
#
# def __init__(self):
# super(QBackgroundFit, self).__init__()
# self.peakranges = []
#
# @property
# def parameter(self):
# self._workflow.attach(self.find_peak_ranges) # order may be bad...
# return super(QBackgroundFit, self).parameter
#
# def find_peak_ranges(self):
# from xicam.SAXS.operations.astropyfit import \
# AstropyQSpectraFit # must be a late import to avoid being picked up first by plugin manager
# thisindex = self._workflow.processes.index(self)
# self.peakranges = [(process.domainmin.value, process.domainmax.value)
# for process in self._workflow.processes[thisindex + 1:]
# if isinstance(process, AstropyQSpectraFit)]
#
# def detach(self):
# self._workflow.detach(self.find_peak_ranges)
#
# def evaluate(self):
# model = models.Polynomial1D(degree=self.degree.value)
#
# norange = self.domainmin.value == self.domainmax.value
# if self.domainmin.value is None and self.q.value is not None or norange: # truncate the q and I arrays with limits
# self.domainmin.value = self.q.value.min()
# if self.domainmax.value is None and self.q.value is not None or norange: # truncate the q and I arrays with limits
# self.domainmax.value = self.q.value.max()
#
# filter = np.logical_and(self.domainmin.value <= self.q.value, self.q.value <= self.domainmax.value)
# for peakrange in self.peakranges:
# print('applying peak range:', peakrange)
# filter &= np.logical_or(peakrange[0] >= self.q.value, self.q.value >= peakrange[1])
#
# q = self.q.value[filter]
# Iq = self.Iq.value[filter]
# self.backgroundmodel.value = fitting.LinearLSQFitter()(model, q, Iq)
# self.backgroundprofile.value = self.backgroundmodel.value(self.q.value)
# self.rawIq.value = self.Iq.value.copy()
# self.Iq.value = self.Iq.value - self.backgroundprofile.value
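# --- Illustrative fit (editor's sketch, not part of the original module) ---
# The core background-fitting step used by _func above, shown on synthetic
# data: a Polynomial1D is fit with LinearLSQFitter and then evaluated by
# calling the fitted model (not `.value(...)`). The synthetic curve is made up.
#
#   q_demo = np.linspace(0.01, 1.0, 200)
#   iq_demo = 0.5 * q_demo ** 2 - 0.1 * q_demo + 2.0
#   poly = models.Polynomial1D(degree=2)
#   fitted = fitting.LinearLSQFitter()(poly, q_demo, iq_demo)
#   background = fitted(q_demo)
#   residual = iq_demo - background    # this is what _func returns as `iq`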
|
PypiClean
|
/itk_rtk-2.4.1-cp37-cp37m-macosx_10_9_x86_64.whl/itk/rtkFieldOfViewImageFilterPython.py
|
import collections
from sys import version_info as _version_info
if _version_info < (3, 7, 0):
raise RuntimeError("Python 3.7 or later required")
from . import _ITKCommonPython
from . import _RTKPython
from sys import version_info as _swig_python_version_info
if _swig_python_version_info < (2, 7, 0):
raise RuntimeError("Python 2.7 or later required")
# Import the low-level C/C++ module
if __package__ or "." in __name__:
from . import _rtkFieldOfViewImageFilterPython
else:
import _rtkFieldOfViewImageFilterPython
try:
import builtins as __builtin__
except ImportError:
import __builtin__
_swig_new_instance_method = _rtkFieldOfViewImageFilterPython.SWIG_PyInstanceMethod_New
_swig_new_static_method = _rtkFieldOfViewImageFilterPython.SWIG_PyStaticMethod_New
def _swig_repr(self):
try:
strthis = "proxy of " + self.this.__repr__()
except __builtin__.Exception:
strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
def _swig_setattr_nondynamic_instance_variable(set):
def set_instance_attr(self, name, value):
if name == "thisown":
self.this.own(value)
elif name == "this":
set(self, name, value)
elif hasattr(self, name) and isinstance(getattr(type(self), name), property):
set(self, name, value)
else:
raise AttributeError("You cannot add instance attributes to %s" % self)
return set_instance_attr
def _swig_setattr_nondynamic_class_variable(set):
def set_class_attr(cls, name, value):
if hasattr(cls, name) and not isinstance(getattr(cls, name), property):
set(cls, name, value)
else:
raise AttributeError("You cannot add class attributes to %s" % cls)
return set_class_attr
def _swig_add_metaclass(metaclass):
"""Class decorator for adding a metaclass to a SWIG wrapped class - a slimmed down version of six.add_metaclass"""
def wrapper(cls):
return metaclass(cls.__name__, cls.__bases__, cls.__dict__.copy())
return wrapper
class _SwigNonDynamicMeta(type):
"""Meta class to enforce nondynamic attributes (no new attributes) for a class"""
__setattr__ = _swig_setattr_nondynamic_class_variable(type.__setattr__)
import collections.abc
import itk.itkImagePython
import itk.ITKCommonBasePython
import itk.itkMatrixPython
import itk.itkPointPython
import itk.vnl_vector_refPython
import itk.vnl_vectorPython
import itk.vnl_matrixPython
import itk.stdcomplexPython
import itk.pyBasePython
import itk.itkFixedArrayPython
import itk.itkVectorPython
import itk.itkCovariantVectorPython
import itk.vnl_matrix_fixedPython
import itk.itkOffsetPython
import itk.itkSizePython
import itk.itkRGBAPixelPython
import itk.itkImageRegionPython
import itk.itkIndexPython
import itk.itkRGBPixelPython
import itk.itkSymmetricSecondRankTensorPython
import itk.itkInPlaceImageFilterAPython
import itk.itkImageToImageFilterAPython
import itk.itkImageSourcePython
import itk.itkImageSourceCommonPython
import itk.itkVectorImagePython
import itk.itkVariableLengthVectorPython
import itk.itkImageToImageFilterCommonPython
import itk.itkImageToImageFilterBPython
import itk.rtkThreeDCircularProjectionGeometryPython
import itk.rtkProjectionGeometryPython
def rtkFieldOfViewImageFilterID3ID3_New():
return rtkFieldOfViewImageFilterID3ID3.New()
class rtkFieldOfViewImageFilterID3ID3(itk.itkInPlaceImageFilterAPython.itkInPlaceImageFilterID3ID3):
r"""Proxy of C++ rtkFieldOfViewImageFilterID3ID3 class."""
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
def __init__(self, *args, **kwargs):
raise AttributeError("No constructor defined")
__repr__ = _swig_repr
FOVRadiusType_RADIUSINF = _rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterID3ID3_FOVRadiusType_RADIUSINF
FOVRadiusType_RADIUSSUP = _rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterID3ID3_FOVRadiusType_RADIUSSUP
FOVRadiusType_RADIUSBOTH = _rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterID3ID3_FOVRadiusType_RADIUSBOTH
__New_orig__ = _swig_new_static_method(_rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterID3ID3___New_orig__)
Clone = _swig_new_instance_method(_rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterID3ID3_Clone)
GetGeometry = _swig_new_instance_method(_rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterID3ID3_GetGeometry)
SetGeometry = _swig_new_instance_method(_rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterID3ID3_SetGeometry)
GetMask = _swig_new_instance_method(_rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterID3ID3_GetMask)
SetMask = _swig_new_instance_method(_rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterID3ID3_SetMask)
GetProjectionsStack = _swig_new_instance_method(_rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterID3ID3_GetProjectionsStack)
SetProjectionsStack = _swig_new_instance_method(_rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterID3ID3_SetProjectionsStack)
GetDisplacedDetector = _swig_new_instance_method(_rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterID3ID3_GetDisplacedDetector)
SetDisplacedDetector = _swig_new_instance_method(_rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterID3ID3_SetDisplacedDetector)
GetInsideValue = _swig_new_instance_method(_rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterID3ID3_GetInsideValue)
SetInsideValue = _swig_new_instance_method(_rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterID3ID3_SetInsideValue)
GetOutsideValue = _swig_new_instance_method(_rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterID3ID3_GetOutsideValue)
SetOutsideValue = _swig_new_instance_method(_rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterID3ID3_SetOutsideValue)
ComputeFOVRadius = _swig_new_instance_method(_rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterID3ID3_ComputeFOVRadius)
AddCollimationConstraints = _swig_new_instance_method(_rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterID3ID3_AddCollimationConstraints)
__swig_destroy__ = _rtkFieldOfViewImageFilterPython.delete_rtkFieldOfViewImageFilterID3ID3
cast = _swig_new_static_method(_rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterID3ID3_cast)
def New(*args, **kargs):
"""New() -> rtkFieldOfViewImageFilterID3ID3
Create a new object of the class rtkFieldOfViewImageFilterID3ID3 and set the input and the parameters if some
named or non-named arguments are passed to that method.
New() tries to assign all the non named parameters to the input of the new objects - the
first non named parameter in the first input, etc.
The named parameters are used by calling the method with the same name prefixed by 'Set'.
Ex:
rtkFieldOfViewImageFilterID3ID3.New(reader, threshold=10)
is (most of the time) equivalent to:
obj = rtkFieldOfViewImageFilterID3ID3.New()
obj.SetInput(0, reader.GetOutput())
obj.SetThreshold(10)
"""
obj = rtkFieldOfViewImageFilterID3ID3.__New_orig__()
from itk.support import template_class
template_class.New(obj, *args, **kargs)
return obj
New = staticmethod(New)
# Register rtkFieldOfViewImageFilterID3ID3 in _rtkFieldOfViewImageFilterPython:
_rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterID3ID3_swigregister(rtkFieldOfViewImageFilterID3ID3)
rtkFieldOfViewImageFilterID3ID3___New_orig__ = _rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterID3ID3___New_orig__
rtkFieldOfViewImageFilterID3ID3_cast = _rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterID3ID3_cast
def rtkFieldOfViewImageFilterIF3IF3_New():
return rtkFieldOfViewImageFilterIF3IF3.New()
class rtkFieldOfViewImageFilterIF3IF3(itk.itkInPlaceImageFilterAPython.itkInPlaceImageFilterIF3IF3):
r"""Proxy of C++ rtkFieldOfViewImageFilterIF3IF3 class."""
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
def __init__(self, *args, **kwargs):
raise AttributeError("No constructor defined")
__repr__ = _swig_repr
FOVRadiusType_RADIUSINF = _rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterIF3IF3_FOVRadiusType_RADIUSINF
FOVRadiusType_RADIUSSUP = _rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterIF3IF3_FOVRadiusType_RADIUSSUP
FOVRadiusType_RADIUSBOTH = _rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterIF3IF3_FOVRadiusType_RADIUSBOTH
__New_orig__ = _swig_new_static_method(_rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterIF3IF3___New_orig__)
Clone = _swig_new_instance_method(_rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterIF3IF3_Clone)
GetGeometry = _swig_new_instance_method(_rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterIF3IF3_GetGeometry)
SetGeometry = _swig_new_instance_method(_rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterIF3IF3_SetGeometry)
GetMask = _swig_new_instance_method(_rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterIF3IF3_GetMask)
SetMask = _swig_new_instance_method(_rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterIF3IF3_SetMask)
GetProjectionsStack = _swig_new_instance_method(_rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterIF3IF3_GetProjectionsStack)
SetProjectionsStack = _swig_new_instance_method(_rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterIF3IF3_SetProjectionsStack)
GetDisplacedDetector = _swig_new_instance_method(_rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterIF3IF3_GetDisplacedDetector)
SetDisplacedDetector = _swig_new_instance_method(_rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterIF3IF3_SetDisplacedDetector)
GetInsideValue = _swig_new_instance_method(_rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterIF3IF3_GetInsideValue)
SetInsideValue = _swig_new_instance_method(_rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterIF3IF3_SetInsideValue)
GetOutsideValue = _swig_new_instance_method(_rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterIF3IF3_GetOutsideValue)
SetOutsideValue = _swig_new_instance_method(_rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterIF3IF3_SetOutsideValue)
ComputeFOVRadius = _swig_new_instance_method(_rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterIF3IF3_ComputeFOVRadius)
AddCollimationConstraints = _swig_new_instance_method(_rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterIF3IF3_AddCollimationConstraints)
__swig_destroy__ = _rtkFieldOfViewImageFilterPython.delete_rtkFieldOfViewImageFilterIF3IF3
cast = _swig_new_static_method(_rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterIF3IF3_cast)
def New(*args, **kargs):
"""New() -> rtkFieldOfViewImageFilterIF3IF3
Create a new object of the class rtkFieldOfViewImageFilterIF3IF3 and set the input and the parameters if some
named or non-named arguments are passed to that method.
New() tries to assign all the non named parameters to the input of the new objects - the
first non named parameter in the first input, etc.
The named parameters are used by calling the method with the same name prefixed by 'Set'.
Ex:
rtkFieldOfViewImageFilterIF3IF3.New(reader, threshold=10)
is (most of the time) equivalent to:
obj = rtkFieldOfViewImageFilterIF3IF3.New()
obj.SetInput(0, reader.GetOutput())
obj.SetThreshold(10)
"""
obj = rtkFieldOfViewImageFilterIF3IF3.__New_orig__()
from itk.support import template_class
template_class.New(obj, *args, **kargs)
return obj
New = staticmethod(New)
# Register rtkFieldOfViewImageFilterIF3IF3 in _rtkFieldOfViewImageFilterPython:
_rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterIF3IF3_swigregister(rtkFieldOfViewImageFilterIF3IF3)
rtkFieldOfViewImageFilterIF3IF3___New_orig__ = _rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterIF3IF3___New_orig__
rtkFieldOfViewImageFilterIF3IF3_cast = _rtkFieldOfViewImageFilterPython.rtkFieldOfViewImageFilterIF3IF3_cast
from itk.support import helpers
import itk.support.types as itkt
from typing import Sequence, Tuple, Union
@helpers.accept_array_like_xarray_torch
def field_of_view_image_filter(*args: itkt.ImageLike, geometry=..., mask: bool=..., projections_stack: itkt.ImageBase=..., displaced_detector: bool=..., inside_value: float=..., outside_value: float=...,**kwargs)-> itkt.ImageSourceReturn:
"""Functional interface for FieldOfViewImageFilter"""
import itk
kwarg_typehints = { 'geometry':geometry,'mask':mask,'projections_stack':projections_stack,'displaced_detector':displaced_detector,'inside_value':inside_value,'outside_value':outside_value }
specified_kwarg_typehints = { k:v for (k,v) in kwarg_typehints.items() if kwarg_typehints[k] is not ... }
kwargs.update(specified_kwarg_typehints)
instance = itk.FieldOfViewImageFilter.New(*args, **kwargs)
return instance.__internal_call__()
def field_of_view_image_filter_init_docstring():
import itk
from itk.support import template_class
filter_class = itk.RTK.FieldOfViewImageFilter
field_of_view_image_filter.process_object = filter_class
is_template = isinstance(filter_class, template_class.itkTemplate)
if is_template:
filter_object = filter_class.values()[0]
else:
filter_object = filter_class
field_of_view_image_filter.__doc__ = filter_object.__doc__
|
PypiClean
|
/napalm-yang-0.1.0.tar.gz/napalm-yang-0.1.0/napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_ipv4_reachability/prefixes/__init__.py
|
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improvement)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
from . import prefix
class prefixes(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level/link-state-database/lsp/tlvs/tlv/mt-ipv4-reachability/prefixes. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: This container describes IS prefixes.
"""
__slots__ = ("_path_helper", "_extmethods", "__prefix")
_yang_name = "prefixes"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__prefix = YANGDynClass(
base=YANGListType(
False,
prefix.prefix,
yang_name="prefix",
parent=self,
is_container="list",
user_ordered=False,
path_helper=self._path_helper,
yang_keys="False",
extensions=None,
),
is_container="list",
yang_name="prefix",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="list",
is_config=False,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"protocols",
"protocol",
"isis",
"levels",
"level",
"link-state-database",
"lsp",
"tlvs",
"tlv",
"mt-ipv4-reachability",
"prefixes",
]
def _get_prefix(self):
"""
Getter method for prefix, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_ipv4_reachability/prefixes/prefix (list)
YANG Description: IPv4 prefixes that are contained within MT reachability TLV.
"""
return self.__prefix
def _set_prefix(self, v, load=False):
"""
Setter method for prefix, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_ipv4_reachability/prefixes/prefix (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_prefix is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_prefix() directly.
YANG Description: IPv4 prefixes that are contained within MT reachability TLV.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=YANGListType(
False,
prefix.prefix,
yang_name="prefix",
parent=self,
is_container="list",
user_ordered=False,
path_helper=self._path_helper,
yang_keys="False",
extensions=None,
),
is_container="list",
yang_name="prefix",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="list",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """prefix must be of a type compatible with list""",
"defined-type": "list",
"generated-type": """YANGDynClass(base=YANGListType(False,prefix.prefix, yang_name="prefix", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='False', extensions=None), is_container='list', yang_name="prefix", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='list', is_config=False)""",
}
)
self.__prefix = t
if hasattr(self, "_set"):
self._set()
def _unset_prefix(self):
self.__prefix = YANGDynClass(
base=YANGListType(
False,
prefix.prefix,
yang_name="prefix",
parent=self,
is_container="list",
user_ordered=False,
path_helper=self._path_helper,
yang_keys="False",
extensions=None,
),
is_container="list",
yang_name="prefix",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="list",
is_config=False,
)
prefix = __builtin__.property(_get_prefix)
_pyangbind_elements = OrderedDict([("prefix", prefix)])
from . import prefix
class prefixes(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level/link-state-database/lsp/tlvs/tlv/mt-ipv4-reachability/prefixes. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: This container describes IS prefixes.
"""
__slots__ = ("_path_helper", "_extmethods", "__prefix")
_yang_name = "prefixes"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__prefix = YANGDynClass(
base=YANGListType(
False,
prefix.prefix,
yang_name="prefix",
parent=self,
is_container="list",
user_ordered=False,
path_helper=self._path_helper,
yang_keys="False",
extensions=None,
),
is_container="list",
yang_name="prefix",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="list",
is_config=False,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"protocols",
"protocol",
"isis",
"levels",
"level",
"link-state-database",
"lsp",
"tlvs",
"tlv",
"mt-ipv4-reachability",
"prefixes",
]
def _get_prefix(self):
"""
Getter method for prefix, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_ipv4_reachability/prefixes/prefix (list)
YANG Description: IPv4 prefixes that are contained within MT reachability TLV.
"""
return self.__prefix
def _set_prefix(self, v, load=False):
"""
Setter method for prefix, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_ipv4_reachability/prefixes/prefix (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_prefix is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_prefix() directly.
YANG Description: IPv4 prefixes that are contained within MT reachability TLV.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=YANGListType(
False,
prefix.prefix,
yang_name="prefix",
parent=self,
is_container="list",
user_ordered=False,
path_helper=self._path_helper,
yang_keys="False",
extensions=None,
),
is_container="list",
yang_name="prefix",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="list",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """prefix must be of a type compatible with list""",
"defined-type": "list",
"generated-type": """YANGDynClass(base=YANGListType(False,prefix.prefix, yang_name="prefix", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='False', extensions=None), is_container='list', yang_name="prefix", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='list', is_config=False)""",
}
)
self.__prefix = t
if hasattr(self, "_set"):
self._set()
def _unset_prefix(self):
self.__prefix = YANGDynClass(
base=YANGListType(
False,
prefix.prefix,
yang_name="prefix",
parent=self,
is_container="list",
user_ordered=False,
path_helper=self._path_helper,
yang_keys="False",
extensions=None,
),
is_container="list",
yang_name="prefix",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="list",
is_config=False,
)
prefix = __builtin__.property(_get_prefix)
_pyangbind_elements = OrderedDict([("prefix", prefix)])
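# --- Illustrative usage (editor's sketch, not part of the original module) ---
# Generated pyangbind containers like this are read-only state bindings
# (is_config=False): the `prefix` list is populated by a backend (for example
# NAPALM parsers) rather than set directly, and _path() reports the YANG
# location the container is bound to.
#
#   container = prefixes()
#   print(container._path())      # ['network-instances', ..., 'prefixes']
#   print(len(container.prefix))  # 0 until a backend fills the keyless list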
|
PypiClean
|
/airflow_provider_huawei_cloud_demo-0.0.23-py3-none-any.whl/huawei_cloud_provider/hooks/dli.py
|
from __future__ import annotations
import os.path
import huaweicloudsdkdli.v1 as DliSdk
from huaweicloudsdkcore.auth.credentials import BasicCredentials
from huaweicloudsdkdli.v1.region.dli_region import DliRegion
from airflow.exceptions import AirflowException
from huawei_cloud_provider.hooks.base_huawei_cloud import HuaweiBaseHook
class DLIHook(HuaweiBaseHook):
"""Interact with Huawei Cloud DLI, using the huaweicloudsdkdli library."""
def create_queue(
self,
queue_name,
platform,
enterprise_project_id,
elastic_resource_pool_name,
feature,
resource_mode,
charging_mode,
description,
queue_type,
list_tags_body,
list_labels_body,
cu_count,
) -> DliSdk.CreateQueueResponse:
"""
Create a queue in DLI
:param queue_name: The name of the queue.
:param platform: The platform of the queue.
:param enterprise_project_id: The enterprise project ID of the queue.
:param elastic_resource_pool_name: The elastic resource pool name of the queue.
:param feature: The feature of the queue.
:param resource_mode: The resource mode of the queue.
:param charging_mode: The charging mode of the queue.
:param description: The description of the queue.
:param queue_type: The type of the queue.
:param list_tags_body: The tags of the queue.
:param list_labels_body: The labels of the queue.
:param cu_count: The CU count of the queue.
:return: The response of the queue creation.
:rtype: DliSdk.CreateQueueResponse
"""
if list_tags_body is not None and len(list_tags_body) > 10:
raise AirflowException("You can add up to 10 tags.")
try:
return self.get_dli_client().create_queue(
self.create_queue_request(
elastic_resource_pool_name=elastic_resource_pool_name,
list_tags_body=list_tags_body,
feature=feature,
list_labels_body=list_labels_body,
resource_mode=resource_mode,
platform=platform,
enterprise_project_id=enterprise_project_id
if enterprise_project_id is not None
else self.get_enterprise_project_id_from_extra_data(),
charging_mode=charging_mode,
cu_count=cu_count,
description=description,
queue_type=queue_type,
queue_name=queue_name,
)
)
except Exception as e:
self.log.error(e)
raise AirflowException(f"Errors when creating: {e}")
def update_queue_cidr(self, queue_name, cidr_in_vpc) -> DliSdk.UpdateQueueCidrResponse:
"""
Update the CIDR of a queue in DLI
:param queue_name: The name of the queue.
:param cidr_in_vpc: The CIDR of the queue.
:return: The response of the queue update.
:rtype: DliSdk.UpdateQueueCidrResponse
"""
try:
return self.get_dli_client().update_queue_cidr(
self.update_queue_cidr_request(queue_name=queue_name, cidr_in_vpc=cidr_in_vpc)
)
except Exception as e:
self.log.error(e)
raise AirflowException(f"Errors when updating: {e}")
def delete_queue(self, queue_name) -> DliSdk.DeleteQueueResponse:
"""
Delete a queue in DLI
:param queue_name: The name of the queue.
:return: The response of the queue deletion.
:rtype: DliSdk.DeleteQueueResponse
"""
try:
return self.get_dli_client().delete_queue(self.delete_queue_request(queue_name))
except Exception as e:
self.log.error(e)
raise AirflowException(f"Errors when deleting: {e}")
def list_queues(
self, queue_type, tags, return_billing_info, return_permission_info
) -> DliSdk.ListQueuesResponse:
"""
List queues in DLI
:param queue_type: The type of the queue.
:param tags: The tags of the queue.
:param return_billing_info: Whether to return billing information.
:param return_permission_info: Whether to return permission information.
:return: The response of the queue listing.
:rtype: DliSdk.ListQueuesResponse
"""
try:
return self.get_dli_client().list_queues(
self.list_queues_request(
queue_type=queue_type,
tags=tags,
return_billing_info=return_billing_info,
return_permission_info=return_permission_info,
)
)
except Exception as e:
self.log.error(e)
raise AirflowException(f"Errors when listing: {e}")
def create_batch_job(
self,
queue_name,
file,
class_name,
obs_bucket,
catalog_name,
image,
max_retry_times,
auto_recovery,
spark_version,
feature,
num_executors,
executor_cores,
executor_memory,
driver_cores,
driver_memory,
name,
list_conf_body,
list_groups_body,
list_resources_body,
list_modules_body,
list_files_body,
list_python_files_body,
list_jars_body,
sc_type,
list_args_body,
cluster_name,
) -> DliSdk.CreateBatchJobResponse:
"""
Create a batch job in DLI
:param queue_name: The name of the queue.
:param file: The file of the batch job.
:param class_name: The class name of the batch job.
:param obs_bucket: The OBS bucket of the batch job.
:param catalog_name: The catalog name of the batch job.
:param image: The image of the batch job.
:param max_retry_times: The maximum retry times of the batch job.
:param auto_recovery: Whether to enable auto recovery.
:param spark_version: The Spark version of the batch job.
:param feature: The feature of the batch job.
:param num_executors: The number of executors of the batch job.
:param executor_cores: The number of cores of the executor.
:param executor_memory: The memory of the executor.
:param driver_cores: The number of cores of the driver.
:param driver_memory: The memory of the driver.
:param name: The name of the batch job.
:param list_conf_body: The configuration of the batch job.
:param list_groups_body: The groups of the batch job.
:param list_resources_body: The resources of the batch job.
:param list_modules_body: The modules of the batch job.
:param list_files_body: The files of the batch job.
:param list_python_files_body: The Python files of the batch job.
:param list_jars_body: The JAR files of the batch job.
:param sc_type: The type of the Spark context.
:param list_args_body: The arguments of the batch job.
:param cluster_name: The name of the cluster.
:return: The response of the batch job creation.
:rtype: DliSdk.CreateBatchJobResponse
"""
try:
return self.get_dli_client().create_batch_job(
self.create_batch_job_request(
queue_name=queue_name,
file=file,
class_name=class_name,
obs_bucket=obs_bucket,
catalog_name=catalog_name,
image=image,
max_retry_times=max_retry_times,
auto_recovery=auto_recovery,
spark_version=spark_version,
feature=feature,
num_executors=num_executors,
executor_cores=executor_cores,
executor_memory=executor_memory,
driver_cores=driver_cores,
driver_memory=driver_memory,
name=name,
list_conf_body=list_conf_body,
list_groups_body=list_groups_body,
list_resources_body=list_resources_body,
list_modules_body=list_modules_body,
list_files_body=list_files_body,
list_python_files_body=list_python_files_body,
list_jars_body=list_jars_body,
sc_type=sc_type,
list_args_body=list_args_body,
cluster_name=cluster_name,
)
)
except Exception as e:
self.log.error(e)
raise AirflowException(f"Errors when crating batch job: {e}")
def upload_files(self, paths, group) -> DliSdk.UploadFilesResponse:
"""
Upload files to DLI
:param paths: The paths of the files to be uploaded.
:param group: The group of the files to be uploaded.
:return: The response of the file upload.
:rtype: DliSdk.UploadFilesResponse
"""
try:
return self.get_dli_client().upload_files(self.upload_files_request(paths=paths, group=group))
except Exception as e:
self.log.error(e)
raise AirflowException(f"Errors when uploading files: {e}")
def run_job(
self, sql_query, database_name, queue_name, list_conf_body, list_tags_body
) -> DliSdk.RunJobResponse:
"""
Run a job in DLI
:param sql_query: The SQL query of the job.
:param database_name: The database name of the job.
:param queue_name: The queue name of the job.
:param list_conf_body: The configuration of the job.
:param list_tags_body: The tags of the job.
:return: The response of the job run.
:rtype: DliSdk.RunJobResponse
"""
try:
if os.path.isfile(sql_query):
                with open(sql_query) as sql_file:
                    sql_query = sql_file.read()
return self.get_dli_client().run_job(
self.run_job_request(
sql_query=sql_query,
database_name=database_name,
list_conf_body=list_conf_body,
list_tags_body=list_tags_body,
queue_name=queue_name,
)
)
except Exception as e:
self.log.error(e)
raise AirflowException(f"Errors when running: {e}")
def show_batch_state(self, job_id) -> str:
"""
Get the state of a batch job
:param job_id: The ID of the batch job.
:return: The state of the batch job.
:rtype: str
"""
try:
response = self.get_dli_client().show_batch_state(self.show_batch_state_request(job_id))
return response.state
except Exception as e:
self.log.error(e)
raise AirflowException(f"Errors when get batch state: {e}")
def show_job_status(self, job_id) -> str:
"""
Get the status of a job
:param job_id: The ID of the job.
:return: The status of the job.
:rtype: str
"""
try:
response = self.get_dli_client().show_job_status(self.show_job_status_request(job_id))
return response.status
except Exception as e:
self.log.error(e)
raise AirflowException(f"Errors when get job status: {e}")
def get_dli_client(self) -> DliSdk.DliClient:
ak = self.conn.login
sk = self.conn.password
credentials = BasicCredentials(ak, sk, self.get_project_id())
return (
DliSdk.DliClient.new_builder()
.with_credentials(credentials)
.with_region(DliRegion.value_of(self.get_region()))
.build()
)
def show_job_status_request(self, job_id):
return DliSdk.ShowJobStatusRequest(job_id)
def show_batch_state_request(self, job_id):
return DliSdk.ShowBatchStateRequest(job_id)
def run_job_request(self, sql_query, database_name, queue_name, list_conf_body, list_tags_body):
request = DliSdk.RunJobRequest()
request.body = DliSdk.CommitJobReq(
queue_name=queue_name,
currentdb=database_name,
sql=sql_query,
tags=list_tags_body,
conf=list_conf_body,
)
return request
def upload_files_request(self, paths, group):
request = DliSdk.UploadFilesRequest()
request.body = DliSdk.UploadGroupPackageReq(group=group, paths=paths)
return request
def create_batch_job_request(
self,
queue_name,
file,
class_name,
obs_bucket,
catalog_name,
image,
max_retry_times,
auto_recovery,
spark_version,
feature,
num_executors,
executor_cores,
executor_memory,
driver_cores,
driver_memory,
name,
list_conf_body,
list_groups_body,
list_resources_body,
list_modules_body,
list_files_body,
list_python_files_body,
list_jars_body,
sc_type,
list_args_body,
cluster_name,
):
request = DliSdk.CreateBatchJobRequest()
request.body = DliSdk.CreateBatchJobReq(
queue=queue_name,
file=file,
class_name=class_name,
obs_bucket=obs_bucket,
catalog_name=catalog_name,
image=image,
max_retry_times=max_retry_times,
auto_recovery=auto_recovery,
spark_version=spark_version,
feature=feature,
num_executors=num_executors,
executor_cores=executor_cores,
executor_memory=executor_memory,
driver_cores=driver_cores,
driver_memory=driver_memory,
name=name,
conf=list_conf_body,
groups=list_groups_body,
resources=list_resources_body,
modules=list_modules_body,
files=list_files_body,
python_files=list_python_files_body,
jars=list_jars_body,
sc_type=sc_type,
args=list_args_body,
cluster_name=cluster_name,
)
return request
def list_queues_request(self, queue_type, tags, return_billing_info, return_permission_info):
return DliSdk.ListQueuesRequest(
queue_type=queue_type,
tags=tags,
with_charge_info=return_billing_info,
with_priv=return_permission_info,
)
def delete_queue_request(self, queue_name):
return DliSdk.DeleteQueueRequest(queue_name)
def update_queue_cidr_request(self, queue_name, cidr_in_vpc):
request = DliSdk.UpdateQueueCidrRequest()
request.queue_name = queue_name
request.body = DliSdk.UpdateQueueCidrReq(cidr_in_vpc=cidr_in_vpc)
return request
def create_queue_request(
self,
queue_name,
platform,
enterprise_project_id,
elastic_resource_pool_name,
feature,
resource_mode,
charging_mode,
description,
queue_type,
list_tags_body,
list_labels_body,
cu_count,
):
request = DliSdk.CreateQueueRequest()
request.body = DliSdk.CreateQueueReq(
elastic_resource_pool_name=elastic_resource_pool_name,
tags=list_tags_body,
feature=feature,
labels=list_labels_body,
resource_mode=resource_mode,
platform=platform,
enterprise_project_id=enterprise_project_id,
charging_mode=charging_mode,
cu_count=cu_count,
description=description,
queue_type=queue_type,
queue_name=queue_name,
)
return request
def get_job_result(self, job_id, queue_name) -> DliSdk.ShowJobResultResponse:
try:
response = self.get_dli_client().show_job_result(self.get_job_result_request(job_id, queue_name))
return response
except Exception as e:
self.log.error(e)
raise AirflowException(f"Errors when get job result: {e}")
def get_job_result_request(self, job_id, queue_name):
request = DliSdk.ShowJobResultRequest(job_id=job_id, queue_name=queue_name)
return request
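# --- Usage sketch (illustration only, not part of the hook) -------------------
# A minimal, hypothetical example of driving DLIHook from an Airflow task.
# Constructor arguments depend on HuaweiBaseHook and are omitted here; the
# queue name, database and the job_id attribute on the response are assumptions.
#
#     hook = DLIHook()
#     response = hook.run_job(
#         sql_query="SELECT 1",
#         database_name="default",
#         queue_name="my_queue",
#         list_conf_body=None,
#         list_tags_body=None,
#     )
#     state = hook.show_job_status(response.job_id)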
|
PypiClean
|
/np_pipeline_qc-0.0.26-py3-none-any.whl/np_pipeline_qc/legacy/probeSync_qc.py
|
from __future__ import division
import glob
import json
import logging
import os
import pdb
import re
# for vsync alignment
from typing import Union
from xml.dom.minidom import parse
import numpy as np
import pandas as pd
import scipy.spatial.distance as distance
import visual_behavior
from matplotlib import pyplot as plt
from np_pipeline_qc.legacy import ecephys
def getUnitData(probeBase, syncDataset):
probeSpikeDir = os.path.join(probeBase, r'continuous\\Neuropix-PXI-100.0')
# Get barcodes/times from probe events and sync file
be_t, be = get_ephys_barcodes(probeBase)
be_t, be = cut_bad_barcodes(be_t, be, 'ephys', threshold=30.8)
bs_t, bs = get_sync_barcodes(syncDataset)
bs_t, bs = cut_bad_barcodes(bs_t, bs, 'sync')
# Compute time shift between ephys and sync
shift, p_sampleRate, m_endpoints = ecephys.get_probe_time_offset(
bs_t, bs, be_t, be, 0, 30000
)
# Get unit spike times
units = load_spike_info(probeSpikeDir, p_sampleRate, shift)
return units
def get_ephys_barcodes(probeBase):
probeTTLDir = os.path.join(probeBase, r'events\\Neuropix-PXI-100.0\\TTL_1')
channel_states = np.load(os.path.join(probeTTLDir, 'channel_states.npy'))
event_times = np.load(os.path.join(probeTTLDir, 'event_timestamps.npy'))
beRising = event_times[channel_states > 0] / 30000.0
beFalling = event_times[channel_states < 0] / 30000.0
be_t, be = ecephys.extract_barcodes_from_times(beRising, beFalling)
return be_t, be
def get_sync_barcodes(sync_dataset, fallback_line=0):
lines = sync_dataset.line_labels
# look for barcodes in labels
bline = fallback_line
for line in lines:
if 'barcode' in line:
bline = line
bRising = sync_dataset.get_rising_edges(bline, units='seconds')
bFalling = sync_dataset.get_falling_edges(bline, units='seconds')
bs_t, bs = ecephys.extract_barcodes_from_times(bRising, bFalling)
return bs_t, bs
def cut_bad_barcodes(bs_t, bs, source, threshold=30.95):
    if any(np.diff(bs_t) < threshold):
logging.warning(
'Detected bad barcode interval in {}, truncating data'.format(
source
)
)
# find bad barcodes
bad_intervals = np.where(np.diff(bs_t) < threshold)[0]
bad_barcode_indices = [bi + 1 for bi in bad_intervals]
# find largest block of good barcodes to use for probe sample rate/offset
bbi = np.insert(bad_barcode_indices, 0, 0)
bbi = np.append(bbi, len(bs_t))
good_block_sizes = np.diff(bbi)
largest_block = np.argmax(good_block_sizes)
barcode_interval_to_use = [
bbi[largest_block],
bbi[largest_block + 1] - 1,
]
bs_t = bs_t[barcode_interval_to_use[0] : barcode_interval_to_use[1]]
bs = bs[barcode_interval_to_use[0] : barcode_interval_to_use[1]]
return bs_t, bs
def build_unit_table(probes_to_run, paths, syncDataset):
### GET UNIT METRICS AND BUILD UNIT TABLE ###
probe_dirs = [[paths['probe' + pid], pid] for pid in probes_to_run]
probe_dict = {a[1]: {} for a in probe_dirs}
successful_probes = []
for p in probe_dirs:
print(p)
try:
print(
'########## Getting Units for probe {} ###########'.format(
p[1]
)
)
probe = p[1]
full_path = p[0]
# Get unit metrics for this probe
metrics_file = os.path.join(
full_path, 'continuous\\Neuropix-PXI-100.0\\metrics.csv'
)
unit_metrics = pd.read_csv(metrics_file)
unit_metrics = unit_metrics.set_index('cluster_id')
# Get unit data
units = getUnitData(full_path, syncDataset)
units = pd.DataFrame.from_dict(units, orient='index')
units['cluster_id'] = units.index.astype(int)
units = units.set_index('cluster_id')
# units['probe'] = p
# units['uid'] = units['probe'] + units.index.astype(str)
units = pd.merge(
unit_metrics,
units,
left_index=True,
right_index=True,
how='outer',
)
units['probe'] = probe
units['uid'] = units['probe'] + units.index.astype(str)
units = units.set_index('uid')
probe_dict[probe] = units
successful_probes.append(probe)
except Exception as E:
logging.error(E)
print('successful probes:', successful_probes)
# return {k:probe_dict[k] for k in successful_probes}
return pd.concat([probe_dict[k] for k in successful_probes])
def map_probe_from_slot_port(pinfo):
probenames = [None, None, ' ABC', ' DEF']
slot = int(pinfo['slot'])
port = int(pinfo['port'])
probename = probenames[slot][port]
return probename
def get_probe_settings_from_xml(xmlfilepath):
settings = parse(xmlfilepath)
probes = settings.getElementsByTagName('PROBE')
probe_info_dict = {}
for probe in probes:
pinfo = {}
for attr in probe.attributes.items():
pinfo[attr[0]] = attr[1]
probename = map_probe_from_slot_port(pinfo)
probe_info_dict[probename] = pinfo
return probe_info_dict
def get_sync_line_data(syncDataset, line_label=None, channel=None):
"""Get rising and falling edge times for a particular line from the sync h5 file
Parameters
----------
dataset: sync file dataset generated by sync.Dataset
line_label: string specifying which line to read, if that line was labelled during acquisition
    channel: integer specifying which channel to read if the line wasn't labelled
Returns
----------
rising: npy array with rising edge times for specified line
falling: falling edge times
"""
if isinstance(line_label, str):
try:
channel = syncDataset.line_labels.index(line_label)
        except ValueError:
print('Invalid line label')
return
elif channel is None:
print('Must specify either line label or channel id')
return
rising = syncDataset.get_rising_edges(channel, units='seconds')
falling = syncDataset.get_falling_edges(channel, units='seconds')
return rising, falling
def load_spike_info(spike_data_dir, p_sampleRate, shift):
"""Make dictionary with spike times, templates, sorting label and peak channel for all units
Parameters
-----------
spike_data_dir: path to directory with clustering output files
p_sampleRate: probe sampling rate according to master clock
shift: time shift between master and probe clock
p_sampleRate and shift are outputs from 'get_probe_time_offset' function
Returns
----------
units: dictionary with spike info for all units
each unit is integer key, so units[0] is a dictionary for spike cluster 0 with keys
'label': sorting label for unit, eg 'good', 'mua', or 'noise'
'times': spike times in seconds according to master clock
'template': spike template, should be replaced by waveform extracted from raw data
averaged over 1000 randomly chosen spikes
'peakChan': channel where spike template has minimum, used to approximate unit location
"""
print(p_sampleRate)
print(shift)
spike_clusters = np.load(
os.path.join(spike_data_dir, 'spike_clusters.npy')
)
spike_times = np.load(os.path.join(spike_data_dir, 'spike_times.npy'))
templates = np.load(os.path.join(spike_data_dir, 'templates.npy'))
spike_templates = np.load(
os.path.join(spike_data_dir, 'spike_templates.npy')
)
channel_positions = np.load(
os.path.join(spike_data_dir, 'channel_positions.npy')
)
amplitudes = np.load(os.path.join(spike_data_dir, 'amplitudes.npy'))
unit_ids = np.unique(spike_clusters)
units = {}
for u in unit_ids:
ukey = str(u)
units[ukey] = {}
unit_idx = np.where(spike_clusters == u)[0]
unit_sp_times = spike_times[unit_idx] / p_sampleRate - shift
units[ukey]['times'] = unit_sp_times
# choose 1000 spikes with replacement, then average their templates together
chosen_spikes = np.random.choice(unit_idx, 1000)
chosen_templates = spike_templates[chosen_spikes].flatten()
units[ukey]['template'] = np.mean(templates[chosen_templates], axis=0)
units[ukey]['peakChan'] = np.unravel_index(
np.argmin(units[ukey]['template']), units[ukey]['template'].shape
)[1]
units[ukey]['position'] = channel_positions[units[ukey]['peakChan']]
units[ukey]['amplitudes'] = amplitudes[unit_idx]
# #check if this unit is noise
# peakChan = units[ukey]['peakChan']
# temp = units[ukey]['template'][:, peakChan]
# pt = findPeakToTrough(temp, plot=False)
# units[ukey]['peakToTrough'] = pt
return units
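# Example of consuming the dictionary returned above (the path and the spike
# count threshold are illustrative only):
#
#     units = load_spike_info(r'D:\data\probeA\continuous\Neuropix-PXI-100.0',
#                             p_sampleRate=30000.0, shift=0.0)
#     active = {uid: u for uid, u in units.items() if len(u['times']) > 100}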
def getLFPData(probeBase, syncDataset, num_channels=384):
probeTTLDir = os.path.join(probeBase, r'events\\Neuropix-PXI-100.0\\TTL_1')
lfp_data_dir = os.path.join(probeBase, r'continuous\\Neuropix-PXI-100.1')
lfp_data_file = os.path.join(lfp_data_dir, 'continuous.dat')
if not os.path.exists(lfp_data_file):
print('Could not find LFP data at ' + lfp_data_file)
return None, None
lfp_data = np.memmap(lfp_data_file, dtype='int16', mode='r')
lfp_data_reshape = np.reshape(
lfp_data, [int(lfp_data.size / num_channels), -1]
)
time_stamps = np.load(os.path.join(lfp_data_dir, 'lfp_timestamps.npy'))
bRising, bFalling = get_sync_line_data(syncDataset, channel=0)
bs_t, bs = ecephys.extract_barcodes_from_times(bRising, bFalling)
channel_states = np.load(os.path.join(probeTTLDir, 'channel_states.npy'))
event_times = np.load(os.path.join(probeTTLDir, 'event_timestamps.npy'))
beRising = event_times[channel_states > 0] / 30000.0
beFalling = event_times[channel_states < 0] / 30000.0
be_t, be = ecephys.extract_barcodes_from_times(beRising, beFalling)
# Compute time shift between ephys and sync
shift, p_sampleRate, m_endpoints = ecephys.get_probe_time_offset(
bs_t, bs, be_t, be, 0, 30000
)
time_stamps_shifted = (time_stamps / p_sampleRate) - shift
return lfp_data_reshape, time_stamps_shifted
def build_lfp_dict(probe_dirs, syncDataset):
lfp_dict = {}
for ip, probe in enumerate(probe_dirs):
# p_name = probe.split('_')[-2][-1]
p_name = re.findall('probe[A-F]', probe)[0][-1]
lfp, time = getLFPData(probe, syncDataset)
lfp_dict[p_name] = {'time': time, 'lfp': lfp}
return lfp_dict
def get_surface_channels(probe_dirs):
pass
def get_frame_offsets(sync_dataset, frame_counts, tolerance=0):
''' Tries to infer which vsyncs correspond to the frames in the epochs in frame_counts
This allows you to align data even when there are aborted stimuli
INPUTS:
sync_dataset: sync data from experiment (a 'Dataset' object made from the H5 file)
frame_counts: list of the expected frame counts (taken from pkl files) for each
of the stimuli in question;
the list should be ordered by the display sequence
tolerance: percent by which frame counts are allowed to deviate from expected
OUTPUTS:
start_frames: list of the inferred start frames for each of the stimuli
'''
frame_counts = np.array(frame_counts)
tolerance = tolerance/100.0
# get vsyncs and stim_running signals from sync
vf = get_vsyncs(sync_dataset)
stimstarts, stimoffs = get_stim_starts_ends(sync_dataset)
print(stimstarts)
print(stimoffs)
print(len(vf))
# get vsync frame lengths for all stimuli
epoch_frame_counts = []
epoch_start_frames = []
for start, end in zip(stimstarts, stimoffs):
epoch_frames = np.where((vf>start)&(vf<end))[0]
epoch_frame_counts.append(len(epoch_frames))
epoch_start_frames.append(epoch_frames[0])
print(epoch_frame_counts)
print(frame_counts)
start_frames = []
for ind, fc in enumerate(frame_counts):
        start_ind = np.where(np.array(epoch_frame_counts) == fc)[0]
if len(start_ind)==0:
print(f'Could not find start frame for stimulus {ind}')
start_frames.append(np.nan)
continue
        start_frames.append(epoch_start_frames[int(start_ind[0])])
# if len(epoch_frame_counts)>len(frame_counts):
# logging.warning('Found extra stim presentations. Inferring start frames')
#
# start_frames = []
# for stim_num, fc in enumerate(frame_counts):
#
# print('finding stim start for stim {}'.format(stim_num))
# best_match = np.argmin([np.abs(e-fc) for e in epoch_frame_counts])
# if fc*(1-tolerance) <= epoch_frame_counts[best_match] <= fc*(1+tolerance):
# _ = epoch_frame_counts.pop(best_match)
# start_frame = epoch_start_frames.pop(best_match)
# start_frames.append(start_frame)
# print('found stim start at vsync {}'.format(start_frame))
#
# else:
# logging.error('Could not find matching sync frames for stim {}'.format(stim_num))
# return
#
#
# else:
# start_frames = epoch_start_frames
return start_frames
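# Typical call: sync_dataset comes from the experiment's sync H5 file and the
# expected frame counts are read from the behavior/mapping/replay pkl files
# (the names below are placeholders):
#
#     start_frames = get_frame_offsets(
#         sync_dataset,
#         [behavior_frame_count, mapping_frame_count, replay_frame_count],
#     )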
def get_bad_vsync_indices(sync_dataset):
"""find bad vsyncs if the sync drops data"""
bs_t, bs = get_sync_barcodes(sync_dataset)
barcode_intervals = np.diff(bs_t)
median_barcode_interval = np.median(barcode_intervals)
bad_intervals = np.where(barcode_intervals < 30.95)[0]
bad_barcode_indices = [bi + 1 for bi in bad_intervals]
bad_barcode_intervals = []
for bi in bad_barcode_indices:
# find last good barcode before bad one
last_good_barcode = bi
while last_good_barcode in bad_barcode_indices:
last_good_barcode = last_good_barcode - 1
# find next good barcode after bad one
next_good_barcode = bi
while next_good_barcode in bad_barcode_indices:
next_good_barcode = next_good_barcode + 1
bad_barcode_intervals.append([last_good_barcode, next_good_barcode])
# find the indices for the vsyncs that need to be interpolated
bad_synctime_intervals = [
[bs_t[a], bs_t[b]] for a, b in bad_barcode_intervals
]
time_lost_per_interval = [
(b - a) * median_barcode_interval for a, b in bad_barcode_intervals
]
vsyncs = get_vsyncs(sync_dataset)
vsync_patch_indices = [
[np.searchsorted(vsyncs, a), np.searchsorted(vsyncs, b)]
for a, b in bad_synctime_intervals
]
return vsync_patch_indices, time_lost_per_interval
def patch_vsyncs(sync_dataset, behavior_data, mapping_data, replay_data):
"""Hack to patch bad vsync intervals if sync drops data"""
behavior_vsync_intervals = behavior_data['items']['behavior'][
'intervalsms'
]
mapping_vsync_intervals = mapping_data['intervalsms']
replay_vsync_intervals = replay_data['intervalsms']
concatenated_intervals = (
np.concatenate(
(
behavior_vsync_intervals,
[np.nan],
mapping_vsync_intervals,
[np.nan],
replay_vsync_intervals,
)
)
/ 1000.0
)
vsyncs = get_vsyncs(sync_dataset)
vsync_intervals = np.diff(vsyncs)
bad_vsync_indices, time_lost_per_interval = get_bad_vsync_indices(
sync_dataset
)
for bad_inds, time_lost in zip(bad_vsync_indices, time_lost_per_interval):
bad_ind_start, bad_ind_end = bad_inds
# cut out bad section
vsync_intervals = np.concatenate(
(vsync_intervals[:bad_ind_start], vsync_intervals[bad_ind_end:])
)
# paste in vsyncs from the pickle files
pkl_start_ind = bad_ind_start
pkl_end_ind = bad_ind_start
pkl_time = 0
while pkl_time < time_lost:
pkl_end_ind = pkl_end_ind + 1
pkl_time = np.sum(
concatenated_intervals[pkl_start_ind:pkl_end_ind]
)
vsync_intervals = np.insert(
vsync_intervals,
bad_ind_start,
concatenated_intervals[pkl_start_ind:pkl_end_ind],
)
vsyncs_corrected = vsyncs[0] + np.cumsum(np.insert(vsync_intervals, 0, 0))
return vsyncs_corrected
def get_running_from_pkl(pkl):
key = 'behavior' if 'behavior' in pkl['items'] else 'foraging'
intervals = (
pkl['items']['behavior']['intervalsms']
if 'intervalsms' not in pkl
else pkl['intervalsms']
)
time = np.insert(np.cumsum(intervals), 0, 0) / 1000.0
dx, vsig, vin = [
pkl['items'][key]['encoders'][0][rkey]
for rkey in ('dx', 'vsig', 'vin')
]
run_speed = visual_behavior.analyze.compute_running_speed(
dx[: len(time)], time, vsig[: len(time)], vin[: len(time)]
)
return dx, run_speed
def get_vsyncs(sync_dataset, fallback_line=2):
lines = sync_dataset.line_labels
# look for vsyncs in labels
vsync_line = fallback_line
for line in lines:
if 'vsync' in line:
vsync_line = line
rising_edges = sync_dataset.get_rising_edges(vsync_line, units='seconds')
falling_edges = sync_dataset.get_falling_edges(vsync_line, units='seconds')
# ignore the first falling edge if it isn't preceded by a rising edge
return falling_edges[falling_edges > rising_edges[0]]
def get_stim_starts_ends(sync_dataset, fallback_line=5):
lines = sync_dataset.line_labels
# look for vsyncs in labels
if 'stim_running' in lines:
stim_line = 'stim_running'
else:
stim_line = fallback_line
stim_ons = sync_dataset.get_rising_edges(stim_line, units='seconds')
stim_offs = sync_dataset.get_falling_edges(stim_line, units='seconds')
if stim_offs[0] < stim_ons[0]:
logging.warning('Found extra stim off. Truncating.')
stim_offs = stim_offs[1:]
if len(stim_offs) != len(stim_ons):
logging.warning(
            'Found {} stim starts, but {} stim offs. Sync signal is suspect...'.format(
len(stim_ons), len(stim_offs)
)
)
return stim_ons, stim_offs
def get_diode_times(sync_dataset, fallback_line=4):
lines = sync_dataset.line_labels
diode_line = fallback_line
for line in lines:
if 'photodiode' in line:
diode_line = line
rising_edges = sync_dataset.get_rising_edges(diode_line, units='seconds')
falling_edges = sync_dataset.get_falling_edges(diode_line, units='seconds')
return rising_edges, falling_edges
def get_monitor_lag(syncDataset):
dioder, diodef = get_diode_times(syncDataset)
vf = get_vsyncs(syncDataset)
lag = np.min([np.min(np.abs(d - vf[60])) for d in [diodef, dioder]])
return lag
def get_lick_times(sync_dataset, fallback_line=31):
lines = sync_dataset.line_labels
lick_line = fallback_line
for line in lines:
if 'lick' in line:
lick_line = line
lick_times = sync_dataset.get_rising_edges(lick_line, units='seconds')
return lick_times
### FUNCTIONS TO GET THE FRAME TIMES AND REMOVE DROPPED FRAMES
def extract_lost_frames_from_json(cam_json):
lost_count = cam_json['RecordingReport']['FramesLostCount']
if lost_count == 0:
return []
lost_string = cam_json['RecordingReport']['LostFrames'][0]
lost_spans = lost_string.split(',')
lost_frames = []
for span in lost_spans:
start_end = span.split('-')
if len(start_end) == 1:
lost_frames.append(int(start_end[0]))
else:
lost_frames.extend(
np.arange(int(start_end[0]), int(start_end[1]) + 1)
)
return (
np.array(lost_frames) - 1
) # you have to subtract one since the json starts indexing at 1 according to Totte
def get_frame_exposure_times(sync_dataset, cam_json):
if isinstance(cam_json, str):
cam_json = read_json(cam_json)
exposure_sync_line_label_dict = {
'Eye': 'eye_cam_exposing',
'Face': 'face_cam_exposing',
'Behavior': 'beh_cam_exposing',
}
cam_label = cam_json['RecordingReport']['CameraLabel']
sync_line = exposure_sync_line_label_dict[cam_label]
exposure_times = sync_dataset.get_rising_edges(sync_line, units='seconds')
lost_frames = extract_lost_frames_from_json(cam_json)
frame_times = [
e for ie, e in enumerate(exposure_times) if ie not in lost_frames
]
return np.array(frame_times)
def read_json(jsonfilepath):
with open(jsonfilepath, 'r') as f:
contents = json.load(f)
return contents
###Functions to improve frame syncing###
def partition_vsyncs(sync):
vsync_times = sync.get_falling_edges('vsync_stim', 'seconds')
stimstarts, stimends = get_stim_starts_ends(sync)
final_vsyncs = []
for ie, (start, end) in enumerate(zip(stimstarts, stimends)):
epoch_vsyncs = vsync_times[
(vsync_times >= start) & (vsync_times <= end)
]
final_vsyncs.extend(epoch_vsyncs)
return final_vsyncs
def get_experiment_frame_times(sync, photodiode_cycle=60, method='ccb'):
photodiode_times = np.sort(
np.concatenate(
[
sync.get_rising_edges('stim_photodiode', 'seconds'),
sync.get_falling_edges('stim_photodiode', 'seconds'),
]
)
)
vsync_times = sync.get_falling_edges('vsync_stim', 'seconds')
stimstarts, stimends = get_stim_starts_ends(sync)
ccb_frame_times = []
for ie, (start, end) in enumerate(zip(stimstarts, stimends)):
epoch_vsyncs = vsync_times[
(vsync_times >= start) & (vsync_times <= end)
]
epoch_photodiodes = photodiode_times[
(photodiode_times >= start) & (photodiode_times <= end)
]
frame_duration = estimate_frame_duration(
epoch_photodiodes, cycle=photodiode_cycle
)
ccb_times = get_ccb_frame_times(
epoch_vsyncs,
epoch_photodiodes,
photodiode_cycle,
frame_duration,
method=method,
)
ccb_frame_times.append(ccb_times)
all_ccb_times = np.concatenate(ccb_frame_times)
return all_ccb_times
def get_ccb_frame_times(
vsyncs, photodiode_times, photodiode_cycle, frame_duration, method='ccb'
):
# removes blinking at beginning and end of each stimulus
photodiode_times = trim_border_pulses(photodiode_times, vsyncs)
# not totally sure... correcting for on/off photodiode asymmetry
photodiode_times = correct_on_off_effects(photodiode_times)
# fix blips in the line
photodiode_times = fix_unexpected_edges(
photodiode_times, cycle=photodiode_cycle
)
if method == 'ccb':
return compute_frame_times_ccb(
vsyncs, photodiode_times, frame_duration, 60
)
elif method == 'tech':
return compute_vbn_block_frame_times(
vsyncs, photodiode_times, frame_duration, 60
)
def compute_frame_times_ccb(
vsyncs,
photodiode_times,
frame_duration,
cycle,
):
num_frames = len(vsyncs)
starts = np.zeros(num_frames, dtype=float)
vdiffs = np.diff(vsyncs)
# trim photodiode times to make sure there are no strays at the end
photodiode_times = photodiode_times[: int(np.floor(len(vsyncs) / 60) + 1)]
print('num photodiode intervals used {}'.format(len(photodiode_times)))
for start_index, (start_time, end_time) in enumerate(
zip(photodiode_times[:-1], photodiode_times[1:])
):
interval_duration = end_time - start_time
these_vsyncs = vsyncs[start_index * cycle : (start_index + 1) * cycle]
these_vdiffs = vdiffs[start_index * cycle : (start_index + 1) * cycle]
# frame_duration = np.median(np.diff(these_vsyncs))
long_frame_time = np.sum(these_vdiffs) - cycle * frame_duration
long_frame_time_in_frames = int(
np.round(long_frame_time / frame_duration)
)
extra_time_in_frames = (
int(np.around((interval_duration) / frame_duration)) - cycle
)
extra_monitor_lag_in_frames = 0
if long_frame_time_in_frames < extra_time_in_frames:
print('extra monitor lag detected')
print(long_frame_time_in_frames)
print(extra_time_in_frames)
print(start_time)
extra_monitor_lag_in_frames = (
extra_time_in_frames - long_frame_time_in_frames
)
local_frame_duration = interval_duration / (
cycle + extra_time_in_frames
)
# ideal_vsyncs = np.arange(start_time, end_time, frame_duration)
if abs(extra_time_in_frames) > 0:
# find long frames and shift times accordingly
frame_diffs = np.round(
np.diff(these_vsyncs) / local_frame_duration
)
relative_vsyncs = np.insert(
np.cumsum(frame_diffs * local_frame_duration), 0, 0
)
# assume that if there was a change in monitor lag, it was after the long frame
longest_ind = np.argmax(these_vdiffs) + 1
relative_vsyncs[longest_ind:] += (
extra_monitor_lag_in_frames * local_frame_duration
)
else:
frame_diffs = np.ones(cycle - 1)
relative_vsyncs = np.insert(
np.cumsum(frame_diffs * local_frame_duration), 0, 0
)
starts[start_index * cycle : (start_index + 1) * cycle] = (
relative_vsyncs + start_time
)
# Now deal with leftover frames that occur after the last diode transition
# Just take the global frame duration for these
num_leftover_frames = num_frames - (len(photodiode_times) - 1) * 60
leftover_frames_first_ind = len(starts) - num_leftover_frames #
# leftover_frames_first_ind = len(starts) - np.mod(len(starts), cycle)
these_vsyncs = vsyncs[leftover_frames_first_ind:]
frame_diffs = np.round(np.diff(these_vsyncs) / frame_duration)
print(
'processing {} leftover frames after last diode transition'.format(
len(these_vsyncs)
)
)
relative_vsyncs = np.insert(np.cumsum(frame_diffs * frame_duration), 0, 0)
starts[leftover_frames_first_ind:] = photodiode_times[-1] + relative_vsyncs
return starts
def set_corrected_times(
corrected_frame_times, vsync_slice, start_time, corrected_relevant_vsyncs
):
"""
This method copies the corrected_relevant_vsyncs
over to the corrected_frame_times
Parameters
----------
corrected_frame_times : np.ndarray
The corrected frames
vsync_slice : Slice
The interval to insert the new frames
    start_time : float
The start time of the stimulus
corrected_relevant_vsyncs : np.ndarray
The full list of corrected frames
"""
if vsync_slice.stop < len(corrected_frame_times):
corrected_frame_times[vsync_slice] = (
start_time + corrected_relevant_vsyncs
)
else:
# TODO - is this correct? The lengths and shapes do not always line up
corrected_frame_times[
vsync_slice.start : len(corrected_frame_times)
] = (
start_time
+ corrected_relevant_vsyncs[
0 : len(corrected_frame_times) - vsync_slice.start
]
)
def compute_vbn_block_frame_times(
partitioned_vsync_times: np.ndarray,
partitioned_photodiode_times: np.ndarray,
expected_vsync_duration: float,
num_vsyncs_per_diode_toggle: int = 60,
) -> np.ndarray:
num_vsyncs = len(partitioned_vsync_times)
corrected_frame_times = np.zeros(num_vsyncs, dtype=float)
vsync_durations = np.diff(partitioned_vsync_times)
cycle = num_vsyncs_per_diode_toggle
pd_intervals = zip(
partitioned_photodiode_times[:-1], partitioned_photodiode_times[1:]
)
for pd_interval_ind, (start_time, end_time) in enumerate(pd_intervals):
# Get duration of the current on->off/off->on photodiode interval
pd_interval_duration = end_time - start_time
# Get only vsync event times and vsync interval durations
# associated with current photodiode on/off interval
vsync_slice = slice(
pd_interval_ind * num_vsyncs_per_diode_toggle,
(pd_interval_ind + 1) * num_vsyncs_per_diode_toggle,
)
relevant_vsyncs = partitioned_vsync_times[vsync_slice]
relevant_vsync_durations = vsync_durations[vsync_slice]
# Determine number of "long" vsyncs
# (vsyncs that are double the duration of normal vsyncs)
expected_pd_interval_duration = (
num_vsyncs_per_diode_toggle * expected_vsync_duration
)
excess_pd_interval_duration = (
np.sum(relevant_vsync_durations) - expected_pd_interval_duration
)
# We should only be long by multiples of vsync duration
num_long_vsyncs = int(
np.around(excess_pd_interval_duration / expected_vsync_duration)
)
# Determine total delay (sum of all sources of delay)
# in units of 'vsyncs'
# Total delay changes can only happen in whole 'vsyncs',
# never in fractions of 'vsyncs' (hence rounding)
total_delay = (
int(np.around((pd_interval_duration / expected_vsync_duration)))
- num_vsyncs_per_diode_toggle
)
# If our total_delay is more than we would expect from just long vsyncs
# then extra frame to monitor delay occurred
extra_frame_to_monitor_delay = 0
if total_delay > num_long_vsyncs:
print(
"""Extra delay between frame time
and monitor display time detected"""
)
# Delay attributed to hardware/software factors that delay time
# to monitor display (in units of 'vsyncs') must then be:
extra_frame_to_monitor_delay = total_delay - num_long_vsyncs
# Number of actual frames/vsyncs that would fit
# in a photodiode switch interval
local_expected_vsync_duration = pd_interval_duration / (
num_vsyncs_per_diode_toggle + total_delay
)
if total_delay > 0:
# Correct for variability in vsync times
variance_reduced_frame_diffs = np.round(
np.diff(relevant_vsyncs) / local_expected_vsync_duration
)
# NJM - Want first vsync to happen at diode transition
# NJM - Is the 0th vsync happening before or after first
# photodiode transition? There could be 1-off error (double check)
# Will need to check empirically when implementing
result = (
variance_reduced_frame_diffs * local_expected_vsync_duration
)
corrected_relevant_vsyncs = np.insert(np.cumsum(result), 0, 0)
# Then correct for extra_frame_to_monitor_delay if there was any
# Assume that if there was a change
# in monitor lag, it was after the long frame
longest_ind = np.argmax(relevant_vsync_durations) + 1
corrected_relevant_vsyncs[longest_ind:] += (
extra_frame_to_monitor_delay * local_expected_vsync_duration
)
set_corrected_times(
corrected_frame_times,
vsync_slice,
start_time,
corrected_relevant_vsyncs,
)
else:
frame_diffs = np.ones(cycle - 1)
corrected_relevant_vsyncs = np.insert(
np.cumsum(frame_diffs * local_expected_vsync_duration), 0, 0
)
set_corrected_times(
corrected_frame_times,
vsync_slice,
start_time,
corrected_relevant_vsyncs,
)
# Now deal with leftover vsyncs that occur after the last diode transition
# Just take the global frame duration for these
leftover_vsyncs_start_ind = len(partitioned_vsync_times) - np.mod(
len(partitioned_vsync_times), num_vsyncs_per_diode_toggle
)
relevant_vsyncs = partitioned_vsync_times[leftover_vsyncs_start_ind:]
frame_diffs = np.round(np.diff(relevant_vsyncs) / expected_vsync_duration)
corrected_relevant_vsyncs = np.insert(
np.cumsum(frame_diffs * expected_vsync_duration), 0, 0
)
corrected_frame_times[leftover_vsyncs_start_ind:] = (
partitioned_photodiode_times[-1] + corrected_relevant_vsyncs
)
return corrected_frame_times
def trim_border_pulses(
pd_times, vs_times, frame_interval=1 / 60, num_frames=3
):
pd_times = np.array(pd_times)
pd_times = pd_times[
np.logical_and(
pd_times >= vs_times[0],
pd_times <= vs_times[-1] + num_frames * frame_interval,
)
]
print('last photodiode transition {}'.format(pd_times[-1]))
print(
'last vsyncs time plus buffer {}'.format(
vs_times[-1] + num_frames * frame_interval
)
)
print(
'num expected diode transitions {}'.format(
np.floor(len(vs_times) / 60) + 1
)
)
print('num actual diode transitions {}'.format(len(pd_times)))
return pd_times
def correct_on_off_effects(pd_times):
"""
Notes
-----
    This cannot (without additional info) determine whether an asymmetric
offset is odd-long or even-long.
"""
pd_diff = np.diff(pd_times)
odd_diff_mean, odd_diff_std = trimmed_stats(pd_diff[1::2])
even_diff_mean, even_diff_std = trimmed_stats(pd_diff[0::2])
half_diff = np.diff(pd_times[0::2])
full_period_mean, full_period_std = trimmed_stats(half_diff)
half_period_mean = full_period_mean / 2
odd_offset = odd_diff_mean - half_period_mean
even_offset = even_diff_mean - half_period_mean
pd_times[::2] -= odd_offset / 2
pd_times[1::2] -= even_offset / 2
return pd_times
def flag_unexpected_edges(pd_times, ndevs=10):
pd_diff = np.diff(pd_times)
diff_mean, diff_std = trimmed_stats(pd_diff)
expected_duration_mask = np.ones(pd_diff.size)
expected_duration_mask[
np.logical_or(
pd_diff < diff_mean - ndevs * diff_std,
pd_diff > diff_mean + ndevs * diff_std,
)
] = 0
expected_duration_mask[1:] = np.logical_and(
expected_duration_mask[:-1], expected_duration_mask[1:]
)
expected_duration_mask = np.concatenate(
[expected_duration_mask, [expected_duration_mask[-1]]]
)
return expected_duration_mask
def fix_unexpected_edges(pd_times, ndevs=10, cycle=60, max_frame_offset=4):
pd_times = np.array(pd_times)
expected_duration_mask = flag_unexpected_edges(pd_times, ndevs=ndevs)
diff_mean, diff_std = trimmed_stats(np.diff(pd_times))
frame_interval = diff_mean / cycle
bad_edges = np.where(expected_duration_mask == 0)[0]
bad_blocks = np.sort(
np.unique(
np.concatenate(
[
[0],
np.where(np.diff(bad_edges) > 1)[0] + 1,
[len(bad_edges)],
]
)
)
)
output_edges = []
for low, high in zip(bad_blocks[:-1], bad_blocks[1:]):
current_bad_edge_indices = bad_edges[low : high - 1]
current_bad_edges = pd_times[current_bad_edge_indices]
low_bound = pd_times[current_bad_edge_indices[0]]
high_bound = pd_times[current_bad_edge_indices[-1] + 1]
edges_missing = int(np.around((high_bound - low_bound) / diff_mean))
expected = np.linspace(low_bound, high_bound, edges_missing + 1)
distances = distance.cdist(
current_bad_edges[:, None], expected[:, None]
)
distances = np.around(distances / frame_interval).astype(int)
min_offsets = np.amin(distances, axis=0)
min_offset_indices = np.argmin(distances, axis=0)
output_edges = np.concatenate(
[
output_edges,
expected[min_offsets > max_frame_offset],
current_bad_edges[
min_offset_indices[min_offsets <= max_frame_offset]
],
]
)
return np.sort(
np.concatenate([output_edges, pd_times[expected_duration_mask > 0]])
)
def trimmed_stats(data, pctiles=(10, 90)):
low = np.percentile(data, pctiles[0])
high = np.percentile(data, pctiles[1])
trimmed = data[np.logical_and(data <= high, data >= low)]
return np.mean(trimmed), np.std(trimmed)
def estimate_frame_duration(pd_times, cycle=60):
return trimmed_stats(np.diff(pd_times))[0] / cycle
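# --- Example (synthetic data) --------------------------------------------------
# A quick sanity check of the photodiode helpers above, using fabricated
# once-per-second transition times rather than a real sync file:
#
#     pd_times = np.cumsum(np.ones(100)) + np.random.normal(0, 1e-4, 100)
#     mean_interval, interval_std = trimmed_stats(np.diff(pd_times))
#     frame_duration = estimate_frame_duration(pd_times, cycle=60)
#
# With one diode transition per second and a 60-frame cycle, frame_duration
# comes out close to 1 / 60 s.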
|
PypiClean
|
/scikit-bot-0.14.0.tar.gz/scikit-bot-0.14.0/skbot/ignition/sdformat/bindings/v14/scene.py
|
from dataclasses import dataclass, field
from typing import Optional
__NAMESPACE__ = "sdformat/v1.4/scene.xsd"
@dataclass
class Scene:
"""
Specifies the look of the environment.
Parameters
----------
ambient: Color of the ambient light.
background: Color of the background.
sky: Properties for the sky
shadows: Enable/disable shadows
fog: Controls fog
grid: Enable/disable the grid
"""
class Meta:
name = "scene"
ambient: str = field(
default="0.4 0.4 0.4 1.0",
metadata={
"type": "Element",
"namespace": "",
"required": True,
"pattern": r"(\s*\+?(\d+(\.\d*)?|\.\d+|\d+\.\d+[eE][-\+]?[0-9]+)\s+){3}\+?(\d+(\.\d*)?|\.\d+|\d+\.\d+[eE][-\+]?[0-9]+)\s*",
},
)
background: str = field(
default=".7 .7 .7 1",
metadata={
"type": "Element",
"namespace": "",
"required": True,
"pattern": r"(\s*\+?(\d+(\.\d*)?|\.\d+|\d+\.\d+[eE][-\+]?[0-9]+)\s+){3}\+?(\d+(\.\d*)?|\.\d+|\d+\.\d+[eE][-\+]?[0-9]+)\s*",
},
)
sky: Optional["Scene.Sky"] = field(
default=None,
metadata={
"type": "Element",
"namespace": "",
},
)
shadows: bool = field(
default=True,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
fog: Optional["Scene.Fog"] = field(
default=None,
metadata={
"type": "Element",
"namespace": "",
},
)
grid: bool = field(
default=True,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
@dataclass
class Sky:
"""
Properties for the sky.
Parameters
----------
time: Time of day [0..24]
sunrise: Sunrise time [0..24]
sunset: Sunset time [0..24]
        clouds: Properties for the clouds
"""
time: float = field(
default=10.0,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
sunrise: float = field(
default=6.0,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
sunset: float = field(
default=20.0,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
clouds: Optional["Scene.Sky.Clouds"] = field(
default=None,
metadata={
"type": "Element",
"namespace": "",
},
)
@dataclass
class Clouds:
"""
            Properties for the clouds.
Parameters
----------
speed: Speed of the clouds
direction: Direction of the cloud movement
humidity: Density of clouds
mean_size: Average size of the clouds
ambient: Ambient cloud color
"""
speed: float = field(
default=0.6,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
direction: float = field(
default=0.0,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
humidity: float = field(
default=0.5,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
mean_size: float = field(
default=0.5,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
ambient: str = field(
default=".8 .8 .8 1",
metadata={
"type": "Element",
"namespace": "",
"required": True,
"pattern": r"(\s*\+?(\d+(\.\d*)?|\.\d+|\d+\.\d+[eE][-\+]?[0-9]+)\s+){3}\+?(\d+(\.\d*)?|\.\d+|\d+\.\d+[eE][-\+]?[0-9]+)\s*",
},
)
@dataclass
class Fog:
"""
Controls fog.
Parameters
----------
color: Fog color
type: Fog type: constant, linear, quadratic
start: Distance to start of fog
end: Distance to end of fog
density: Density of fog
"""
color: str = field(
default="1 1 1 1",
metadata={
"type": "Element",
"namespace": "",
"required": True,
"pattern": r"(\s*\+?(\d+(\.\d*)?|\.\d+|\d+\.\d+[eE][-\+]?[0-9]+)\s+){3}\+?(\d+(\.\d*)?|\.\d+|\d+\.\d+[eE][-\+]?[0-9]+)\s*",
},
)
type: str = field(
default="none",
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
start: float = field(
default=1.0,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
end: float = field(
default=100.0,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
density: float = field(
default=1.0,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
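# Construction sketch: building a scene with fog enabled. All values below are
# illustrative and are not taken from any particular world file.
if __name__ == "__main__":
    example_scene = Scene(
        ambient="0.3 0.3 0.3 1.0",
        shadows=False,
        fog=Scene.Fog(color="0.8 0.8 0.8 1", type="linear", start=5.0, end=50.0),
    )
    print(example_scene)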
|
PypiClean
|
/alipay-sdk-python-pycryptodome-3.3.202.tar.gz/alipay-sdk-python-pycryptodome-3.3.202/alipay/aop/api/domain/AntMerchantExpandShopModifyModel.py
|
import json
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.AddressInfo import AddressInfo
from alipay.aop.api.domain.ShopBusinessTime import ShopBusinessTime
from alipay.aop.api.domain.ContactInfo import ContactInfo
from alipay.aop.api.domain.ShopExtInfo import ShopExtInfo
from alipay.aop.api.domain.IndustryQualificationInfo import IndustryQualificationInfo
class AntMerchantExpandShopModifyModel(object):
def __init__(self):
self._brand_id = None
self._business_address = None
self._business_time = None
self._cert_image = None
self._cert_name = None
self._cert_no = None
self._cert_type = None
self._contact_infos = None
self._contact_mobile = None
self._contact_phone = None
self._ext_infos = None
self._ip_role_id = None
self._legal_cert_no = None
self._legal_name = None
self._license_auth_letter_image = None
self._memo = None
self._out_door_images = None
self._qualifications = None
self._scene = None
self._settle_alipay_logon_id = None
self._shop_category = None
self._shop_id = None
self._shop_name = None
self._store_id = None
@property
def brand_id(self):
return self._brand_id
@brand_id.setter
def brand_id(self, value):
self._brand_id = value
@property
def business_address(self):
return self._business_address
@business_address.setter
def business_address(self, value):
if isinstance(value, AddressInfo):
self._business_address = value
else:
self._business_address = AddressInfo.from_alipay_dict(value)
@property
def business_time(self):
return self._business_time
@business_time.setter
def business_time(self, value):
if isinstance(value, list):
self._business_time = list()
for i in value:
if isinstance(i, ShopBusinessTime):
self._business_time.append(i)
else:
self._business_time.append(ShopBusinessTime.from_alipay_dict(i))
@property
def cert_image(self):
return self._cert_image
@cert_image.setter
def cert_image(self, value):
self._cert_image = value
@property
def cert_name(self):
return self._cert_name
@cert_name.setter
def cert_name(self, value):
self._cert_name = value
@property
def cert_no(self):
return self._cert_no
@cert_no.setter
def cert_no(self, value):
self._cert_no = value
@property
def cert_type(self):
return self._cert_type
@cert_type.setter
def cert_type(self, value):
self._cert_type = value
@property
def contact_infos(self):
return self._contact_infos
@contact_infos.setter
def contact_infos(self, value):
if isinstance(value, ContactInfo):
self._contact_infos = value
else:
self._contact_infos = ContactInfo.from_alipay_dict(value)
@property
def contact_mobile(self):
return self._contact_mobile
@contact_mobile.setter
def contact_mobile(self, value):
self._contact_mobile = value
@property
def contact_phone(self):
return self._contact_phone
@contact_phone.setter
def contact_phone(self, value):
self._contact_phone = value
@property
def ext_infos(self):
return self._ext_infos
@ext_infos.setter
def ext_infos(self, value):
if isinstance(value, list):
self._ext_infos = list()
for i in value:
if isinstance(i, ShopExtInfo):
self._ext_infos.append(i)
else:
self._ext_infos.append(ShopExtInfo.from_alipay_dict(i))
@property
def ip_role_id(self):
return self._ip_role_id
@ip_role_id.setter
def ip_role_id(self, value):
self._ip_role_id = value
@property
def legal_cert_no(self):
return self._legal_cert_no
@legal_cert_no.setter
def legal_cert_no(self, value):
self._legal_cert_no = value
@property
def legal_name(self):
return self._legal_name
@legal_name.setter
def legal_name(self, value):
self._legal_name = value
@property
def license_auth_letter_image(self):
return self._license_auth_letter_image
@license_auth_letter_image.setter
def license_auth_letter_image(self, value):
self._license_auth_letter_image = value
@property
def memo(self):
return self._memo
@memo.setter
def memo(self, value):
self._memo = value
@property
def out_door_images(self):
return self._out_door_images
@out_door_images.setter
def out_door_images(self, value):
if isinstance(value, list):
self._out_door_images = list()
for i in value:
self._out_door_images.append(i)
@property
def qualifications(self):
return self._qualifications
@qualifications.setter
def qualifications(self, value):
if isinstance(value, list):
self._qualifications = list()
for i in value:
if isinstance(i, IndustryQualificationInfo):
self._qualifications.append(i)
else:
self._qualifications.append(IndustryQualificationInfo.from_alipay_dict(i))
@property
def scene(self):
return self._scene
@scene.setter
def scene(self, value):
self._scene = value
@property
def settle_alipay_logon_id(self):
return self._settle_alipay_logon_id
@settle_alipay_logon_id.setter
def settle_alipay_logon_id(self, value):
self._settle_alipay_logon_id = value
@property
def shop_category(self):
return self._shop_category
@shop_category.setter
def shop_category(self, value):
self._shop_category = value
@property
def shop_id(self):
return self._shop_id
@shop_id.setter
def shop_id(self, value):
self._shop_id = value
@property
def shop_name(self):
return self._shop_name
@shop_name.setter
def shop_name(self, value):
self._shop_name = value
@property
def store_id(self):
return self._store_id
@store_id.setter
def store_id(self, value):
self._store_id = value
def to_alipay_dict(self):
params = dict()
if self.brand_id:
if hasattr(self.brand_id, 'to_alipay_dict'):
params['brand_id'] = self.brand_id.to_alipay_dict()
else:
params['brand_id'] = self.brand_id
if self.business_address:
if hasattr(self.business_address, 'to_alipay_dict'):
params['business_address'] = self.business_address.to_alipay_dict()
else:
params['business_address'] = self.business_address
if self.business_time:
if isinstance(self.business_time, list):
for i in range(0, len(self.business_time)):
element = self.business_time[i]
if hasattr(element, 'to_alipay_dict'):
self.business_time[i] = element.to_alipay_dict()
if hasattr(self.business_time, 'to_alipay_dict'):
params['business_time'] = self.business_time.to_alipay_dict()
else:
params['business_time'] = self.business_time
if self.cert_image:
if hasattr(self.cert_image, 'to_alipay_dict'):
params['cert_image'] = self.cert_image.to_alipay_dict()
else:
params['cert_image'] = self.cert_image
if self.cert_name:
if hasattr(self.cert_name, 'to_alipay_dict'):
params['cert_name'] = self.cert_name.to_alipay_dict()
else:
params['cert_name'] = self.cert_name
if self.cert_no:
if hasattr(self.cert_no, 'to_alipay_dict'):
params['cert_no'] = self.cert_no.to_alipay_dict()
else:
params['cert_no'] = self.cert_no
if self.cert_type:
if hasattr(self.cert_type, 'to_alipay_dict'):
params['cert_type'] = self.cert_type.to_alipay_dict()
else:
params['cert_type'] = self.cert_type
if self.contact_infos:
if hasattr(self.contact_infos, 'to_alipay_dict'):
params['contact_infos'] = self.contact_infos.to_alipay_dict()
else:
params['contact_infos'] = self.contact_infos
if self.contact_mobile:
if hasattr(self.contact_mobile, 'to_alipay_dict'):
params['contact_mobile'] = self.contact_mobile.to_alipay_dict()
else:
params['contact_mobile'] = self.contact_mobile
if self.contact_phone:
if hasattr(self.contact_phone, 'to_alipay_dict'):
params['contact_phone'] = self.contact_phone.to_alipay_dict()
else:
params['contact_phone'] = self.contact_phone
if self.ext_infos:
if isinstance(self.ext_infos, list):
for i in range(0, len(self.ext_infos)):
element = self.ext_infos[i]
if hasattr(element, 'to_alipay_dict'):
self.ext_infos[i] = element.to_alipay_dict()
if hasattr(self.ext_infos, 'to_alipay_dict'):
params['ext_infos'] = self.ext_infos.to_alipay_dict()
else:
params['ext_infos'] = self.ext_infos
if self.ip_role_id:
if hasattr(self.ip_role_id, 'to_alipay_dict'):
params['ip_role_id'] = self.ip_role_id.to_alipay_dict()
else:
params['ip_role_id'] = self.ip_role_id
if self.legal_cert_no:
if hasattr(self.legal_cert_no, 'to_alipay_dict'):
params['legal_cert_no'] = self.legal_cert_no.to_alipay_dict()
else:
params['legal_cert_no'] = self.legal_cert_no
if self.legal_name:
if hasattr(self.legal_name, 'to_alipay_dict'):
params['legal_name'] = self.legal_name.to_alipay_dict()
else:
params['legal_name'] = self.legal_name
if self.license_auth_letter_image:
if hasattr(self.license_auth_letter_image, 'to_alipay_dict'):
params['license_auth_letter_image'] = self.license_auth_letter_image.to_alipay_dict()
else:
params['license_auth_letter_image'] = self.license_auth_letter_image
if self.memo:
if hasattr(self.memo, 'to_alipay_dict'):
params['memo'] = self.memo.to_alipay_dict()
else:
params['memo'] = self.memo
if self.out_door_images:
if isinstance(self.out_door_images, list):
for i in range(0, len(self.out_door_images)):
element = self.out_door_images[i]
if hasattr(element, 'to_alipay_dict'):
self.out_door_images[i] = element.to_alipay_dict()
if hasattr(self.out_door_images, 'to_alipay_dict'):
params['out_door_images'] = self.out_door_images.to_alipay_dict()
else:
params['out_door_images'] = self.out_door_images
if self.qualifications:
if isinstance(self.qualifications, list):
for i in range(0, len(self.qualifications)):
element = self.qualifications[i]
if hasattr(element, 'to_alipay_dict'):
self.qualifications[i] = element.to_alipay_dict()
if hasattr(self.qualifications, 'to_alipay_dict'):
params['qualifications'] = self.qualifications.to_alipay_dict()
else:
params['qualifications'] = self.qualifications
if self.scene:
if hasattr(self.scene, 'to_alipay_dict'):
params['scene'] = self.scene.to_alipay_dict()
else:
params['scene'] = self.scene
if self.settle_alipay_logon_id:
if hasattr(self.settle_alipay_logon_id, 'to_alipay_dict'):
params['settle_alipay_logon_id'] = self.settle_alipay_logon_id.to_alipay_dict()
else:
params['settle_alipay_logon_id'] = self.settle_alipay_logon_id
if self.shop_category:
if hasattr(self.shop_category, 'to_alipay_dict'):
params['shop_category'] = self.shop_category.to_alipay_dict()
else:
params['shop_category'] = self.shop_category
if self.shop_id:
if hasattr(self.shop_id, 'to_alipay_dict'):
params['shop_id'] = self.shop_id.to_alipay_dict()
else:
params['shop_id'] = self.shop_id
if self.shop_name:
if hasattr(self.shop_name, 'to_alipay_dict'):
params['shop_name'] = self.shop_name.to_alipay_dict()
else:
params['shop_name'] = self.shop_name
if self.store_id:
if hasattr(self.store_id, 'to_alipay_dict'):
params['store_id'] = self.store_id.to_alipay_dict()
else:
params['store_id'] = self.store_id
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = AntMerchantExpandShopModifyModel()
if 'brand_id' in d:
o.brand_id = d['brand_id']
if 'business_address' in d:
o.business_address = d['business_address']
if 'business_time' in d:
o.business_time = d['business_time']
if 'cert_image' in d:
o.cert_image = d['cert_image']
if 'cert_name' in d:
o.cert_name = d['cert_name']
if 'cert_no' in d:
o.cert_no = d['cert_no']
if 'cert_type' in d:
o.cert_type = d['cert_type']
if 'contact_infos' in d:
o.contact_infos = d['contact_infos']
if 'contact_mobile' in d:
o.contact_mobile = d['contact_mobile']
if 'contact_phone' in d:
o.contact_phone = d['contact_phone']
if 'ext_infos' in d:
o.ext_infos = d['ext_infos']
if 'ip_role_id' in d:
o.ip_role_id = d['ip_role_id']
if 'legal_cert_no' in d:
o.legal_cert_no = d['legal_cert_no']
if 'legal_name' in d:
o.legal_name = d['legal_name']
if 'license_auth_letter_image' in d:
o.license_auth_letter_image = d['license_auth_letter_image']
if 'memo' in d:
o.memo = d['memo']
if 'out_door_images' in d:
o.out_door_images = d['out_door_images']
if 'qualifications' in d:
o.qualifications = d['qualifications']
if 'scene' in d:
o.scene = d['scene']
if 'settle_alipay_logon_id' in d:
o.settle_alipay_logon_id = d['settle_alipay_logon_id']
if 'shop_category' in d:
o.shop_category = d['shop_category']
if 'shop_id' in d:
o.shop_id = d['shop_id']
if 'shop_name' in d:
o.shop_name = d['shop_name']
if 'store_id' in d:
o.store_id = d['store_id']
return o
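# Usage sketch (placeholder values; real requests carry actual merchant data):
if __name__ == "__main__":
    model = AntMerchantExpandShopModifyModel()
    model.shop_id = "2020123456789"
    model.shop_name = "Example Shop"
    model.memo = "updated opening hours"
    params = model.to_alipay_dict()
    print(params)
    # rebuild the model from a plain dict, e.g. one parsed from a JSON payload
    rebuilt = AntMerchantExpandShopModifyModel.from_alipay_dict(params)
    print(rebuilt.shop_name)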
|
PypiClean
|
/fetch_data-0.2.5.2.tar.gz/fetch_data-0.2.5.2/docs/index.rst
|
.. gcm-filters documentation master file, created by
sphinx-quickstart on Tue Jan 12 09:24:23 2021.
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
Welcome to fetch-data
=====================
Fetch-data is a simple tool for quickly downloading data from various sources.
It is a package I've developed for my own needs, so it does not yet have
extensive test coverage. :code:`fetch-data` relies on two established packages,
:code:`pooch` and :code:`fsspec`, combined to make the downloading process
quick and easy.
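Under the hood, downloading and caching are delegated to :code:`pooch` (and remote
file systems to :code:`fsspec`). The snippet below is an illustration of the kind of
call being wrapped, not the :code:`fetch-data` API itself; the URL is a placeholder
and checksum verification is skipped.
.. code-block:: python

    import pooch

    # Placeholder URL; known_hash=None skips checksum verification in this sketch.
    local_file = pooch.retrieve(
        url="https://example.com/data/sample.nc",
        known_hash=None,
    )
    print(local_file)  # path to the locally cached copy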
Installation
------------
.. code-block:: bash
pip install fetch-data
Contents
--------
.. toctree::
:maxdepth: 2
:caption: Usage examples
usage
.. toctree::
:maxdepth: 2
:caption: Auto API
api
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
.. _reStructuredText: https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html
|
PypiClean
|
/django-simple-utilities-0.8.tar.gz/django-simple-utilities-0.8/utilities/static/utilities/js/models/tinymce/jscripts/tiny_mce/plugins/fullpage/editor_plugin_src.js
|
(function() {
tinymce.create('tinymce.plugins.FullPagePlugin', {
init : function(ed, url) {
var t = this;
t.editor = ed;
// Register commands
ed.addCommand('mceFullPageProperties', function() {
ed.windowManager.open({
file : url + '/fullpage.htm',
width : 430 + parseInt(ed.getLang('fullpage.delta_width', 0)),
height : 495 + parseInt(ed.getLang('fullpage.delta_height', 0)),
inline : 1
}, {
plugin_url : url,
head_html : t.head
});
});
// Register buttons
ed.addButton('fullpage', {title : 'fullpage.desc', cmd : 'mceFullPageProperties'});
ed.onBeforeSetContent.add(t._setContent, t);
ed.onSetContent.add(t._setBodyAttribs, t);
ed.onGetContent.add(t._getContent, t);
},
getInfo : function() {
return {
longname : 'Fullpage',
author : 'Moxiecode Systems AB',
authorurl : 'http://tinymce.moxiecode.com',
infourl : 'http://wiki.moxiecode.com/index.php/TinyMCE:Plugins/fullpage',
version : tinymce.majorVersion + "." + tinymce.minorVersion
};
},
// Private plugin internal methods
_setBodyAttribs : function(ed, o) {
var bdattr, i, len, kv, k, v, t, attr = this.head.match(/body(.*?)>/i);
if (attr && attr[1]) {
bdattr = attr[1].match(/\s*(\w+\s*=\s*".*?"|\w+\s*=\s*'.*?'|\w+\s*=\s*\w+|\w+)\s*/g);
if (bdattr) {
for(i = 0, len = bdattr.length; i < len; i++) {
kv = bdattr[i].split('=');
k = kv[0].replace(/\s/,'');
v = kv[1];
if (v) {
v = v.replace(/^\s+/,'').replace(/\s+$/,'');
t = v.match(/^["'](.*)["']$/);
if (t)
v = t[1];
} else
v = k;
ed.dom.setAttrib(ed.getBody(), 'style', v);
}
}
}
},
_createSerializer : function() {
return new tinymce.dom.Serializer({
dom : this.editor.dom,
apply_source_formatting : true
});
},
_setContent : function(ed, o) {
var t = this, sp, ep, c = o.content, v, st = '';
// Ignore raw updated if we already have a head, this will fix issues with undo/redo keeping the head/foot separate
if (o.format == 'raw' && t.head)
return;
if (o.source_view && ed.getParam('fullpage_hide_in_source_view'))
return;
// Parse out head, body and footer
c = c.replace(/<(\/?)BODY/gi, '<$1body');
sp = c.indexOf('<body');
if (sp != -1) {
sp = c.indexOf('>', sp);
t.head = c.substring(0, sp + 1);
ep = c.indexOf('</body', sp);
if (ep == -1)
ep = c.indexOf('</body', ep);
o.content = c.substring(sp + 1, ep);
t.foot = c.substring(ep);
function low(s) {
return s.replace(/<\/?[A-Z]+/g, function(a) {
return a.toLowerCase();
})
};
t.head = low(t.head);
t.foot = low(t.foot);
} else {
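// No <body> tag found: synthesize a default head and foot from the fullpage_default_* settings below.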
t.head = '';
if (ed.getParam('fullpage_default_xml_pi'))
t.head += '<?xml version="1.0" encoding="' + ed.getParam('fullpage_default_encoding', 'ISO-8859-1') + '" ?>\n';
t.head += ed.getParam('fullpage_default_doctype', '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">');
t.head += '\n<html>\n<head>\n<title>' + ed.getParam('fullpage_default_title', 'Untitled document') + '</title>\n';
if (v = ed.getParam('fullpage_default_encoding'))
t.head += '<meta http-equiv="Content-Type" content="' + v + '" />\n';
if (v = ed.getParam('fullpage_default_font_family'))
st += 'font-family: ' + v + ';';
if (v = ed.getParam('fullpage_default_font_size'))
st += 'font-size: ' + v + ';';
if (v = ed.getParam('fullpage_default_text_color'))
st += 'color: ' + v + ';';
t.head += '</head>\n<body' + (st ? ' style="' + st + '"' : '') + '>\n';
t.foot = '\n</body>\n</html>';
}
},
_getContent : function(ed, o) {
var t = this;
if (!o.source_view || !ed.getParam('fullpage_hide_in_source_view'))
o.content = tinymce.trim(t.head) + '\n' + tinymce.trim(o.content) + '\n' + tinymce.trim(t.foot);
}
});
// Register plugin
tinymce.PluginManager.add('fullpage', tinymce.plugins.FullPagePlugin);
})();
|
PypiClean
|
/adversarial_robustness_toolbox-1.15.1-py3-none-any.whl/art/metrics/privacy/worst_case_mia_score.py
|
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
from typing import Optional, List, Tuple, Union
import numpy as np
from sklearn.metrics import roc_curve
TPR = float # True Positive Rate
FPR = float # False Positive Rate
THR = float # Threshold of the binary decision
def _calculate_roc_for_fpr(y_true: np.ndarray, y_proba: np.ndarray, targeted_fpr: float) -> Tuple[FPR, TPR, THR]:
"""
Get FPR, TPR, and THRESHOLD based on the targeted_fpr (such that FPR <= targeted_fpr).
:param y_true: True attack labels.
:param y_proba: Predicted attack probabilities.
:param targeted_fpr: The targeted False Positive Rate; the ROC point is chosen so that the achieved FPR does not exceed it.
:return: tuple that contains (Achieved FPR, TPR, Threshold).
"""
fpr, tpr, thr = roc_curve(y_true=y_true, y_score=y_proba)
# take the largest fpr that satisfies fpr <= targeted_fpr, and the corresponding tpr and threshold
if np.isnan(fpr).all() or np.isnan(tpr).all():
logging.error("TPR or FPR values are NaN")
raise ValueError("The targeted FPR can't be achieved.")
targeted_fpr_idx = np.where(fpr <= targeted_fpr)[0][-1]
return fpr[targeted_fpr_idx], tpr[targeted_fpr_idx], thr[targeted_fpr_idx]
def get_roc_for_fpr(
attack_proba: np.ndarray,
attack_true: np.ndarray,
target_model_labels: Optional[np.ndarray] = None,
targeted_fpr: float = 0.001,
) -> Union[List[Tuple[FPR, TPR, THR]], List[Tuple[int, FPR, TPR, THR]]]:
"""
Compute the attack TPR, THRESHOLD and achieved FPR based on the targeted FPR. This implementation supports only
binary attack prediction labels {0,1}. The returned THRESHOLD defines the decision threshold on the attack
probabilities (meaning if p < THRESHOLD predict 0, otherwise predict 1)
| Related paper link: https://arxiv.org/abs/2112.03570
:param attack_proba: Predicted attack probabilities.
:param attack_true: True attack labels.
:param targeted_fpr: The targeted False Positive Rate; the attack TPR and threshold will be calculated based on this
FPR. If not supplied, get_roc_for_fpr will be computed for a `0.001` FPR.
:param target_model_labels: Original labels; if provided, the FPR, TPR, and threshold will be calculated for each
class separately.
:return: List of tuples that contain (original label (only if target_model_labels is provided),
achieved FPR, TPR, threshold).
"""
if attack_proba.shape[0] != attack_true.shape[0]:
raise ValueError("Number of rows in attack_pred and attack_true do not match")
if target_model_labels is not None and attack_proba.shape[0] != target_model_labels.shape[0]:
raise ValueError("Number of rows in target_model_labels and attack_pred do not match")
results = []
if target_model_labels is not None:
values, _ = np.unique(target_model_labels, return_counts=True)
for value in values:
idxs = np.where(target_model_labels == value)[0]
fpr, tpr, thr = _calculate_roc_for_fpr(
y_proba=attack_proba[idxs], y_true=attack_true[idxs], targeted_fpr=targeted_fpr
)
results.append((value, fpr, tpr, thr))
return results
fpr, tpr, thr = _calculate_roc_for_fpr(y_proba=attack_proba, y_true=attack_true, targeted_fpr=targeted_fpr)
return [(fpr, tpr, thr)]
def get_roc_for_multi_fprs(
attack_proba: np.ndarray,
attack_true: np.ndarray,
targeted_fprs: np.ndarray,
) -> Tuple[List[FPR], List[TPR], List[THR]]:
"""
Compute the attack ROC based on the targeted FPRs. This implementation supports only binary
attack prediction labels. The returned list of THRESHOLDs defines the decision threshold on the attack
probabilities (meaning if p < THRESHOLD predict 0, otherwise predict 1) for each provided fpr
| Related paper link: https://arxiv.org/abs/2112.03570
:param attack_proba: Predicted attack probabilities.
:param attack_true: True attack labels.
:param targeted_fprs: The set of targeted FPRs (False Positive Rates); the attack TPRs and thresholds will be
calculated based on these FPRs.
:return: Tuple of three lists (achieved FPRs, TPRs, thresholds), one entry per targeted FPR.
"""
if attack_proba.shape[0] != attack_true.shape[0]:
raise ValueError("Number of rows in attack_pred and attack_true do not match")
tpr = []
thr = []
fpr = []
for t_fpr in targeted_fprs:
res = _calculate_roc_for_fpr(y_proba=attack_proba, y_true=attack_true, targeted_fpr=t_fpr)
fpr.append(res[0])
tpr.append(res[1])
thr.append(res[2])
return fpr, tpr, thr
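Illustration only: a minimal sketch exercising the two helpers above with made-up attack scores and labels, assuming the functions are in scope.
import numpy as np

# Hypothetical membership-inference attack outputs: 1 = "member", 0 = "non-member".
attack_true = np.array([0, 0, 0, 0, 1, 1, 1, 1])
attack_proba = np.array([0.05, 0.20, 0.35, 0.60, 0.55, 0.70, 0.85, 0.95])

# Single operating point: the largest achievable FPR that stays at or below 25%.
print(get_roc_for_fpr(attack_proba=attack_proba, attack_true=attack_true, targeted_fpr=0.25))

# Low-FPR ROC segment over several targeted FPRs.
fprs, tprs, thrs = get_roc_for_multi_fprs(
    attack_proba=attack_proba,
    attack_true=attack_true,
    targeted_fprs=np.array([0.0, 0.25, 0.5]),
)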
|
PypiClean
|
/ansys_meshing_prime-0.4.0.tar.gz/ansys_meshing_prime-0.4.0/src/ansys/meshing/prime/internals/json_utils.py
|
import json
from typing import Union
import numpy as np
import ansys.meshing.prime.internals.config as config
import ansys.meshing.prime.relaxed_json as relaxed_json
__all__ = ['loads', 'dumps']
def try_process_as_iterable(obj):
"""Try if an object is an iterable and return its list.
Parameters
----------
obj : Any
Object to test.
Returns
-------
List
The object converted to a list (raises TypeError if the object is not iterable).
"""
iterable = iter(obj)
return list(iterable)
def try_process_numpy_array(obj):
"""Try if an object is a numpy array and return its list.
Parameters
----------
obj : Any
Object to test.
Returns
-------
bool, list
Whether the object is a numpy array, and the object converted to a list if it is (unchanged otherwise).
"""
if isinstance(obj, np.ndarray):
return True, obj.tolist()
return False, obj
class _CustomJSONEncoder(json.JSONEncoder):
def default(self, obj):
success, obj = try_process_numpy_array(obj)
if success:
return obj
try:
return try_process_as_iterable(obj)
except TypeError:
pass
return super().default(obj)
class _CustomBinaryJSONEncoder(relaxed_json.JSONEncoder):
def default(self, obj):
try:
return try_process_as_iterable(obj)
except TypeError:
pass
return super().default(obj)
def loads(s: Union[str, bytes, bytearray], *args, **kwargs):
"""Load JSON from an input string.
Parameters
----------
s : Union[str, bytes, bytearray]
Input string.
Returns
-------
Any
Python object decoded from the JSON input.
"""
if config.is_optimizing_numpy_arrays():
return relaxed_json.loads(s, *args, **kwargs)
return json.loads(s, *args, **kwargs)
def dumps(obj, *args, **kwargs):
"""Dump JSON to an object.
Parameters
----------
obj : Any
Input object.
Returns
-------
str
JSON string representation of the object.
"""
if config.is_optimizing_numpy_arrays():
kwargs.setdefault('cls', _CustomBinaryJSONEncoder)
return relaxed_json.dumps(obj, *args, **kwargs)
kwargs.setdefault('cls', _CustomJSONEncoder)
return json.dumps(obj, *args, **kwargs)
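Illustration only: a minimal round trip through loads/dumps above, assuming the default plain-JSON path (config.is_optimizing_numpy_arrays() returning False) and that the functions are in scope.
import numpy as np

# Hypothetical payload mixing a numpy array with another non-JSON-native iterable.
payload = {"coords": np.arange(6, dtype=float).reshape(2, 3), "ids": range(3)}

text = dumps(payload)   # numpy arrays and other iterables are emitted as JSON lists
data = loads(text)
print(data["coords"])   # [[0.0, 1.0, 2.0], [3.0, 4.0, 5.0]]
print(data["ids"])      # [0, 1, 2]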
|
PypiClean
|