id
stringlengths 1
8
| text
stringlengths 6
1.05M
| dataset_id
stringclasses 1
value |
---|---|---|
/autora-experimentalist-sampler-uncertainty-1.0.1.tar.gz/autora-experimentalist-sampler-uncertainty-1.0.1/docs/Basic Usage.ipynb
|
# Import Modules
```
import numpy as np
import matplotlib.pyplot as plt
from sklearn.linear_model import LogisticRegression
from autora.experimentalist.sampler.uncertainty import uncertainty_sampler
```
# Define Meta-Space
We will here define X values of interest as well as a ground truth model to derive y values.
```
#Define meta-parameters
X = np.linspace(start=-3, stop=6, num=10).reshape(-1, 1)
#Define ground truth model
def ground_truth(xs):
y = (xs ** 2.0)
y[xs < 0] = 0
return y
```
# Plot the Data
Let's plot the data to see what we are working with.
```
plt.plot(X, ground_truth(X), 'o')
plt.show()
```
# Define and Fit Theorist
Next, we initialize the theorist and then train it on the data.
```
%%capture
#Initiate theorists
lr_theorist = LogisticRegression()
#Fit theorists
lr_theorist.fit(X,ground_truth(X))
```
# Plot Theorists on Data
We can then plot the theorist to see how well it recovered the data.
```
plt.plot(X, ground_truth(X), 'o')
plt.plot(X, lr_theorist.predict(X), alpha = .5)
plt.show()
```
# Run and Report Uncertainty Samples
Now we will get a proposal from the sampler as to which datapoints to investigate next. We will retrieve 5 new datapoints in this example.
```
sampler_proposal_lc = uncertainty_sampler(X, lr_theorist, 5, measure ="least_confident")
sampler_proposal_marg = uncertainty_sampler(X, lr_theorist, 5, measure ="margin")
sampler_proposal_ent = uncertainty_sampler(X, lr_theorist, 5, measure ="entropy")
print('New datapoints with Least Confident metric:\n' + str(sampler_proposal_lc) + '\n')
print('New datapoints with Margin metric:\n' + str(sampler_proposal_marg) + '\n')
print('New datapoints with Entropy metric:\n' + str(sampler_proposal_ent))
```
# Plot New Datapoints With Old
We can then plot our new datapoints with our previous ones to demonstrate our new dataset of investigation for then next cycle.
```
plt.plot(X, ground_truth(X), 'o', alpha = .5, label = 'Original Datapoints')
plt.plot(sampler_proposal_lc, ground_truth(sampler_proposal_lc), 'o', alpha = .5, label = 'Least Confident')
plt.plot(sampler_proposal_marg, ground_truth(sampler_proposal_marg), 'o', alpha = .5, label = 'Margin')
plt.plot(sampler_proposal_ent, ground_truth(sampler_proposal_ent), 'o', alpha = .5, label = 'Entropy')
plt.legend()
plt.show()
```
|
PypiClean
|
/mpl-interact-0.0.1.tar.gz/mpl-interact-0.0.1/mpl_interact/zoom.py
|
import abc
from typing import Optional
from mpl_events import mpl, MplObject_Type
from .base import InteractorBase, AxisType, KeyModifier
from .utils import scale_to_log, scale_from_log
class AxesZoomable(abc.ABC):
"""Axes zoomable interface
"""
@abc.abstractmethod
def begin(self, event: mpl.LocationEvent):
pass
@abc.abstractmethod
def end(self, event: mpl.LocationEvent):
pass
@abc.abstractmethod
def zoom(self, event: mpl.LocationEvent, step: float, axis: AxisType = AxisType.ALL) -> bool:
"""This method should implement zoom functionality
"""
pass
class MouseAnchorAxesZoomer(AxesZoomable):
"""Zooming axes according to mouse cursor position
Performs zoom with anchor in current mouse cursor position.
In this way you scale what you are looking at.
"""
def begin(self, event: mpl.LocationEvent):
pass
def end(self, event: mpl.LocationEvent):
pass
def zoom(self, event: mpl.LocationEvent, step: float, axis: AxisType = AxisType.ALL) -> bool:
axes: mpl.Axes = event.inaxes
if not axes or not axes.in_axes(event) or not axes.can_zoom():
return False
anchor_x = event.xdata
anchor_y = event.ydata
xmin, xmax = axes.get_xlim()
ymin, ymax = axes.get_ylim()
is_xlog = axes.get_xscale() == 'log'
is_ylog = axes.get_yscale() == 'log'
if axis == AxisType.X:
xmin, xmax = self._recalc_axis_limits(xmin, xmax, anchor_x, step, is_xlog)
elif axis == AxisType.Y:
ymin, ymax = self._recalc_axis_limits(ymin, ymax, anchor_y, step, is_ylog)
elif axis == AxisType.ALL:
xmin, xmax = self._recalc_axis_limits(xmin, xmax, anchor_x, step, is_xlog)
ymin, ymax = self._recalc_axis_limits(ymin, ymax, anchor_y, step, is_ylog)
axes.set_xlim(xmin, xmax)
axes.set_ylim(ymin, ymax)
return True
@staticmethod
def _recalc_axis_limits(lim_min, lim_max, anchor, zoom_step, is_log_scale):
if is_log_scale:
lim_min = scale_to_log(lim_min)
lim_max = scale_to_log(lim_max)
anchor = scale_to_log(anchor)
anchor = lim_min if anchor < lim_min else anchor
anchor = lim_max if anchor > lim_max else anchor
ra = abs(lim_min - anchor)
rb = abs(lim_max - anchor)
lim_min = lim_min + ra * zoom_step
lim_max = lim_max - rb * zoom_step
if lim_min > lim_max:
lim_min, lim_max = lim_max, lim_min
if is_log_scale:
lim_min = scale_from_log(lim_min)
lim_max = scale_from_log(lim_max)
return lim_min, lim_max
class ZoomInteractorBase(InteractorBase):
"""The base interactor for zooming data on an axes
"""
def __init__(self, mpl_obj: MplObject_Type, zoomer: Optional[AxesZoomable] = None):
super().__init__(mpl_obj)
self._step = 0.2
if not zoomer:
zoomer = MouseAnchorAxesZoomer()
self._zoomer = zoomer
@property
def zoomer(self) -> AxesZoomable:
return self._zoomer
@property
def step(self) -> float:
return self._step
@step.setter
def step(self, value: float):
if value > 0:
self._step = value
else:
raise ValueError('Zoom step value must be greater than zero')
class MouseWheelScrollZoomInteractor(ZoomInteractorBase):
"""The mouse wheel scroll interactor for zooming data on axes
"""
x_axis_keys = {'x', 'X'}
y_axis_keys = {'y', 'Y'}
def __init__(self, mpl_obj: MplObject_Type, zoomer: Optional[AxesZoomable] = None):
super().__init__(mpl_obj, zoomer)
self._inversion = True
@property
def inversion(self) -> bool:
return self._inversion
@inversion.setter
def inversion(self, value: bool):
self._inversion = value
def on_mouse_wheel_scroll(self, event: mpl.MouseEvent):
step = self.step * event.step
step = -step if self.inversion else step
key = self.parse_key(event.key)
if self.check_key(key, self.x_axis_keys, KeyModifier.NO):
axis = AxisType.X
elif self.check_key(key, self.y_axis_keys, KeyModifier.NO):
axis = AxisType.Y
else:
axis = AxisType.ALL
if self.zoomer.zoom(event, step, axis):
self.update()
class KeyZoomInteractor(ZoomInteractorBase):
"""Keyboard based zoom interactor
"""
disable_default_handlers = True
zoom_plus_keys = {'p', '=', '+'}
zoom_minus_keys = {'m', '-'}
x_modifier = KeyModifier.CTRL
y_modifier = KeyModifier.ALT
def on_key_press(self, event: mpl.KeyEvent):
key = self.parse_key(event.key)
if not key or key.modifier == KeyModifier.CTRL | KeyModifier.ALT:
return
if self.check_key(key, self.zoom_plus_keys):
step = self.step
elif self.check_key(key, self.zoom_minus_keys):
step = -self.step
else:
return
axis = AxisType.ALL
if key.modifier == self.x_modifier:
axis = AxisType.X
elif key.modifier == self.y_modifier:
axis = AxisType.Y
if self.zoomer.zoom(event, step, axis):
self.update()
|
PypiClean
|
/neural_compressor-2.2.1-py3-none-any.whl/neural_compressor/experimental/nas/search_algorithms.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2021 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import random
from .nas_utils import create_search_space_pool
from neural_compressor.strategy.bayesian import BayesianOptimization
from neural_compressor.utils import logger
class Searcher(object):
"""Base class for defining the common methods of different search algorithms.
Args:
search_space (dict): A dictionary for defining the search space.
"""
def __init__(self, search_space) -> None:
"""Initialize the attributes."""
assert isinstance(search_space, dict) and search_space, \
"Expect search_space to be a dict."
self.search_space = search_space
self.search_space_keys = sorted(search_space.keys())
for k in self.search_space_keys:
assert isinstance(self.search_space[k], (list, tuple)), \
"Value of key \'{}\' must be a list or tuple to specify choices".format(
k)
def suggest(self):
"""Suggest the model architecture."""
raise NotImplementedError('Depends on specific search algorithm.') # pragma: no cover
def get_feedback(self, metric):
"""Get metric feedback for the search algorithm."""
pass
def params_vec2params_dict(self, para_vec):
"""Convert the parameters vector to parameters dictionary.
Where parameters vector and parameters dictionary both define the model architecture.
Returns:
Parameters dictionary defining the model architecture.
"""
assert len(para_vec) == len(self.search_space_keys), \
"Length of para_vec and search_space_keys should be the same."
return {k: para_vec[i] for i, k in enumerate(self.search_space_keys)}
class GridSearcher(Searcher):
"""Grid search.
Search the whole search space exhaustively.
Args:
search_space (dict): A dictionary for defining the search space.
"""
def __init__(self, search_space) -> None:
"""Initialize the attributes."""
super(GridSearcher, self).__init__(search_space)
self.search_space_pool = create_search_space_pool(search_space)
self.idx = 0
def suggest(self):
"""Suggest the model architecture.
Returns:
The model architecture.
"""
res = self.search_space_pool[self.idx]
self.idx = (self.idx + 1) % len(self.search_space_pool)
return self.params_vec2params_dict(res)
class RandomSearcher(Searcher):
"""Random search.
Search the whole search space randomly.
Args:
search_space (dict): A dictionary for defining the search space.
"""
def __init__(self, search_space, seed=42) -> None:
"""Initialize the attributes."""
super(RandomSearcher, self).__init__(search_space)
self.search_space_pool = create_search_space_pool(search_space)
self.indices_pool = list(range(len(self.search_space_pool)))
random.seed(seed)
random.shuffle(self.indices_pool)
def suggest(self):
"""Suggest the model architecture.
Returns:
The model architecture.
"""
if not self.indices_pool:
self.indices_pool = list(range(len(self.search_space_pool)))
random.shuffle(self.indices_pool)
idx = self.indices_pool.pop(-1)
return self.params_vec2params_dict(self.search_space_pool[idx])
class BayesianOptimizationSearcher(Searcher):
"""Bayesian Optimization.
Search the search space with Bayesian Optimization.
Args:
search_space (dict): A dictionary for defining the search space.
"""
def __init__(self, search_space, seed=42) -> None:
"""Initialize the attributes."""
super(BayesianOptimizationSearcher, self).__init__(search_space)
idx_search_space = {
k: (0, len(search_space[k])-1) for k in self.search_space_keys}
self.bo_agent = BayesianOptimization(
idx_search_space, random_seed=seed)
self.last_param_indices = None
def suggest(self):
"""Suggest the model architecture.
Returns:
The model architecture.
"""
param_indices = self.bo_agent.gen_next_params()
self.last_param_indices = param_indices
return self.params_vec2params_dict(self.indices2params_vec(param_indices))
def get_feedback(self, metric):
"""Get metric feedback and register this metric."""
assert self.last_param_indices is not None, "Need run suggest first " + \
"to get parameters and the input metric is corresponding to this parameters."
try:
self.bo_agent._space.register(self.last_param_indices, metric)
except KeyError: # pragma: no cover
logger.debug("Find registered params, skip it.")
pass
self.last_param_indices = None
def indices2params_vec(self, indices):
"""Convert indices to parameters vector."""
res = []
for key, ind in indices.items():
# keep ind within the index range of self.search_space[key]
ind = int(min(max(round(ind), 0), len(self.search_space[key])-1))
res.append(self.search_space[key][ind])
return res
|
PypiClean
|
/PyQt6_Frameless_Window-0.3.2-py3-none-any.whl/qframelesswindow/linux/__init__.py
|
from PyQt6.QtCore import QCoreApplication, QEvent, Qt
from PyQt6.QtGui import QMouseEvent
from PyQt6.QtWidgets import QWidget
from ..titlebar import TitleBar
from ..utils.linux_utils import LinuxMoveResize
from .window_effect import LinuxWindowEffect
class LinuxFramelessWindow(QWidget):
""" Frameless window for Linux system """
BORDER_WIDTH = 5
def __init__(self, parent=None):
super().__init__(parent=parent)
self.windowEffect = LinuxWindowEffect(self)
self.titleBar = TitleBar(self)
self._isResizeEnabled = True
self.setWindowFlags(self.windowFlags() |
Qt.WindowType.FramelessWindowHint)
QCoreApplication.instance().installEventFilter(self)
self.titleBar.raise_()
self.resize(500, 500)
def resizeEvent(self, e):
super().resizeEvent(e)
self.titleBar.resize(self.width(), self.titleBar.height())
def setTitleBar(self, titleBar):
""" set custom title bar
Parameters
----------
titleBar: TitleBar
title bar
"""
self.titleBar.deleteLater()
self.titleBar = titleBar
self.titleBar.setParent(self)
self.titleBar.raise_()
def setResizeEnabled(self, isEnabled: bool):
""" set whether resizing is enabled """
self._isResizeEnabled = isEnabled
def eventFilter(self, obj, event):
et = event.type()
if et != QEvent.Type.MouseButtonPress and et != QEvent.Type.MouseMove or not self._isResizeEnabled:
return False
edges = Qt.Edge(0)
pos = event.globalPosition().toPoint() - self.pos()
if pos.x() < self.BORDER_WIDTH:
edges |= Qt.Edge.LeftEdge
if pos.x() >= self.width()-self.BORDER_WIDTH:
edges |= Qt.Edge.RightEdge
if pos.y() < self.BORDER_WIDTH:
edges |= Qt.Edge.TopEdge
if pos.y() >= self.height()-self.BORDER_WIDTH:
edges |= Qt.Edge.BottomEdge
# change cursor
if et == QEvent.Type.MouseMove and self.windowState() == Qt.WindowState.WindowNoState:
if edges in (Qt.Edge.LeftEdge | Qt.Edge.TopEdge, Qt.Edge.RightEdge | Qt.Edge.BottomEdge):
self.setCursor(Qt.CursorShape.SizeFDiagCursor)
elif edges in (Qt.Edge.RightEdge | Qt.Edge.TopEdge, Qt.Edge.LeftEdge | Qt.Edge.BottomEdge):
self.setCursor(Qt.CursorShape.SizeBDiagCursor)
elif edges in (Qt.Edge.TopEdge, Qt.Edge.BottomEdge):
self.setCursor(Qt.CursorShape.SizeVerCursor)
elif edges in (Qt.Edge.LeftEdge, Qt.Edge.RightEdge):
self.setCursor(Qt.CursorShape.SizeHorCursor)
else:
self.setCursor(Qt.CursorShape.ArrowCursor)
elif obj in (self, self.titleBar) and et == QEvent.Type.MouseButtonPress and edges:
LinuxMoveResize.starSystemResize(self, event.globalPosition(), edges)
return super().eventFilter(obj, event)
|
PypiClean
|
/tricubic-1.0.3.tar.gz/tricubic-1.0.3/thirdparty/pybind11/docs/advanced/pycpp/numpy.rst
|
.. _numpy:
NumPy
#####
Buffer protocol
===============
Python supports an extremely general and convenient approach for exchanging
data between plugin libraries. Types can expose a buffer view [#f2]_, which
provides fast direct access to the raw internal data representation. Suppose we
want to bind the following simplistic Matrix class:
.. code-block:: cpp
class Matrix {
public:
Matrix(size_t rows, size_t cols) : m_rows(rows), m_cols(cols) {
m_data = new float[rows*cols];
}
float *data() { return m_data; }
size_t rows() const { return m_rows; }
size_t cols() const { return m_cols; }
private:
size_t m_rows, m_cols;
float *m_data;
};
The following binding code exposes the ``Matrix`` contents as a buffer object,
making it possible to cast Matrices into NumPy arrays. It is even possible to
completely avoid copy operations with Python expressions like
``np.array(matrix_instance, copy = False)``.
.. code-block:: cpp
py::class_<Matrix>(m, "Matrix", py::buffer_protocol())
.def_buffer([](Matrix &m) -> py::buffer_info {
return py::buffer_info(
m.data(), /* Pointer to buffer */
sizeof(float), /* Size of one scalar */
py::format_descriptor<float>::format(), /* Python struct-style format descriptor */
2, /* Number of dimensions */
{ m.rows(), m.cols() }, /* Buffer dimensions */
{ sizeof(float) * m.cols(), /* Strides (in bytes) for each index */
sizeof(float) }
);
});
Supporting the buffer protocol in a new type involves specifying the special
``py::buffer_protocol()`` tag in the ``py::class_`` constructor and calling the
``def_buffer()`` method with a lambda function that creates a
``py::buffer_info`` description record on demand describing a given matrix
instance. The contents of ``py::buffer_info`` mirror the Python buffer protocol
specification.
.. code-block:: cpp
struct buffer_info {
void *ptr;
ssize_t itemsize;
std::string format;
ssize_t ndim;
std::vector<ssize_t> shape;
std::vector<ssize_t> strides;
};
To create a C++ function that can take a Python buffer object as an argument,
simply use the type ``py::buffer`` as one of its arguments. Buffers can exist
in a great variety of configurations, hence some safety checks are usually
necessary in the function body. Below, you can see an basic example on how to
define a custom constructor for the Eigen double precision matrix
(``Eigen::MatrixXd``) type, which supports initialization from compatible
buffer objects (e.g. a NumPy matrix).
.. code-block:: cpp
/* Bind MatrixXd (or some other Eigen type) to Python */
typedef Eigen::MatrixXd Matrix;
typedef Matrix::Scalar Scalar;
constexpr bool rowMajor = Matrix::Flags & Eigen::RowMajorBit;
py::class_<Matrix>(m, "Matrix", py::buffer_protocol())
.def("__init__", [](Matrix &m, py::buffer b) {
typedef Eigen::Stride<Eigen::Dynamic, Eigen::Dynamic> Strides;
/* Request a buffer descriptor from Python */
py::buffer_info info = b.request();
/* Some sanity checks ... */
if (info.format != py::format_descriptor<Scalar>::format())
throw std::runtime_error("Incompatible format: expected a double array!");
if (info.ndim != 2)
throw std::runtime_error("Incompatible buffer dimension!");
auto strides = Strides(
info.strides[rowMajor ? 0 : 1] / (py::ssize_t)sizeof(Scalar),
info.strides[rowMajor ? 1 : 0] / (py::ssize_t)sizeof(Scalar));
auto map = Eigen::Map<Matrix, 0, Strides>(
static_cast<Scalar *>(info.ptr), info.shape[0], info.shape[1], strides);
new (&m) Matrix(map);
});
For reference, the ``def_buffer()`` call for this Eigen data type should look
as follows:
.. code-block:: cpp
.def_buffer([](Matrix &m) -> py::buffer_info {
return py::buffer_info(
m.data(), /* Pointer to buffer */
sizeof(Scalar), /* Size of one scalar */
py::format_descriptor<Scalar>::format(), /* Python struct-style format descriptor */
2, /* Number of dimensions */
{ m.rows(), m.cols() }, /* Buffer dimensions */
{ sizeof(Scalar) * (rowMajor ? m.cols() : 1),
sizeof(Scalar) * (rowMajor ? 1 : m.rows()) }
/* Strides (in bytes) for each index */
);
})
For a much easier approach of binding Eigen types (although with some
limitations), refer to the section on :doc:`/advanced/cast/eigen`.
.. seealso::
The file :file:`tests/test_buffers.cpp` contains a complete example
that demonstrates using the buffer protocol with pybind11 in more detail.
.. [#f2] http://docs.python.org/3/c-api/buffer.html
Arrays
======
By exchanging ``py::buffer`` with ``py::array`` in the above snippet, we can
restrict the function so that it only accepts NumPy arrays (rather than any
type of Python object satisfying the buffer protocol).
In many situations, we want to define a function which only accepts a NumPy
array of a certain data type. This is possible via the ``py::array_t<T>``
template. For instance, the following function requires the argument to be a
NumPy array containing double precision values.
.. code-block:: cpp
void f(py::array_t<double> array);
When it is invoked with a different type (e.g. an integer or a list of
integers), the binding code will attempt to cast the input into a NumPy array
of the requested type. Note that this feature requires the
:file:`pybind11/numpy.h` header to be included.
Data in NumPy arrays is not guaranteed to packed in a dense manner;
furthermore, entries can be separated by arbitrary column and row strides.
Sometimes, it can be useful to require a function to only accept dense arrays
using either the C (row-major) or Fortran (column-major) ordering. This can be
accomplished via a second template argument with values ``py::array::c_style``
or ``py::array::f_style``.
.. code-block:: cpp
void f(py::array_t<double, py::array::c_style | py::array::forcecast> array);
The ``py::array::forcecast`` argument is the default value of the second
template parameter, and it ensures that non-conforming arguments are converted
into an array satisfying the specified requirements instead of trying the next
function overload.
Structured types
================
In order for ``py::array_t`` to work with structured (record) types, we first
need to register the memory layout of the type. This can be done via
``PYBIND11_NUMPY_DTYPE`` macro, called in the plugin definition code, which
expects the type followed by field names:
.. code-block:: cpp
struct A {
int x;
double y;
};
struct B {
int z;
A a;
};
// ...
PYBIND11_MODULE(test, m) {
// ...
PYBIND11_NUMPY_DTYPE(A, x, y);
PYBIND11_NUMPY_DTYPE(B, z, a);
/* now both A and B can be used as template arguments to py::array_t */
}
The structure should consist of fundamental arithmetic types, ``std::complex``,
previously registered substructures, and arrays of any of the above. Both C++
arrays and ``std::array`` are supported. While there is a static assertion to
prevent many types of unsupported structures, it is still the user's
responsibility to use only "plain" structures that can be safely manipulated as
raw memory without violating invariants.
Vectorizing functions
=====================
Suppose we want to bind a function with the following signature to Python so
that it can process arbitrary NumPy array arguments (vectors, matrices, general
N-D arrays) in addition to its normal arguments:
.. code-block:: cpp
double my_func(int x, float y, double z);
After including the ``pybind11/numpy.h`` header, this is extremely simple:
.. code-block:: cpp
m.def("vectorized_func", py::vectorize(my_func));
Invoking the function like below causes 4 calls to be made to ``my_func`` with
each of the array elements. The significant advantage of this compared to
solutions like ``numpy.vectorize()`` is that the loop over the elements runs
entirely on the C++ side and can be crunched down into a tight, optimized loop
by the compiler. The result is returned as a NumPy array of type
``numpy.dtype.float64``.
.. code-block:: pycon
>>> x = np.array([[1, 3],[5, 7]])
>>> y = np.array([[2, 4],[6, 8]])
>>> z = 3
>>> result = vectorized_func(x, y, z)
The scalar argument ``z`` is transparently replicated 4 times. The input
arrays ``x`` and ``y`` are automatically converted into the right types (they
are of type ``numpy.dtype.int64`` but need to be ``numpy.dtype.int32`` and
``numpy.dtype.float32``, respectively).
.. note::
Only arithmetic, complex, and POD types passed by value or by ``const &``
reference are vectorized; all other arguments are passed through as-is.
Functions taking rvalue reference arguments cannot be vectorized.
In cases where the computation is too complicated to be reduced to
``vectorize``, it will be necessary to create and access the buffer contents
manually. The following snippet contains a complete example that shows how this
works (the code is somewhat contrived, since it could have been done more
simply using ``vectorize``).
.. code-block:: cpp
#include <pybind11/pybind11.h>
#include <pybind11/numpy.h>
namespace py = pybind11;
py::array_t<double> add_arrays(py::array_t<double> input1, py::array_t<double> input2) {
auto buf1 = input1.request(), buf2 = input2.request();
if (buf1.ndim != 1 || buf2.ndim != 1)
throw std::runtime_error("Number of dimensions must be one");
if (buf1.size != buf2.size)
throw std::runtime_error("Input shapes must match");
/* No pointer is passed, so NumPy will allocate the buffer */
auto result = py::array_t<double>(buf1.size);
auto buf3 = result.request();
double *ptr1 = (double *) buf1.ptr,
*ptr2 = (double *) buf2.ptr,
*ptr3 = (double *) buf3.ptr;
for (size_t idx = 0; idx < buf1.shape[0]; idx++)
ptr3[idx] = ptr1[idx] + ptr2[idx];
return result;
}
PYBIND11_MODULE(test, m) {
m.def("add_arrays", &add_arrays, "Add two NumPy arrays");
}
.. seealso::
The file :file:`tests/test_numpy_vectorize.cpp` contains a complete
example that demonstrates using :func:`vectorize` in more detail.
Direct access
=============
For performance reasons, particularly when dealing with very large arrays, it
is often desirable to directly access array elements without internal checking
of dimensions and bounds on every access when indices are known to be already
valid. To avoid such checks, the ``array`` class and ``array_t<T>`` template
class offer an unchecked proxy object that can be used for this unchecked
access through the ``unchecked<N>`` and ``mutable_unchecked<N>`` methods,
where ``N`` gives the required dimensionality of the array:
.. code-block:: cpp
m.def("sum_3d", [](py::array_t<double> x) {
auto r = x.unchecked<3>(); // x must have ndim = 3; can be non-writeable
double sum = 0;
for (ssize_t i = 0; i < r.shape(0); i++)
for (ssize_t j = 0; j < r.shape(1); j++)
for (ssize_t k = 0; k < r.shape(2); k++)
sum += r(i, j, k);
return sum;
});
m.def("increment_3d", [](py::array_t<double> x) {
auto r = x.mutable_unchecked<3>(); // Will throw if ndim != 3 or flags.writeable is false
for (ssize_t i = 0; i < r.shape(0); i++)
for (ssize_t j = 0; j < r.shape(1); j++)
for (ssize_t k = 0; k < r.shape(2); k++)
r(i, j, k) += 1.0;
}, py::arg().noconvert());
To obtain the proxy from an ``array`` object, you must specify both the data
type and number of dimensions as template arguments, such as ``auto r =
myarray.mutable_unchecked<float, 2>()``.
If the number of dimensions is not known at compile time, you can omit the
dimensions template parameter (i.e. calling ``arr_t.unchecked()`` or
``arr.unchecked<T>()``. This will give you a proxy object that works in the
same way, but results in less optimizable code and thus a small efficiency
loss in tight loops.
Note that the returned proxy object directly references the array's data, and
only reads its shape, strides, and writeable flag when constructed. You must
take care to ensure that the referenced array is not destroyed or reshaped for
the duration of the returned object, typically by limiting the scope of the
returned instance.
The returned proxy object supports some of the same methods as ``py::array`` so
that it can be used as a drop-in replacement for some existing, index-checked
uses of ``py::array``:
- ``r.ndim()`` returns the number of dimensions
- ``r.data(1, 2, ...)`` and ``r.mutable_data(1, 2, ...)``` returns a pointer to
the ``const T`` or ``T`` data, respectively, at the given indices. The
latter is only available to proxies obtained via ``a.mutable_unchecked()``.
- ``itemsize()`` returns the size of an item in bytes, i.e. ``sizeof(T)``.
- ``ndim()`` returns the number of dimensions.
- ``shape(n)`` returns the size of dimension ``n``
- ``size()`` returns the total number of elements (i.e. the product of the shapes).
- ``nbytes()`` returns the number of bytes used by the referenced elements
(i.e. ``itemsize()`` times ``size()``).
.. seealso::
The file :file:`tests/test_numpy_array.cpp` contains additional examples
demonstrating the use of this feature.
|
PypiClean
|
/zombie_calculator-0.0.4.3-py3-none-any.whl/zombie_calculator/pack_resources/help/help.md
|
# **出怪计算器** #
----------
## 前言 ##
本计算器是在[@SKOSKX版](https://tieba.baidu.com/p/7713362872)基础上制作的魔改版。相较于原版,有以下特性:
1. **更更更更更高的效率**(高了多少请自行体会)
2. 将原模式1与模式3合并为新模式1,将原模式2与模式4合并为新模式2,原模式5有朝一日不咕的话……(
3. 加了gui(废话
4. 如果觉得该程序不错,不如star一个吧!
## 模式1使用示例 ##
计算1号玩家,PE(存档编号为13)在种子114514下2021-2030旗的出怪:
<img src="mode1.png" title="模式1" />
结果如下:
<img src="mode1Result.png" title="结果" />
## 模式2使用示例
计算节操掉尽的1号玩家PE从第3面旗到第16面旗均出扶梯且不出车丑红的种子:
<img src="mode2_1.png" title="模式2"/>
作出如下设置后依次点击“加入”和“运行计算”,稍等片刻即可得出结果:
<img src="mode2_1Result.png" title="结果" />
另一个例子:
计算节操掉尽的2号玩家PE 3-4F 出丑炸荷叶,5-10F出梯不出车丑,11-12F出车碾盆的种子:
<img src="mode2_2.1.png" title="模式2"/>
输入以上信息后点击“加入”
如法炮制,最终输入完成的示意:
<img src="mode2_2.2.png" title="模式2" style="zoom:85%"/>
运行计算的结果:
<img src="mode2_2Result.png" title="结果"/>
## 关于参数的一些解释
### 用户编号
第几个创建的用户就是几,以本人的游戏举例:
<img src="users.png" title="用户编号"/>
### 存档编号
正常情况下,**DE,NE,PE,FE,RE分别为11,12,13,14,15**
如果使用修改器改变了场景,存档编号以修改前的场景(也是右下角显示的场景)为准
但游戏场景以修改后的场景为准
## 种子如何查看、修改?
1. 在启动窗口点击***修改种子***按钮。窗口启动时会自动查找游戏并获取存档出怪种子,若游戏未启动则会视具体问题在状态栏显示。也可以在游戏启动后点击***查找游戏***与***获取当前种子***手动查找。
2. 如截图所示,当前为种子**0x11**在泳池无尽3-4波下的预览。利用模式一进行检验。
<img src="check.png" title="修改检验"/>
<img src="check_2.png" title="修改检验"/>
现在修改为**0x12**,效果如图。与模式一预测结果相同。
<img src="modify.png" title="修改完成"/>
<img src="modify_2.png" title="修改完成"/>
------
# 开发者文档
### 总述
该程序基于*python3.8&pyqt5*开发,使用*qt_material*作为皮肤。其中cpu密集计算部分(模式2)使用*c++&pybind11*多线程编写充分利用cpu多核资源。该程序较初版程序速度快500~1000倍左右,使得在int32范围内短时间穷举全部种子成为现实。
- seedFinder模块由[@SKOSKX版](https://tieba.baidu.com/p/7713362872)c++重写为多线程程序而来。
- asmInject模块由[@pvztools](https://github.com/lmintlcx/PvZTools)的部分代码改写而来,并且重写为64位。
### seedFinder模块
**该模块必须在python3.8下使用。**在其他版本下使用请自行编译。
- #### 方法
- **`appear(uid:int, mode:int, scene:str, level:int, seed:int)`**用于获取在当前波数下的出怪类型,返回值为list。
- #### 类
- **`requestToSeed(uid:int, mode:int, scene:str, level_beginning:int, level_ending:int, seed:int)`** 用于获取指定条件下的种子。
- ##### 类属性
- **`seed:int`**用于获取当前计算到的种子。**在seed小于0时表示没有找到合适的种子。**
- `stopThread:bool`用于中断计算线程。
- ##### 类方法
- **`calc(idNeeded:list[list], idRefused:list[list]):int`**两个参数均为二维列表,每个元素为每波要求的出怪类型列表(空列表为无要求)。计算时会堵塞线程,为获取实时进度请使用多线程。
### asmInject模块
**该模块必须在python3.8下使用。**在其他版本下使用请自行编译。
- #### 类
- **`seedInject()`**初始化时尝试获取查找一次游戏。
- ##### 类属性
- **`Result`**
- **`NotFound`**没有找到游戏。
- **`WrongVersion`**不支持的游戏版本。
- **`OK`**成功找到游戏。
- **`OpenError`**游戏进程打开错误。
- **`findResult:Result`**在当前状态下的查找游戏状态,Result枚举类型。
- ##### 类方法
- **`getRandomSeed():int`**获取当前游戏存档种子。
- **`setRandomSeed(seed:int)`**设置当前游戏存档种子。
- **`internalSpawn()`**刷新游戏准备时预览。
|
PypiClean
|
/Agrothon-1.3.2.tar.gz/Agrothon-1.3.2/agrothon/tgbot/translations/tamil.py
|
class Language(object):
# Main Menu
MAIN_MENU = "<b>முதன்மை பட்டியல்</b>"
# Open Weather
WEATHER_FETCH = "விவரங்களைப் பெறுதல் தயவுசெய்து காத்திருங்கள்"
WEATHER = """
<b>📍 இடம் :</b><code> {}</code>
<b>🌡️ வெப்ப நிலை : </b><code>{} °C</code>
<b>💨 அழுத்தம் : </b><code>{} Pa</code>
<b>💧 ஈரப்பதம் : </b><code>{} g.m-3</code>
<b>⛅ வானிலை : </b><code>{}</code>
"""
WEATHER_ERR = "<b>பிழை ஏற்பட்டது, பதில் : <code>{}</code> </b>"
# Button Callbacks
MOISTURE = "ஈரம்"
HUMIDITY = "ஈரப்பதம்"
TEMPERATURE = "வெப்ப நிலை"
RAIN = "மழைக்கால நிலை"
PUMP_STATUS = "பம்ப் நிலை"
COMPLETE_INFO = "முழுமையான தகவல்"
QUIT = "விட்டுவிட"
PUMP_OFF = "அனைத்து விடு"
PUMP_ON = "மாறவும்"
BACK = "மீண்டும்"
BOT_PRED = "பாட் ப்ரிடிக்டனை இயக்கவும்"
REFRESH = "நிலையை புதுப்பிக்கவும்"
MOISTURE_SENSOR = """<b>💧 மண்ணில் ஈரப்பதம் (சென்சார் {}): </b><code> {}%</code>\n"""
MOISTURE_RESP = """<b>🕒 கடைசியாக புதுப்பிக்கப்பட்டது: </b><code> {}</code>
<b>🕒 கடைசியாக படித்தது: </b><code> {}</code>
"""
HUMID_RESP = """
<b>⛅ புலத்தில் ஈரப்பதம் : </b><code> {}%</code>
<b>🕒 கடைசியாக புதுப்பிக்கப்பட்டது: </b><code> {}</code>
<b>🕒 கடைசியாக படித்தது: </b><code> {}</code>
"""
TEMPE_RESP = """
<b>🌡️ புலத்தில் தற்காலிகம் : </b><code> {}°C</code>
<b>🕒 கடைசியாக புதுப்பிக்கப்பட்டது: </b><code> {}</code>
<b>🕒 கடைசியாக படித்தத: </b><code> {}</code>
"""
RAIN_YES_RESP = """
<b>பண்ணையில் மழை பெய்கிறது 🌧️ </b>
"""
RAIN_NO_RESP = """
<b>பண்ணையில் மழை பெய்யவில்லை 🌞</b>
"""
COMPLETE_MOISTURE = """<b>💧 ஈரம் (சென்சார் {}): </b><code> {}%</code>\n"""
COMPLETE_RESP = """<b>⛅ ஈரப்பதம் : </b><code> {}%</code>
<b>🌡️ வெப்ப நில : </b><code> {}°C</code>
<b>⛏️ பம்ப் இருக்க வேண்டும் (சென்சார் {}): </b><code> {}</code>
<b>🕒 கடைசியாக புதுப்பிக்கப்பட்டது: </b><code> {}</code>
<b>🕒 கடைசியாக படித்தது: </b><code> {}</code>
"""
# Pump
PUMP_SWITCHED_ON = """
<b>பம்ப் இயங்குகிறது</b>
இயக்கப்பட்டது <code> {}</code>
கடைசி சோதனை : <code> {}</code>
"""
PUMP_SWITCHED_OFF = """
<b>பம்ப் முடக்கப்பட்டுள்ளது</b>
ஆல் மாற்றப்பட்டது <code> {}</code>
கடைசி சோதனை : <code> {}</code>
"""
BOT_ACTIVATED = """<b>🤖 போட் பயன்முறை செயல்படுத்தப்பட்டது</b>
இப்போது நீங்கள் உட்கார்ந்து ஓய்வெடுக்கலாம் மற்றும் உங்கள் 🤖 பண்ணையை நிர்வகிக்க போட் அனுமதிக்கலாம் 🚜
"""
PUMP_BTN_ON = """
<b> ✅ இயங்கும் நிலை மாற்றப்பட்டது </b>
இயக்கப்பட்டது<code> User</code>
"""
PUMP_BTN_OFF = """
<b> ✅ இயங்கும் நிலை மாற்றப்பட்டது </b>
ஆல் மாற்றப்பட்டது<code> User</code>
"""
SETTINGS = "⚙️ அமைப்புகள் ⚙️"
LANG = "🌐 மொழியை மாற்றுங்கள் 🌐"
SELECT_LANG = "விருப்பமான மொழியைத் தேர்ந்தெடுக்கவும்"
LANG_CHANGED = "மொழி ஆங்கிலத்திற்கு மாற்றப்பட்டுள்ளது"
OBJECTS = "பொருள்கள்"
DET_NO = "கண்டறியப்பட்டது"
ALERT_MESSAGE = """
<b>ஊடுருவும் நபர்கள் கண்டறியப்பட்டுள்ளனர் </b>
<b>இல் கண்டறியப்பட்டது</b> : <code> {}</code>
<b>பொருள்கள் எதுவும் கண்டறியப்படவில்லை</b> : <code> {}</code>
<b>மக்கள் யாரும் கண்டறியப்படவில்லை</b> : <code> {}</code>
<pre>{}</pre>
"""
MONTHS = [
"ஏப்ரல்",
"மே",
"ஜூன்",
"ஜூலை",
"ஆகஸ்ட்",
"செப்டம்பர்",
"அக்டோபர்",
"நவம்பர்",
"டிசம்பர்",
]
MONTH = "மாதம்"
RAINFALL = "மழைப்பொழிவு(இல் {})"
RAIN_PREDICT = """
<b>இந்த ஆண்டிற்கான மழைப்பொழிவு கணிப்புகள்</b>
<b>நிலை</b>: <code> {}</code>
<b>மாவட்டம் </b>: <code> {}</code>
<pre>{}</pre>
"""
RAIN_PREDICT_ERR = """
கணிக்கும் போது பிழை
"""
IMAGE_MESSAGE = """
<b>பொருள்கள் கண்டறியப்பட்டுள்ளன </b>
<b>பொருள்கள் எதுவும் கண்டறியப்படவில்லை</b> : <code> {}</code>
<b>மக்கள் யாரும் கண்டறியப்படவில்லை</b> : <code> {}</code>
<pre>{}</pre>
"""
ERR_IMAGE_RESPONSE = """
<b>படத்தில் எதுவும் காணப்படவில்லை</b>
"""
PRED_PUMP_OFF = "மஆஃப்ு"
PRED_PUMP_ON = "ஆன்"
STATS = """
<b>முடிந்தநேரம் :</b><code> {}</code>
<b>வட்டு அளவு :</b><code> {}</code>
<b>பயன்படுத்தப்பட்டது :</b><code> {}</code>
<b>இலவசம் :</b><code> {}</code>
<b>CPU பயன்பாடு :</b><code> {}%</code>
<b>RAM பயன்பாடு :</b><code> {}%</code>
<b>பதிவேற்றப்பட்டது :</b><code> {}</code>
<b>பதிவிறக்கம் செய்யப்பட்டது :</b><code> {}</code>
"""
DL_TG = "டெலிகிராமிலிருந்து பதிவிறக்குகிறது"
PROC_IMAGE = (
"Dசொந்தமாக ஏற்றப்பட்டது, பொருள்களைக் கண்டறிதல் தயவுசெய்து காத்திருங்கள் ..."
)
RESTART = "மறுதொடக்கம், தயவுசெய்து காத்திருங்கள் ...."
RESTART_DONE = "மீண்டும் தொடங்கப்பட்டது வெற்றிகரமாக!"
RESTART_CALLBACK = "மறுதொடக்கம்"
WEATHER_FETCHING = "வானிலை பெறுகிறது, தயவுசெய்து காத்திருங்கள்"
HELP_MESSAGE = """
<code>/{}</code> : உங்கள் நகரத்தின் வானிலை நிலை
<code>/{}</code> : உங்கள் பிராந்தியத்தின் மழையை கணிக்கவும்
<code>/{}</code> : உங்கள் புல நிலையைப் பெற்று உங்கள் பம்பை நிர்வகிக்கவும்
<code>/{}</code> : உங்கள் போட் அமைப்புகளை மாற்றவும்
<code>/{}</code> : சேவையக புள்ளிவிவரங்களைப் பெறுங்கள்
<code>/{}</code> : பிங் சரிபார்க்கவும்
<code>/{}</code> : சேவையகத்தின் பதிவைப் பெறுங்கள்
<code>/{}</code> : சேவையகத்தை மறுதொடக்கம் செய்யுங்கள்
<code>/{}</code> : இந்த செய்தியைப் பெற
<code>ஒரு படத்தில் உள்ள பொருட்களைக் கண்டறிய படத்தை அனுப்பவும்</code>
"""
START = """
ஏய், நான் <code>அக்ரோத்தான்</code>
- உங்கள் பண்ணையை கண்காணிக்கலாம்
- பம்ப் நிலையை ஆன் அல்லது ஆஃப் என மாற்றவும்
- வானிலை கிடைக்கும்
- ஒரு படத்தில் பொருள்களைக் கண்டறியவும்
- உங்கள் பிராந்தியத்திற்கு மழையை கணிக்கவும்
"""
PING_START = "பிங் தொடங்குகிறது"
PING_FINAL = "அளவிடப்பட்ட பிங் : {}"
LANG_SET = "விருப்பமான மொழி தொகுப்பு வெற்றிகரமாக"
PUMP_STATUS_ON = "மீது"
PUMP_STATUS_OFF = "ஆஃப்"
|
PypiClean
|
/melmac-76693-py3-none-any.whl/mlmc/models/lm_mogrifier.py
|
import string
import torch
from .abstracts_lm import LanguageModelAbstract
from ..layers import MogrifierLSTM
from ..representation import map_vocab
class MogrifierLMCharacter(LanguageModelAbstract):
def __init__(self, alphabet=string.ascii_letters+string.punctuation+"1234567890",
hidden_size=128, emb_dim=50, n_layers=1, mogrify_steps=2,
learn_initial_states=True, max_len=128, dropout=0.5, inter_dropout=0.2, **kwargs):
super(MogrifierLMCharacter, self).__init__(**kwargs)
self.max_len = max_len
self.emb_dim = emb_dim
self.n_layers = n_layers
self.mogrify_steps = mogrify_steps
self.learn_initial_states = learn_initial_states
self.hidden_size = hidden_size
self.cell_type = MogrifierLSTM
self.dropout = dropout
self.inter_dropout = inter_dropout
self.lm_layers = torch.nn.ModuleList(
[
self.cell_type(hidden_size, hidden_size, mogrify_steps)
if i > 0 else
self.cell_type(self.emb_dim, hidden_size, mogrify_steps)
for i in range(n_layers)]
)
self.dropout_layer = torch.nn.Dropout(dropout)
self.inter_dropout_layer = torch.nn.Dropout(inter_dropout)
self.alphabet = list(alphabet)
self.alphabet = dict(zip(self.alphabet, range(len(self.alphabet))))
self.vocabulary_size = len(self.alphabet)
self.embedding = torch.nn.Embedding(
num_embeddings=self.vocabulary_size,
embedding_dim=self.emb_dim,
)
self.projection = torch.nn.Linear(self.hidden_size, self.vocabulary_size)
self.build()
def forward(self, x, representations=False):
e = self.embedding(x)
for layer in self.lm_layers:
e = self.inter_dropout_layer(e)
e, rep = layer(e)
if representations:
return e, rep[0]
e = self.dropout_layer(e)[:, -1, :]
return self.projection(e)
def transform(self, s):
return torch.tensor([[self.alphabet[x] for x in m] for m in s]).squeeze(-1)
def encode(self, s):
return self.transform([x for x in s if x in self.alphabet.keys()]).tolist()
def decode(self, s):
alphabet_rev = {v:k for k,v in self.alphabet.items()}
return "".join([alphabet_rev[x] for x in s])
class MogrifierLMWord(LanguageModelAbstract):
def __init__(self, word_list, hidden_size=128, emb_dim=50, n_layers=1, mogrify_steps=2,
learn_initial_states=True, max_len=128, dropout=0.5, inter_dropout=0.2, **kwargs):
super(MogrifierLMWord, self).__init__(**kwargs)
self.max_len = max_len
self.emb_dim = emb_dim
self.n_layers = n_layers
self.mogrify_steps = mogrify_steps
self.learn_initial_states = learn_initial_states
self.hidden_size = hidden_size
self.cell_type = MogrifierLSTM
self.lm_layers = torch.nn.ModuleList(
[
self.cell_type(hidden_size, hidden_size, mogrify_steps)
if i > 0 else
self.cell_type(self.emb_dim, hidden_size, mogrify_steps)
for i in range(n_layers)]
)
self.dropout = dropout
self.inter_dropout = inter_dropout
self.dropout_layer = torch.nn.Dropout(dropout)
self.inter_dropout_layer = torch.nn.Dropout(inter_dropout)
self.word_list = word_list + ["<UNK_TOKEN>"]
self.word_list = dict(zip(self.word_list, range(1,len(self.word_list)+1)))
self.vocabulary_size = len(self.word_list)+1
self.embedding = torch.nn.Embedding(
num_embeddings=self.vocabulary_size,
embedding_dim=self.emb_dim,
)
self.projection = torch.nn.Linear(self.hidden_size, self.vocabulary_size)
self.build()
def forward(self, x, representations=False):
e = self.embedding(x)
for layer in self.lm_layers:
e = self.inter_dropout_layer(e)
e, rep = layer(e)
if representations:
return e, rep[0]
e = self.dropout_layer(e)[:, -1, :]
return self.projection(e)
def transform(self, s):
if isinstance(s[0], str):
s = [s]
return map_vocab(s,self.word_list,len(s[0])).t().squeeze(-1)
def encode(self, s):
return self.transform([s.split()]).tolist()
def decode(self, s):
return " ".join([list(self.word_list.keys())[x-1] for x in s])
|
PypiClean
|
/pymc3-3.11.5.tar.gz/pymc3-3.11.5/GOVERNANCE.md
|
# Main Governance Document
The Project
===========
The PyMC3 Project (The Project) is an open source software project
affiliated with the 501c3 NumFocus Foundation. The goal of The Project is to
develop open source software and deploy open and public websites and services
for reproducible, exploratory and interactive computing. The Software developed
by The Project is released under the Apache 2 open source license,
developed openly and hosted in public GitHub repositories under the
[GitHub organization](https://github.com/pymc-devs/pymc3). Examples of
Project Software include the PyMC3 code and the Documentation, etc. The Services run by the
Project consist of public websites and web-services that are hosted
at [http://pymc-devs.github.io/pymc3/](http://pymc-devs.github.io/pymc3/)
The Project is developed by a team of distributed developers, called
Contributors. Contributors are individuals who have contributed code,
documentation, designs or other work to one or more Project repositories.
Anyone can be a Contributor. Contributors can be affiliated with any legal
entity or none. Contributors participate in the project by submitting,
reviewing and discussing GitHub Pull Requests and Issues and participating in
open and public Project discussions on GitHub, Slack, Gitter chat rooms and mailing lists. The foundation of Project participation is openness
and transparency.
There have been over 100 Contributors to the Project, their contributions are listed in the logs of the PyMC repositories as well as those of associated projects.
The Project Community consists of all Contributors and Users of the Project.
Contributors work on behalf of and are responsible to the larger Project
Community and we strive to keep the barrier between Contributors and Users as
low as possible.
The Project is formally affiliated with the 501c3 NumFOCUS Foundation
([http://numfocus.org](http://numfocus.org)), which serves as its fiscal
sponsor, may hold project trademarks and other intellectual property, helps
manage project donations and acts as a parent legal entity. NumFOCUS is the
only legal entity that has a formal relationship with the project (see
Institutional Partners section below).
### Governance
This section describes the governance and leadership model of The Project.
The foundations of Project governance are:
- Openness & Transparency
- Active Contribution
- Institutional Neutrality
Traditionally, Project leadership was provided by a BDFL (Chris Fonnesbeck) and
subset of Contributors, called Core Developers, whose active and consistent
contributions have been recognized by their receiving “commit rights” to the
Project GitHub repositories. In general all Project decisions are made through
consensus among the Core Developers with input from the Community. The BDFL
can, but rarely chooses to, override the Core Developers and make a final
decision on a matter.
While this approach has served us well, as the Project grows and faces more
legal and financial decisions and interacts with other institutions, we see a
need for a more formal governance model. Moving forward The Project leadership
will consist of a BDFL and Steering Council. We view this governance model as
the formalization of what we are already doing, rather than a change in
direction.
BDFL
----
The Project will have a BDFL (Benevolent Dictator for Life), who is currently
Chris Fonnesbeck. As Dictator, the BDFL has the authority to make all final
decisions for The Project. As Benevolent, the BDFL, in practice chooses to
defer that authority to the consensus of the community discussion channels and
the Steering Council (see below). It is expected, and in the past has been the
case, that the BDFL will only rarely assert his/her final authority. Because
rarely used, we refer to BDFL’s final authority as a “special” or “overriding”
vote. When it does occur, the BDFL override typically happens in situations
where there is a deadlock in the Steering Council or if the Steering Council
asks the BDFL to make a decision on a specific matter. To ensure the
benevolence of the BDFL, The Project encourages others to fork the project if
they disagree with the overall direction the BDFL is taking. The BDFL is chair
of the Steering Council (see below) and may delegate his/her authority on a
particular decision or set of decisions to any other Council member at his/her
discretion.
The BDFL can appointing his/her successor, but it is expected that the Steering
Council would be consulted on this decision. If the BDFL is unable to appoint a
successor, the Steering Council will make a suggestion or suggestions to the
Main NumFOCUS Board. While the Steering Council and Main NumFOCUS Board will
work together closely on the BDFL selection process, the Main NUMFOCUS Board
will make the final decision.
Steering Council
----------------
The Project will have a Steering Council that consists of Project Contributors
who have produced contributions that are substantial in quality and quantity,
and sustained over at least one year. The overall role of the Council is to
ensure, through working with the BDFL and taking input from the Community, the
long-term well-being of the project, both technically and as a community.
During the everyday project activities, council members participate in all
discussions, code review and other project activities as peers with all other
Contributors and the Community. In these everyday activities, Council Members
do not have any special power or privilege through their membership on the
Council. However, it is expected that because of the quality and quantity of
their contributions and their expert knowledge of the Project Software and
Services that Council Members will provide useful guidance, both technical and
in terms of project direction, to potentially less experienced contributors.
The Steering Council and its Members play a special role in certain situations.
In particular, the Council may:
- Make decisions about the overall scope, vision and direction of the
project.
- Make decisions about strategic collaborations with other organizations or
individuals.
- Make decisions about specific technical issues, features, bugs and pull
requests. They are the primary mechanism of guiding the code review process
and merging pull requests.
- Make decisions about the Services that are run by The Project and manage
those Services for the benefit of the Project and Community.
- Make decisions when regular community discussion doesn’t produce consensus
on an issue in a reasonable time frame.
The current Steering Council membership comprises:
- Colin Carroll
- Peadar Coyle
- Bill Engels
- Chris Fonnesbeck
- Maxim Kochurov
- Junpeng Lao
- Osvaldo Martin
- Austin Rochford
- Adrian Seyboldt
- Thomas Wiecki
### Council membership
To become eligible for being a Steering Council Member an individual must be a
Project Contributor who has produced contributions that are substantial in
quality and quantity, and sustained over at least one year. Potential Council
Members are nominated by existing Council members and voted upon by the
existing Council after asking if the potential Member is interested and willing
to serve in that capacity. The Council will be initially formed from the set of
existing Core Developers who, as of late 2014, have been significantly active
over the last year.
When considering potential Members, the Council will look at candidates with a
comprehensive view of their contributions. This will include but is not limited
to code, code review, infrastructure work, mailing list and chat participation,
community help/building, education and outreach, design work, etc. We are
deliberately not setting arbitrary quantitative metrics (like “100 commits in
this repo”) to avoid encouraging behavior that plays to the metrics rather than
the project’s overall well-being. We want to encourage a diverse array of
backgrounds, viewpoints and talents in our team, which is why we explicitly do
not define code as the sole metric on which council membership will be
evaluated.
If a Council member becomes inactive in the project for a period of one year,
they will be considered for removal from the Council. Before removal, inactive
Member will be approached by the BDFL to see if they plan on returning to
active participation. If not they will be removed immediately upon a Council
vote. If they plan on returning to active participation soon, they will be
given a grace period of one year. If they don’t return to active participation
within that time period they will be removed by vote of the Council without
further grace period. All former Council members can be considered for
membership again at any time in the future, like any other Project Contributor.
Retired Council members will be listed on the project website, acknowledging
the period during which they were active in the Council.
The Council reserves the right to eject current Members, other than the BDFL,
if they are deemed to be actively harmful to the project’s well-being, and
attempts at communication and conflict resolution have failed.
### Conflict of interest
It is expected that the BDFL and Council Members will be employed at a wide
range of companies, universities and non-profit organizations. Because of this,
it is possible that Members will have conflict of interests. Such conflict of
interests include, but are not limited to:
- Financial interests, such as investments, employment or contracting work,
outside of The Project that may influence their work on The Project.
- Access to proprietary information of their employer that could potentially
leak into their work with the Project.
All members of the Council, BDFL included, shall disclose to the rest of the
Council any conflict of interest they may have. Members with a conflict of
interest in a particular issue may participate in Council discussions on that
issue, but must recuse themselves from voting on the issue. If the BDFL has
recused him/herself for a particular decision, they will appoint a substitute
BDFL for that decision.
### Private communications of the Council
Unless specifically required, all Council discussions and activities will be
public and done in collaboration and discussion with the Project Contributors
and Community. The Council will have a private mailing list that will be used
sparingly and only when a specific matter requires privacy. When private
communications and decisions are needed, the Council will do its best to
summarize those to the Community after eliding personal/private/sensitive
information that should not be posted to the public internet.
### Subcommittees
The Council can create subcommittees that provide leadership and guidance for
specific aspects of the project. Like the Council as a whole, subcommittees
should conduct their business in an open and public manner unless privacy is
specifically called for. Private subcommittee communications should happen on
the main private mailing list of the Council unless specifically called for.
Even if the BDFL does not sit on a specific subcommittee, he/she still retains
override authority on the subcommittee's decisions. However, it is expected that
he/she will appoint a delegate to oversee the subcommittee's decisions, and
explicit intervention from the BDFL will only be sought if the committee
disagrees with the delegate's decision and no resolution is possible within the
subcommittee. This is a different situation from a BDFL delegate for a specific
decision, or a recusal situation, in which the BDFL gives up his/her authority
to someone else in full.
### NumFOCUS Subcommittee
The Council will maintain one narrowly focused subcommittee to manage its
interactions with NumFOCUS.
- The NumFOCUS Subcommittee is comprised of 5 persons who manage project
funding that comes through NumFOCUS. It is expected that these funds will
be spent in a manner that is consistent with the non-profit mission of
NumFOCUS and the direction of the Project as determined by the full
Council.
- This Subcommittee shall NOT make decisions about the direction, scope or
technical direction of the Project.
- This Subcommittee will have 5 members, 4 of whom will be current Council
Members and 1 of whom will be external to the Steering Council. No more
than 2 Subcommitee Members can report to one person through employment or
contracting work (including the reportee, i.e. the reportee + 1 is the
max). This avoids effective majorities resting on one person.
The current NumFOCUS Subcommittee consists of:
- Peadar Coyle
- Chris Fonnesbeck
- John Salvatier
- Jon Sedar
- Thomas Wiecki
### Institutional Partners and Funding
The BDFL and Steering Council are the primary leadership for the project. No
outside institution, individual or legal entity has the ability to own,
control, usurp or influence the project other than by participating in the
Project as Contributors and Council Members. However, because institutions are
the primary funding mechanism for the project, it is important to formally
acknowledge institutional participation in the project. These are Institutional
Partners.
An Institutional Contributor is any individual Project Contributor who
contributes to the project as part of their official duties at an Institutional
Partner. Likewise, an Institutional Council Member is any Project Steering
Council Member who contributes to the project as part of their official duties
at an Institutional Partner.
With these definitions, an Institutional Partner is any recognized legal entity
in the United States or elsewhere that employs at least one Institutional
Contributor or Institutional Council Member. Institutional Partners can be
for-profit or non-profit entities.
Institutions become eligible to become an Institutional Partner by
employing individuals who actively contribute to The Project as part
of their official duties. To state this another way, the only way for
an Institutional Partner to influence the project is by actively
contributing to the open development of the project, on equal terms
with any other member of the community of Contributors and Council
Members. Merely using PyMC3 Software or Services in an
institutional context does not allow an entity to become an
Institutional Partner. Financial gifts do not enable an entity to
become an Institutional Partner. Once an institution becomes eligible
for Institutional Partnership, the Steering Council must nominate and
approve the Partnership.
If an existing Institutional Partner no longer has a contributing employee,
they will be given a one-year grace period for other employees to begin
contributing.
An Institutional Partner is free to pursue funding for their work on The
Project through any legal means. This could involve a non-profit organization
raising money from private foundations and donors or a for-profit company
building proprietary products and services that leverage Project Software and
Services. Funding acquired by Institutional Partners to work on The Project is
called Institutional Funding. However, no funding obtained by an Institutional
Partner can override The Project BDFL and Steering Council. If a Partner has
funding to do PyMC3 work and the Council decides to not pursue that
work as a project, the Partner is free to pursue it on their own. However in
this situation, that part of the Partner’s work will not be under the
PyMC3 banner and cannot use the Project trademarks in a way that
suggests a formal relationship.
To acknowledge institutional contributions, there are two level of Institutional
Partners, with associated benefits:
**Tier 1** = an institution with at least one Institutional Council Member
- Acknowledged on the PyMC websites, in talks and T-shirts.
- Ability to acknowledge their own funding sources on the PyMC
websites, in talks and T-shirts.
- Unlimited participation in the annual Institutional Partners Workshop, held
during the (planned) annual PyMC Project Retreat. This allows the
Institutional Partner to invite as many of their own employees and funding
sources and collaborators as they want, even if they are not project
Contributors or Council Members.
- Ability to influence the project through the participation of their Council
Member.
- Council Members are invited to the bi-annual PyMC Developer Meeting.
**Tier 2** = an institution with at least one Institutional Contributor
- Same benefits as Tier 1 level Partners, but:
- Only Institutional Contributors are invited to the Institutional Partners
Workshop and bi-annual PyMC Developer Meeting
The PyMC3 project currently recognizes Quantopian as a Tier 1 Institutional Partner, with Thomas Wiecki and Adrian Seyboldt as their institutional contributors and council members.
|
PypiClean
|
/asv_runner-0.0.9-py3-none-any.whl/asv_runner/benchmarks/mark.py
|
import functools
import inspect
class SkipNotImplemented(NotImplementedError):
"""
Exception raised to indicate a skipped benchmark.
This exception inherits from `NotImplementedError`. It's used within an ASV
benchmark to skip the current benchmark for certain parameters or conditions
that are not implemented or do not apply.
#### Attributes
**message** (`str`)
: A string that provides a more detailed explanation of the skip reason.
#### Warning
Use of `SkipNotImplemented` is less efficient than the `@skip_for_params`
decorator as the setup for the benchmarks and the benchmarks themselves are
run before the error is raised, thus consuming unnecessary resources. Use
`@skip_for_params` where possible to avoid running the benchmarks that
should be skipped.
#### Notes
This is mainly provided for backwards compatibility with the behavior of asv
before 0.5 wherein individual benchmarks could raise and be skipped. From
0.5 onwards, only the setup function is meant to raise `NotImplemented` for
skipping parameter sets.
#### Example
This exception might be used in a scenario where a benchmark should be
skipped for certain conditions or parameters:
```{code-block} python
class Simple:
params = ([False, True])
param_names = ["ok"]
def time_failure(self, ok):
if ok:
x = 34.2**4.2
else:
raise SkipNotImplemented
```
"""
def __init__(self, message=""):
"""
Initialize a new instance of `SkipNotImplemented`.
#### Parameters
**message** (`str`)
: A string that provides a more detailed explanation of the skip reason.
Optional; if not provided, defaults to an empty string.
"""
self.message = message
super().__init__(self.message)
def skip_for_params(skip_params_list):
"""
Decorator to set skip parameters for a benchmark function.
#### Parameters
**skip_params_list** (`list`):
A list of tuples, each specifying a combination of parameter values that
should cause the benchmark function to be skipped.
#### Returns
**decorator** (`function`):
A decorator function that sets the skip parameters for the benchmark
function.
#### Notes
The `skip_for_params` decorator can be used to specify conditions under
which a benchmark function should be skipped. Each tuple in the list
represents a combination of parameter values which, if received by the
benchmark function, will cause that function to be skipped during the
benchmarking process.
The decorated function's `skip_params` attribute will be set with the
provided skip parameters, which will be used during the benchmarking
process.
Using this decorator is always more efficient than raising a
`SkipNotImplemented` exception within the benchmark function, as the
function setup and execution can be avoided entirely for skipped parameters.
#### Example
```{code-block} python
class Simple:
params = ([False, True])
param_names = ["ok"]
@skip_for_params([(False, )])
def time_failure(self, ok):
if ok:
x = 34.2**4.2
```
"""
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
setattr(wrapper, "skip_params", skip_params_list)
return wrapper
return decorator
def skip_benchmark(func):
"""
Decorator to mark a function as skipped for benchmarking.
#### Parameters
**func** (function)
: The function to be marked as skipped.
#### Returns
**wrapper** (function)
: A wrapped function that is marked to be skipped for benchmarking.
#### Notes
The `skip_benchmark` decorator can be used to mark a specific function as
skipped for benchmarking. When the decorated function is encountered during
benchmarking, it will be skipped and not included in the benchmarking
process.
"""
@functools.wraps(func)
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
setattr(wrapper, "skip_benchmark", True)
return wrapper
def skip_benchmark_if(condition):
"""
Decorator to skip benchmarking of a function if a condition is met.
#### Parameters
**condition** (`bool`)
: A boolean that indicates whether to skip benchmarking. If `True`,
the decorated function will be skipped for benchmarking. If `False`,
the decorated function will be benchmarked as usual.
#### Returns
**decorator** (function)
: A decorator function that sets the condition under which the decorated function
will be skipped for benchmarking.
#### Notes
The `skip_if` decorator can be used to skip the benchmarking of a specific
function if a condition is met. It is faster than raising
`SkipNotImplemented` as it skips the `setup()` as well.
"""
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
if condition:
setattr(wrapper, "skip_benchmark", True)
return wrapper
return decorator
def skip_params_if(skip_params_list, condition):
"""
Decorator to set skip parameters for a benchmark function if a condition is met.
#### Parameters
**skip_params_list** (`list`):
A list specifying the skip parameters for the benchmark function.
**condition** (`bool`)
: A boolean that indicates whether to set the skip parameters. If `True`,
the skip parameters will be set for the decorated function. If `False`,
no parameters will be skipped.
#### Returns
**decorator** (function):
A decorator function that sets the skip parameters for the benchmark function
if the condition is met.
#### Notes
The `skip_params_if` decorator can be used to specify skip parameters for a
benchmark function if a condition is met.
"""
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
if condition:
setattr(wrapper, "skip_params", skip_params_list)
return wrapper
return decorator
def parameterize_class_with(param_dict):
"""
Class Decorator to set benchmark parameters for a class.
#### Parameters
**param_dict** (`dict`):
A dictionary specifying the parameters for the benchmark class.
The keys represent the parameter names, and the values are lists
of values for those parameters.
#### Returns
**decorator** (function):
A class decorator that sets the parameters for the benchmark functions.
#### Notes
The `parameterize_class_with` decorator can be used to specify parameters for a
benchmark class. The parameters are defined as a dictionary, where keys are
the parameter names and values are lists of respective values. The decorated
class's `params` and `param_names` attributes will be set with the provided
parameters and names, which will be used during the benchmarking process.
This decorator will overwrite any existing `params` and `param_names`
attributes in the class.
"""
def decorator(cls):
if not inspect.isclass(cls):
raise TypeError(
"The parameterize_class_with decorator can only be used with classes"
)
# Handle the single parameter case separately.
if len(param_dict) > 1:
cls.params = list(param_dict.values())
else:
cls.params = list(param_dict.values())[0]
cls.param_names = list(param_dict.keys())
return cls
return decorator
def parameterize_func_with(param_dict):
"""
Function Decorator to set benchmark parameters for a function.
#### Parameters
**param_dict** (`dict`):
A dictionary specifying the parameters for the benchmark function.
The keys represent the parameter names, and the values are lists
of values for those parameters.
#### Returns
**decorator** (function):
A function decorator that sets the parameters for the benchmark function.
#### Notes
The `parameterize_func_with` decorator can be used to specify parameters for a
benchmark function. The parameters are defined as a dictionary, where keys are
the parameter names and values are lists of respective values. The decorated
function's `params` and `param_names` attributes will be set with the provided
parameters and names, which will be used during the benchmarking process.
This decorator will overwrite any existing `params` and `param_names`
attributes in the function, and it should not be used with methods of a class.
"""
def decorator(func):
if inspect.isclass(func) or inspect.ismethod(func):
raise TypeError(
"The parameterize_func_with decorator can only be used with functions"
)
if len(param_dict) > 1:
func.params = list(param_dict.values())
else:
func.params = list(param_dict.values())[0]
func.param_names = list(param_dict.keys())
return func
return decorator
def parameterize(param_dict):
"""
Decorator to set benchmark parameters for a function or a class.
#### Parameters
**param_dict** (`dict`):
A dictionary specifying the parameters for the benchmark.
The keys represent the parameter names, and the values are lists
of values for those parameters.
#### Returns
**decorator** (function):
A function or class decorator that sets the parameters for the benchmark.
#### Notes
The `parameterize` decorator can be used to specify parameters for a
benchmark function or class. The parameters are defined as a dictionary,
where keys are the parameter names and values are lists of respective values.
The decorated function or class's `params` and `param_names` attributes
will be set with the provided parameters and names, which will be used
during the benchmarking process.
"""
def decorator(obj):
if inspect.isclass(obj):
return parameterize_class_with(param_dict)(obj)
elif callable(obj):
return parameterize_func_with(param_dict)(obj)
else:
raise TypeError(
"The parameterize decorator can only be used with functions or classes"
)
return decorator
def timeout_class_at(seconds):
"""
Class Decorator to set timeout for a class.
#### Parameters
**seconds** (`float`)
: The number of seconds after which the class methods should be timed out.
#### Returns
**decorator** (function)
: A class decorator that sets the timeout for the class.
#### Notes
The `timeout_class_at` decorator can be used to specify a timeout for all
methods in a class. The timeout is stored as an attribute on the class and
applies to all its methods. Individual methods can override this timeout by
using the `timeout_func_at` or `timeout_at` decorators.
"""
def decorator(cls):
if not inspect.isclass(cls):
raise TypeError(
"The timeout_class_with decorator can only be used with classes"
)
cls.timeout = seconds
return cls
return decorator
def timeout_func_at(seconds):
"""
Function Decorator to set timeout for a function.
#### Parameters
**seconds** (`float`)
: The number of seconds after which the function should be timed out.
#### Returns
**decorator** (function)
: A function decorator that sets the timeout for the function.
#### Notes
The `timeout_func_at` decorator can be used to specify a timeout for a
specific function. This is particularly useful for benchmarking, where you
might want to stop execution of functions that take too long. The timeout is
stored as an attribute on the function.
"""
def decorator(func):
if inspect.isclass(func):
raise TypeError(
"The timeout_func_with decorator can only be used with functions"
)
@functools.wraps(func)
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
setattr(wrapper, "timeout", seconds)
return wrapper
return decorator
def timeout_at(seconds):
"""
Decorator to set a timeout for a function or a class.
#### Parameters
**seconds** (`float`)
: The number of seconds after which the function or the class methods should
be timed out.
#### Returns
**decorator** (function)
: A decorator that sets the timeout for the function or the class.
#### Notes
The `timeout_at` decorator can be used to set a specific timeout for a
function or all methods in a class. If applied to a class, the timeout is
stored as an attribute on the class and applies to all its methods.
Individual methods can override this timeout by using the `timeout_func_at`
or `timeout_at` decorators. If applied to a function, the timeout is stored
directly on the function.
"""
def decorator(obj):
if inspect.isclass(obj):
return timeout_class_at(seconds)(obj)
elif callable(obj):
return timeout_func_at(seconds)(obj)
else:
raise TypeError(
"The parameterize decorator can only be used with functions or classes"
)
return decorator
__all__ = [
"parameterize",
"skip_benchmark",
"skip_benchmark_if",
"skip_for_params",
"skip_params_if",
"timeout_at",
]
|
PypiClean
|
/pathlib_mate-1.2.1.tar.gz/pathlib_mate-1.2.1/pathlib_mate/mate_mutate_methods.py
|
import os
import shutil
# for type hint only
try: # pragma: no cover
from typing import TYPE_CHECKING, Union
if TYPE_CHECKING:
from .pathlib2 import Path
except ImportError: # pragma: no cover
pass
class MutateMethods(object):
"""
Provide methods to mutate the Path instance.
"""
# --- methods return another Path ---
def drop_parts(self, n=1):
"""
Drop number of parts from the ends. By default, it is equal to
``self.parent``.
Example::
>>> Path("/usr/bin/python").drop_parts(1)
"/user/bin"
>>> Path("/usr/bin/python").drop_parts(2)
"/user"
:type self: Path
:type n: int
:param n: integer, number of parts you wants to drop from ends.
n has to greater equal than 0.
:rtype: Path
:returns: a new Path object.
"""
return self.__class__(*self.parts[:-n])
def append_parts(self, *parts):
"""
Append some parts to the end of this path.
Example::
>>> Path("/usr/bin/python").append_parts("lib")
"/user/bin/python/lib"
>>> Path("/usr/bin/python").append_parts("lib", "core.py")
"/user/bin/python/lib/core.py"
:type self: Path
:rtype: Path
:returns: a new Path object.
"""
return self.__class__(self, *parts)
def change(
self,
new_abspath=None,
new_dirpath=None,
new_dirname=None,
new_basename=None,
new_fname=None,
new_ext=None,
):
"""
Return a new :class:`pathlib_mate.pathlib2.Path` object with updated path.
Example::
>>> Path("/Users/alice/test.py").change(new_fname="test1")
/Users/alice/test1.py
>>> Path("/Users/alice/test.py").change(new_ext=".txt")
/Users/alice/test.txt
>>> Path("/Users/alice/test.py").change(new_dirname="bob")
/Users/bob/test.py
>>> Path("/Users/alice/test.py").change(new_dirpath="/tmp")
/tmp/test.py
:type self: Path
:type new_abspath: Union[str, Path]
:type new_dirpath: str
:type new_dirname: str
:type new_basename: str
:type new_fname: str
:type new_ext: str
:rtype: Path
**中文文档**
高级重命名函数, 允许用于根据路径的各个组成部分进行重命名. 但和 os.rename
方法一样, 需要保证母文件夹存在.
"""
if new_abspath is not None:
p = self.__class__(new_abspath)
return p
if (new_dirpath is None) and (new_dirname is not None):
new_dirpath = os.path.join(self.parent.dirpath, new_dirname)
elif (new_dirpath is not None) and (new_dirname is None):
new_dirpath = new_dirpath
elif (new_dirpath is None) and (new_dirname is None):
new_dirpath = self.dirpath
elif (new_dirpath is not None) and (new_dirname is not None):
raise ValueError("Cannot having both new_dirpath and new_dirname!")
if new_basename is None:
if new_fname is None:
new_fname = self.fname
if new_ext is None:
new_ext = self.ext
new_basename = new_fname + new_ext
else:
if new_fname is not None or new_ext is not None:
raise ValueError("Cannot having both new_basename, "
"new_fname, new_ext!")
return self.__class__(new_dirpath, new_basename)
def is_not_exist_or_allow_overwrite(self, overwrite=False): # pragma: no cover
"""
Test whether a file target is not exists or it exists but allow
overwrite.
"""
if (not self.exists()) or (overwrite is True):
return True
else:
return False
def moveto(
self,
new_abspath=None,
new_dirpath=None,
new_dirname=None,
new_basename=None,
new_fname=None,
new_ext=None,
overwrite=False,
makedirs=False,
):
"""
Similar to :meth:`~pathlib_mate.mate_mutate_methods.MutateMethods.change`
method. However, it move the original path to new location.
:type self: Path
:type new_abspath: Union[str, Path]
:type new_dirpath: str
:type new_dirname: str
:type new_basename: str
:type new_fname: str
:type new_ext: str
:type overwrite: bool
:type makedirs: bool
:rtype: Path
**中文文档**
高级 文件 / 文件夹 移动函数, 允许用于根据路径的各个组成部分进行重命名, 然后移动.
"""
self.assert_exists()
p = self.change(
new_abspath=new_abspath,
new_dirpath=new_dirpath,
new_dirname=new_dirname,
new_basename=new_basename,
new_fname=new_fname,
new_ext=new_ext,
)
if p.is_not_exist_or_allow_overwrite(overwrite=overwrite):
# 如果两个路径不同, 才进行move
if self.abspath != p.abspath:
if makedirs:
parent = p.parent
if not parent.exists():
os.makedirs(parent.abspath)
self.rename(p)
return p
def copyto(
self,
new_abspath=None,
new_dirpath=None,
new_dirname=None,
new_basename=None,
new_fname=None,
new_ext=None,
overwrite=False,
makedirs=False,
):
"""
Similar to :meth:`~pathlib_mate.mate_mutate_methods.MutateMethods.change`
method. However, it copy the original path to new location.
:type self: Path
:type new_abspath: Union[str, Path]
:type new_dirpath: str
:type new_dirname: str
:type new_basename: str
:type new_fname: str
:type new_ext: str
:type overwrite: bool
:type makedirs: bool
:rtype: Path
**中文文档**
高级 文件 / 文件夹 拷贝函数, 允许用于根据路径的各个组成部分进行重命名, 然后拷贝.
"""
self.assert_exists()
p = self.change(
new_abspath=new_abspath,
new_dirpath=new_dirpath,
new_dirname=new_dirname,
new_basename=new_basename,
new_fname=new_fname,
new_ext=new_ext,
)
if p.is_not_exist_or_allow_overwrite(overwrite=overwrite):
# 如果两个路径不同, 才进行copy
if self.abspath != p.abspath:
try:
shutil.copy(self.abspath, p.abspath)
except IOError as e:
if makedirs:
os.makedirs(p.parent.abspath)
shutil.copy(self.abspath, p.abspath)
else:
raise e
return p
def remove(self):
"""
Remove this file. Won't work if it is a directory.
:type self: Path
"""
self.unlink()
def remove_if_exists(self):
"""
Remove a file or entire directory recursively.
:type self: Path
"""
if self.exists():
if self.is_dir():
shutil.rmtree(self.abspath)
else:
self.remove()
def mkdir_if_not_exists(self):
"""
Make a directory if not exists yet.
:type self: Path
"""
self.mkdir(parents=True, exist_ok=True)
@classmethod
def dir_here(cls, file_var):
"""
Return the directory of the python script that where this method
is called.
Suppose you have a file structure like this::
/Users/myname/test.py
And it is the content of ``test.py``::
from pathlib_mate import Path
dir_here = Path.dir_here(__file__)
print(dir_here) # /Users/myname
:type file_var: str
:param file_var: the __file__ variable
:rtype: Path
"""
return cls(file_var).absolute().parent
|
PypiClean
|
/alipay-sdk-python-pycryptodome-3.3.202.tar.gz/alipay-sdk-python-pycryptodome-3.3.202/alipay/aop/api/domain/KoubeiCateringPosSidedishbatchSaveModel.py
|
import json
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.PosDishMaterialModel import PosDishMaterialModel
class KoubeiCateringPosSidedishbatchSaveModel(object):
def __init__(self):
self._dish_ids = None
self._dish_material_list = None
self._merchant_id = None
self._shop_id = None
@property
def dish_ids(self):
return self._dish_ids
@dish_ids.setter
def dish_ids(self, value):
if isinstance(value, list):
self._dish_ids = list()
for i in value:
self._dish_ids.append(i)
@property
def dish_material_list(self):
return self._dish_material_list
@dish_material_list.setter
def dish_material_list(self, value):
if isinstance(value, list):
self._dish_material_list = list()
for i in value:
if isinstance(i, PosDishMaterialModel):
self._dish_material_list.append(i)
else:
self._dish_material_list.append(PosDishMaterialModel.from_alipay_dict(i))
@property
def merchant_id(self):
return self._merchant_id
@merchant_id.setter
def merchant_id(self, value):
self._merchant_id = value
@property
def shop_id(self):
return self._shop_id
@shop_id.setter
def shop_id(self, value):
self._shop_id = value
def to_alipay_dict(self):
params = dict()
if self.dish_ids:
if isinstance(self.dish_ids, list):
for i in range(0, len(self.dish_ids)):
element = self.dish_ids[i]
if hasattr(element, 'to_alipay_dict'):
self.dish_ids[i] = element.to_alipay_dict()
if hasattr(self.dish_ids, 'to_alipay_dict'):
params['dish_ids'] = self.dish_ids.to_alipay_dict()
else:
params['dish_ids'] = self.dish_ids
if self.dish_material_list:
if isinstance(self.dish_material_list, list):
for i in range(0, len(self.dish_material_list)):
element = self.dish_material_list[i]
if hasattr(element, 'to_alipay_dict'):
self.dish_material_list[i] = element.to_alipay_dict()
if hasattr(self.dish_material_list, 'to_alipay_dict'):
params['dish_material_list'] = self.dish_material_list.to_alipay_dict()
else:
params['dish_material_list'] = self.dish_material_list
if self.merchant_id:
if hasattr(self.merchant_id, 'to_alipay_dict'):
params['merchant_id'] = self.merchant_id.to_alipay_dict()
else:
params['merchant_id'] = self.merchant_id
if self.shop_id:
if hasattr(self.shop_id, 'to_alipay_dict'):
params['shop_id'] = self.shop_id.to_alipay_dict()
else:
params['shop_id'] = self.shop_id
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = KoubeiCateringPosSidedishbatchSaveModel()
if 'dish_ids' in d:
o.dish_ids = d['dish_ids']
if 'dish_material_list' in d:
o.dish_material_list = d['dish_material_list']
if 'merchant_id' in d:
o.merchant_id = d['merchant_id']
if 'shop_id' in d:
o.shop_id = d['shop_id']
return o
|
PypiClean
|
/haruhi_dl-2021.8.1.tar.gz/haruhi_dl-2021.8.1/haruhi_dl/extractor/line.py
|
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import compat_str
from ..utils import (
ExtractorError,
int_or_none,
js_to_json,
str_or_none,
)
class LineTVIE(InfoExtractor):
_VALID_URL = r'https?://tv\.line\.me/v/(?P<id>\d+)_[^/]+-(?P<segment>ep\d+-\d+)'
_TESTS = [{
'url': 'https://tv.line.me/v/793123_goodbye-mrblack-ep1-1/list/69246',
'info_dict': {
'id': '793123_ep1-1',
'ext': 'mp4',
'title': 'Goodbye Mr.Black | EP.1-1',
'thumbnail': r're:^https?://.*\.jpg$',
'duration': 998.509,
'view_count': int,
},
}, {
'url': 'https://tv.line.me/v/2587507_%E6%B4%BE%E9%81%A3%E5%A5%B3%E9%86%ABx-ep1-02/list/185245',
'only_matching': True,
}]
def _real_extract(self, url):
series_id, segment = re.match(self._VALID_URL, url).groups()
video_id = '%s_%s' % (series_id, segment)
webpage = self._download_webpage(url, video_id)
player_params = self._parse_json(self._search_regex(
r'naver\.WebPlayer\(({[^}]+})\)', webpage, 'player parameters'),
video_id, transform_source=js_to_json)
video_info = self._download_json(
'https://global-nvapis.line.me/linetv/rmcnmv/vod_play_videoInfo.json',
video_id, query={
'videoId': player_params['videoId'],
'key': player_params['key'],
})
stream = video_info['streams'][0]
extra_query = '?__gda__=' + stream['key']['value']
formats = self._extract_m3u8_formats(
stream['source'] + extra_query, video_id, ext='mp4',
entry_protocol='m3u8_native', m3u8_id='hls')
for a_format in formats:
a_format['url'] += extra_query
duration = None
for video in video_info.get('videos', {}).get('list', []):
encoding_option = video.get('encodingOption', {})
abr = video['bitrate']['audio']
vbr = video['bitrate']['video']
tbr = abr + vbr
formats.append({
'url': video['source'],
'format_id': 'http-%d' % int(tbr),
'height': encoding_option.get('height'),
'width': encoding_option.get('width'),
'abr': abr,
'vbr': vbr,
'filesize': video.get('size'),
})
if video.get('duration') and duration is None:
duration = video['duration']
self._sort_formats(formats)
if not formats[0].get('width'):
formats[0]['vcodec'] = 'none'
title = self._og_search_title(webpage)
# like_count requires an additional API request https://tv.line.me/api/likeit/getCount
return {
'id': video_id,
'title': title,
'formats': formats,
'extra_param_to_segment_url': extra_query[1:],
'duration': duration,
'thumbnails': [{'url': thumbnail['source']}
for thumbnail in video_info.get('thumbnails', {}).get('list', [])],
'view_count': video_info.get('meta', {}).get('count'),
}
class LineLiveBaseIE(InfoExtractor):
_API_BASE_URL = 'https://live-api.line-apps.com/web/v4.0/channel/'
def _parse_broadcast_item(self, item):
broadcast_id = compat_str(item['id'])
title = item['title']
is_live = item.get('isBroadcastingNow')
thumbnails = []
for thumbnail_id, thumbnail_url in (item.get('thumbnailURLs') or {}).items():
if not thumbnail_url:
continue
thumbnails.append({
'id': thumbnail_id,
'url': thumbnail_url,
})
channel = item.get('channel') or {}
channel_id = str_or_none(channel.get('id'))
return {
'id': broadcast_id,
'title': self._live_title(title) if is_live else title,
'thumbnails': thumbnails,
'timestamp': int_or_none(item.get('createdAt')),
'channel': channel.get('name'),
'channel_id': channel_id,
'channel_url': 'https://live.line.me/channels/' + channel_id if channel_id else None,
'duration': int_or_none(item.get('archiveDuration')),
'view_count': int_or_none(item.get('viewerCount')),
'comment_count': int_or_none(item.get('chatCount')),
'is_live': is_live,
}
class LineLiveIE(LineLiveBaseIE):
_VALID_URL = r'https?://live\.line\.me/channels/(?P<channel_id>\d+)/broadcast/(?P<id>\d+)'
_TESTS = [{
'url': 'https://live.line.me/channels/4867368/broadcast/16331360',
'md5': 'bc931f26bf1d4f971e3b0982b3fab4a3',
'info_dict': {
'id': '16331360',
'title': '振りコピ講座😙😙😙',
'ext': 'mp4',
'timestamp': 1617095132,
'upload_date': '20210330',
'channel': '白川ゆめか',
'channel_id': '4867368',
'view_count': int,
'comment_count': int,
'is_live': False,
}
}, {
# archiveStatus == 'DELETED'
'url': 'https://live.line.me/channels/4778159/broadcast/16378488',
'only_matching': True,
}]
def _real_extract(self, url):
channel_id, broadcast_id = re.match(self._VALID_URL, url).groups()
broadcast = self._download_json(
self._API_BASE_URL + '%s/broadcast/%s' % (channel_id, broadcast_id),
broadcast_id)
item = broadcast['item']
info = self._parse_broadcast_item(item)
protocol = 'm3u8' if info['is_live'] else 'm3u8_native'
formats = []
for k, v in (broadcast.get(('live' if info['is_live'] else 'archived') + 'HLSURLs') or {}).items():
if not v:
continue
if k == 'abr':
formats.extend(self._extract_m3u8_formats(
v, broadcast_id, 'mp4', protocol,
m3u8_id='hls', fatal=False))
continue
f = {
'ext': 'mp4',
'format_id': 'hls-' + k,
'protocol': protocol,
'url': v,
}
if not k.isdigit():
f['vcodec'] = 'none'
formats.append(f)
if not formats:
archive_status = item.get('archiveStatus')
if archive_status != 'ARCHIVED':
raise ExtractorError('this video has been ' + archive_status.lower(), expected=True)
self._sort_formats(formats)
info['formats'] = formats
return info
class LineLiveChannelIE(LineLiveBaseIE):
_VALID_URL = r'https?://live\.line\.me/channels/(?P<id>\d+)(?!/broadcast/\d+)(?:[/?&#]|$)'
_TEST = {
'url': 'https://live.line.me/channels/5893542',
'info_dict': {
'id': '5893542',
'title': 'いくらちゃん',
'description': 'md5:c3a4af801f43b2fac0b02294976580be',
},
'playlist_mincount': 29
}
def _archived_broadcasts_entries(self, archived_broadcasts, channel_id):
while True:
for row in (archived_broadcasts.get('rows') or []):
share_url = str_or_none(row.get('shareURL'))
if not share_url:
continue
info = self._parse_broadcast_item(row)
info.update({
'_type': 'url',
'url': share_url,
'ie_key': LineLiveIE.ie_key(),
})
yield info
if not archived_broadcasts.get('hasNextPage'):
return
archived_broadcasts = self._download_json(
self._API_BASE_URL + channel_id + '/archived_broadcasts',
channel_id, query={
'lastId': info['id'],
})
def _real_extract(self, url):
channel_id = self._match_id(url)
channel = self._download_json(self._API_BASE_URL + channel_id, channel_id)
return self.playlist_result(
self._archived_broadcasts_entries(channel.get('archivedBroadcasts') or {}, channel_id),
channel_id, channel.get('title'), channel.get('information'))
|
PypiClean
|
/django_amis_render-0.0.8-py3-none-any.whl/django_amis_render/static/amis-editor-demo/n/monaco-editor/min/vs/basic-languages/markdown/markdown.js
|
define("vs/basic-languages/markdown/markdown",["require","exports"],(function(e,t){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.language=t.conf=void 0,t.conf={comments:{blockComment:["\x3c!--","--\x3e"]},brackets:[["{","}"],["[","]"],["(",")"]],autoClosingPairs:[{open:"{",close:"}"},{open:"[",close:"]"},{open:"(",close:")"},{open:"<",close:">",notIn:["string"]}],surroundingPairs:[{open:"(",close:")"},{open:"[",close:"]"},{open:"`",close:"`"}],folding:{markers:{start:new RegExp("^\\s*\x3c!--\\s*#?region\\b.*--\x3e"),end:new RegExp("^\\s*\x3c!--\\s*#?endregion\\b.*--\x3e")}}},t.language={defaultToken:"",tokenPostfix:".md",control:/[\\`*_\[\]{}()#+\-\.!]/,noncontrol:/[^\\`*_\[\]{}()#+\-\.!]/,escapes:/\\(?:@control)/,jsescapes:/\\(?:[btnfr\\"']|[0-7][0-7]?|[0-3][0-7]{2})/,empty:["area","base","basefont","br","col","frame","hr","img","input","isindex","link","meta","param"],tokenizer:{root:[[/^\s*\|/,"@rematch","@table_header"],[/^(\s{0,3})(#+)((?:[^\\#]|@escapes)+)((?:#+)?)/,["white","keyword","keyword","keyword"]],[/^\s*(=+|\-+)\s*$/,"keyword"],[/^\s*((\*[ ]?)+)\s*$/,"meta.separator"],[/^\s*>+/,"comment"],[/^\s*([\*\-+:]|\d+\.)\s/,"keyword"],[/^(\t|[ ]{4})[^ ].*$/,"string"],[/^\s*~~~\s*((?:\w|[\/\-#])+)?\s*$/,{token:"string",next:"@codeblock"}],[/^\s*```\s*((?:\w|[\/\-#])+).*$/,{token:"string",next:"@codeblockgh",nextEmbedded:"$1"}],[/^\s*```\s*$/,{token:"string",next:"@codeblock"}],{include:"@linecontent"}],table_header:[{include:"@table_common"},[/[^\|]+/,"keyword.table.header"]],table_body:[{include:"@table_common"},{include:"@linecontent"}],table_common:[[/\s*[\-:]+\s*/,{token:"keyword",switchTo:"table_body"}],[/^\s*\|/,"keyword.table.left"],[/^\s*[^\|]/,"@rematch","@pop"],[/^\s*$/,"@rematch","@pop"],[/\|/,{cases:{"@eos":"keyword.table.right","@default":"keyword.table.middle"}}]],codeblock:[[/^\s*~~~\s*$/,{token:"string",next:"@pop"}],[/^\s*```\s*$/,{token:"string",next:"@pop"}],[/.*$/,"variable.source"]],codeblockgh:[[/```\s*$/,{token:"string",next:"@pop",nextEmbedded:"@pop"}],[/[^`]+/,"variable.source"]],linecontent:[[/&\w+;/,"string.escape"],[/@escapes/,"escape"],[/\b__([^\\_]|@escapes|_(?!_))+__\b/,"strong"],[/\*\*([^\\*]|@escapes|\*(?!\*))+\*\*/,"strong"],[/\b_[^_]+_\b/,"emphasis"],[/\*([^\\*]|@escapes)+\*/,"emphasis"],[/`([^\\`]|@escapes)+`/,"variable"],[/\{+[^}]+\}+/,"string.target"],[/(!?\[)((?:[^\]\\]|@escapes)*)(\]\([^\)]+\))/,["string.link","","string.link"]],[/(!?\[)((?:[^\]\\]|@escapes)*)(\])/,"string.link"],{include:"html"}],html:[[/<(\w+)\/>/,"tag"],[/<(\w+)/,{cases:{"@empty":{token:"tag",next:"@tag.$1"},"@default":{token:"tag",next:"@tag.$1"}}}],[/<\/(\w+)\s*>/,{token:"tag"}],[/<!--/,"comment","@comment"]],comment:[[/[^<\-]+/,"comment.content"],[/-->/,"comment","@pop"],[/<!--/,"comment.content.invalid"],[/[<\-]/,"comment.content"]],tag:[[/[ \t\r\n]+/,"white"],[/(type)(\s*=\s*)(")([^"]+)(")/,["attribute.name.html","delimiter.html","string.html",{token:"string.html",switchTo:"@tag.$S2.$4"},"string.html"]],[/(type)(\s*=\s*)(')([^']+)(')/,["attribute.name.html","delimiter.html","string.html",{token:"string.html",switchTo:"@tag.$S2.$4"},"string.html"]],[/(\w+)(\s*=\s*)("[^"]*"|'[^']*')/,["attribute.name.html","delimiter.html","string.html"]],[/\w+/,"attribute.name.html"],[/\/>/,"tag","@pop"],[/>/,{cases:{"$S2==style":{token:"tag",switchTo:"embeddedStyle",nextEmbedded:"text/css"},"$S2==script":{cases:{$S3:{token:"tag",switchTo:"embeddedScript",nextEmbedded:"$S3"},"@default":{token:"tag",switchTo:"embeddedScript",nextEmbedded:"text/javascript"}}},"@default":{token:"tag",next:"@pop"}}}]],embeddedStyle:[[/[^<]+/,""],[/<\/style\s*>/,{token:"@rematch",next:"@pop",nextEmbedded:"@pop"}],[/</,""]],embeddedScript:[[/[^<]+/,""],[/<\/script\s*>/,{token:"@rematch",next:"@pop",nextEmbedded:"@pop"}],[/</,""]]}}}));
|
PypiClean
|
/pyspnego-0.9.2-cp311-cp311-win32.whl/spnego/_asn1.py
|
import collections
import datetime
import enum
import struct
import typing
from spnego._text import to_bytes, to_text
ASN1Value = collections.namedtuple("ASN1Value", ["tag_class", "constructed", "tag_number", "b_data"])
"""A representation of an ASN.1 TLV as a Python object.
Defines the ASN.1 Type Length Value (TLV) values as separate objects for easier parsing. This is returned by
:method:`unpack_asn1`.
Attributes:
tag_class (TagClass): The tag class of the TLV.
constructed (bool): Whether the value is constructed or 0, 1, or more element encodings (True) or not (False).
tag_number (Union[TypeTagNumber, int]): The tag number of the value, can be a TypeTagNumber if the tag_class
is `universal` otherwise it's an explicit tag number value.
b_data (bytes): The raw byes of the TLV value.
"""
class TagClass(enum.IntEnum):
universal = 0
application = 1
context_specific = 2
private = 3
@classmethod
def native_labels(cls) -> typing.Dict["TagClass", str]:
return {
TagClass.universal: "Universal",
TagClass.application: "Application",
TagClass.context_specific: "Context-specific",
TagClass.private: "Private",
}
class TypeTagNumber(enum.IntEnum):
end_of_content = 0
boolean = 1
integer = 2
bit_string = 3
octet_string = 4
null = 5
object_identifier = 6
object_descriptor = 7
external = 8
real = 9
enumerated = 10
embedded_pdv = 11
utf8_string = 12
relative_oid = 13
time = 14
reserved = 15
sequence = 16
sequence_of = 16
set = 17
set_of = 17
numeric_string = 18
printable_string = 19
t61_string = 20
videotex_string = 21
ia5_string = 22
utc_time = 23
generalized_time = 24
graphic_string = 25
visible_string = 26
general_string = 27
universal_string = 28
character_string = 29
bmp_string = 30
date = 31
time_of_day = 32
date_time = 33
duration = 34
oid_iri = 35
relative_oid_iri = 36
@classmethod
def native_labels(cls) -> typing.Dict[int, str]:
return {
TypeTagNumber.end_of_content: "End-of-Content (EOC)",
TypeTagNumber.boolean: "BOOLEAN",
TypeTagNumber.integer: "INTEGER",
TypeTagNumber.bit_string: "BIT STRING",
TypeTagNumber.octet_string: "OCTET STRING",
TypeTagNumber.null: "NULL",
TypeTagNumber.object_identifier: "OBJECT IDENTIFIER",
TypeTagNumber.object_descriptor: "Object Descriptor",
TypeTagNumber.external: "EXTERNAL",
TypeTagNumber.real: "REAL (float)",
TypeTagNumber.enumerated: "ENUMERATED",
TypeTagNumber.embedded_pdv: "EMBEDDED PDV",
TypeTagNumber.utf8_string: "UTF8String",
TypeTagNumber.relative_oid: "RELATIVE-OID",
TypeTagNumber.time: "TIME",
TypeTagNumber.reserved: "RESERVED",
TypeTagNumber.sequence: "SEQUENCE or SEQUENCE OF",
TypeTagNumber.set: "SET or SET OF",
TypeTagNumber.numeric_string: "NumericString",
TypeTagNumber.printable_string: "PrintableString",
TypeTagNumber.t61_string: "T61String",
TypeTagNumber.videotex_string: "VideotexString",
TypeTagNumber.ia5_string: "IA5String",
TypeTagNumber.utc_time: "UTCTime",
TypeTagNumber.generalized_time: "GeneralizedTime",
TypeTagNumber.graphic_string: "GraphicString",
TypeTagNumber.visible_string: "VisibleString",
TypeTagNumber.general_string: "GeneralString",
TypeTagNumber.universal_string: "UniversalString",
TypeTagNumber.character_string: "CHARACTER",
TypeTagNumber.bmp_string: "BMPString",
TypeTagNumber.date: "DATE",
TypeTagNumber.time_of_day: "TIME-OF-DAY",
TypeTagNumber.date_time: "DATE-TIME",
TypeTagNumber.duration: "DURATION",
TypeTagNumber.oid_iri: "OID-IRI",
TypeTagNumber.relative_oid_iri: "RELATIVE-OID-IRI",
}
def extract_asn1_tlv(
tlv: typing.Union[bytes, ASN1Value],
tag_class: TagClass,
tag_number: typing.Union[int, TypeTagNumber],
) -> bytes:
"""Extract the bytes and validates the existing tag of an ASN.1 value."""
if isinstance(tlv, ASN1Value):
if tag_class == TagClass.universal:
label_name = TypeTagNumber.native_labels().get(tag_number, "Unknown tag type")
msg = "Invalid ASN.1 %s tags, actual tag class %s and tag number %s" % (
label_name,
f"{type(tlv.tag_class).__name__}.{tlv.tag_class.name}",
f"{type(tlv.tag_number).__name__}.{tlv.tag_number.name}"
if isinstance(tlv.tag_number, TypeTagNumber)
else tlv.tag_number,
)
else:
msg = "Invalid ASN.1 tags, actual tag %s and number %s, expecting class %s and number %s" % (
f"{type(tlv.tag_class).__name__}.{tlv.tag_class.name}",
f"{type(tlv.tag_number).__name__}.{tlv.tag_number.name}"
if isinstance(tlv.tag_number, TypeTagNumber)
else tlv.tag_number,
f"{type(tag_class).__name__}.{tag_class.name}",
f"{type(tag_number).__name__}.{tag_number.name}"
if isinstance(tag_number, TypeTagNumber)
else tag_number,
)
if tlv.tag_class != tag_class or tlv.tag_number != tag_number:
raise ValueError(msg)
return tlv.b_data
return tlv
def get_sequence_value(
sequence: typing.Dict[int, ASN1Value],
tag: int,
structure_name: str,
field_name: typing.Optional[str] = None,
unpack_func: typing.Optional[typing.Callable[[typing.Union[bytes, ASN1Value]], typing.Any]] = None,
) -> typing.Any:
"""Gets an optional tag entry in a tagged sequence will a further unpacking of the value."""
if tag not in sequence:
return
if not unpack_func:
return sequence[tag]
try:
return unpack_func(sequence[tag])
except ValueError as e:
where = "%s in %s" % (field_name, structure_name) if field_name else structure_name
raise ValueError("Failed unpacking %s: %s" % (where, str(e))) from e
def pack_asn1(
tag_class: TagClass,
constructed: bool,
tag_number: typing.Union[TypeTagNumber, int],
b_data: bytes,
) -> bytes:
"""Pack the ASN.1 value into the ASN.1 bytes.
Will pack the raw bytes into an ASN.1 Type Length Value (TLV) value. A TLV is in the form:
| Identifier Octet(s) | Length Octet(s) | Data Octet(s) |
Args:
tag_class: The tag class of the data.
constructed: Whether the data is constructed (True), i.e. contains 0, 1, or more element encodings, or is
primitive (False).
tag_number: The type tag number if tag_class is universal else the explicit tag number of the TLV.
b_data: The encoded value to pack into the ASN.1 TLV.
Returns:
bytes: The ASN.1 value as raw bytes.
"""
b_asn1_data = bytearray()
# ASN.1 Identifier octet is
#
# | Octet 1 | | Octet 2 |
# | 8 | 7 | 6 | 5 | 4 | 3 | 2 | 1 | | 8 | 7 | 6 | 5 | 4 | 3 | 2 | 1 |
# | Class | P/C | Tag Number (0-30) | | More | Tag number |
#
# If Tag Number is >= 31 the first 5 bits are 1 and the 2nd octet is used to encode the length.
if tag_class < 0 or tag_class > 3:
raise ValueError("tag_class must be between 0 and 3")
identifier_octets = tag_class << 6
identifier_octets |= (1 if constructed else 0) << 5
if tag_number < 31:
identifier_octets |= tag_number
b_asn1_data.append(identifier_octets)
else:
# Set the first 5 bits of the first octet to 1 and encode the tag number in subsequent octets.
identifier_octets |= 31
b_asn1_data.append(identifier_octets)
b_asn1_data.extend(_pack_asn1_octet_number(tag_number))
# ASN.1 Length octet for DER encoding is always in the definite form. This form packs the lengths in the following
# octet structure:
#
# | Octet 1 | | Octet n |
# | 8 | 7 | 6 | 5 | 4 | 3 | 2 | 1 | | 8 | 7 | 6 | 5 | 4 | 3 | 2 | 1 |
# | Long form | Short = length, Long = num octets | | Big endian length for long |
#
# Basically if the length < 127 it's encoded in the first octet, otherwise the first octet 7 bits indicates how
# many subsequent octets were used to encode the length.
length = len(b_data)
if length < 128:
b_asn1_data.append(length)
else:
length_octets = bytearray()
while length:
length_octets.append(length & 0b11111111)
length >>= 8
# Reverse the octets so the higher octets are first, add the initial length octet with the MSB set and add them
# all to the main ASN.1 byte array.
length_octets.reverse()
b_asn1_data.append(len(length_octets) | 0b10000000)
b_asn1_data.extend(length_octets)
return bytes(b_asn1_data) + b_data
def pack_asn1_bit_string(
value: bytes,
tag: bool = True,
) -> bytes:
# First octet is the number of unused bits in the last octet from the LSB.
b_data = b"\x00" + value
if tag:
b_data = pack_asn1(TagClass.universal, False, TypeTagNumber.bit_string, b_data)
return b_data
def pack_asn1_enumerated(
value: int,
tag: bool = True,
) -> bytes:
"""Packs an int into an ASN.1 ENUMERATED byte value with optional universal tagging."""
b_data = pack_asn1_integer(value, tag=False)
if tag:
b_data = pack_asn1(TagClass.universal, False, TypeTagNumber.enumerated, b_data)
return b_data
def pack_asn1_general_string(
value: typing.Union[str, bytes],
tag: bool = True,
encoding: str = "ascii",
) -> bytes:
"""Packs an string value into an ASN.1 GeneralString byte value with optional universal tagging."""
b_data = to_bytes(value, encoding=encoding)
if tag:
b_data = pack_asn1(TagClass.universal, False, TypeTagNumber.general_string, b_data)
return b_data
def pack_asn1_integer(
value: int,
tag: bool = True,
) -> bytes:
"""Packs an int value into an ASN.1 INTEGER byte value with optional universal tagging."""
# Thanks to https://github.com/andrivet/python-asn1 for help with the negative value logic.
is_negative = False
limit = 0x7F
if value < 0:
value = -value
is_negative = True
limit = 0x80
b_int = bytearray()
while value > limit:
val = value & 0xFF
if is_negative:
val = 0xFF - val
b_int.append(val)
value >>= 8
b_int.append(((0xFF - value) if is_negative else value) & 0xFF)
if is_negative:
for idx, val in enumerate(b_int):
if val < 0xFF:
b_int[idx] += 1
break
b_int[idx] = 0
if is_negative and b_int[-1] == 0x7F: # Two's complement corner case
b_int.append(0xFF)
b_int.reverse()
b_value = bytes(b_int)
if tag:
b_value = pack_asn1(TagClass.universal, False, TypeTagNumber.integer, b_value)
return b_value
def pack_asn1_object_identifier(
oid: str,
tag: bool = True,
) -> bytes:
"""Packs an str value into an ASN.1 OBJECT IDENTIFIER byte value with optional universal tagging."""
b_oid = bytearray()
oid_split = [int(i) for i in oid.split(".")]
if len(oid_split) < 2:
raise ValueError("An OID must have 2 or more elements split by '.'")
# The first byte of the OID is the first 2 elements (x.y) as (x * 40) + y
b_oid.append((oid_split[0] * 40) + oid_split[1])
for val in oid_split[2:]:
b_oid.extend(_pack_asn1_octet_number(val))
b_value = bytes(b_oid)
if tag:
b_value = pack_asn1(TagClass.universal, False, TypeTagNumber.object_identifier, b_value)
return b_value
def pack_asn1_octet_string(
b_data: bytes,
tag: bool = True,
) -> bytes:
"""Packs an bytes value into an ASN.1 OCTET STRING byte value with optional universal tagging."""
if tag:
b_data = pack_asn1(TagClass.universal, False, TypeTagNumber.octet_string, b_data)
return b_data
def pack_asn1_sequence(
sequence: typing.List[bytes],
tag: bool = True,
) -> bytes:
"""Packs a list of encoded bytes into an ASN.1 SEQUENCE byte value with optional universal tagging."""
b_data = b"".join(sequence)
if tag:
b_data = pack_asn1(TagClass.universal, True, TypeTagNumber.sequence, b_data)
return b_data
def _pack_asn1_octet_number(num: int) -> bytes:
"""Packs an int number into an ASN.1 integer value that spans multiple octets."""
num_octets = bytearray()
while num:
# Get the 7 bit value of the number.
octet_value = num & 0b01111111
# Set the MSB if this isn't the first octet we are processing (overall last octet)
if len(num_octets):
octet_value |= 0b10000000
num_octets.append(octet_value)
# Shift the number by 7 bits as we've just processed them.
num >>= 7
# Finally we reverse the order so the higher octets are first.
num_octets.reverse()
return num_octets
def unpack_asn1(b_data: bytes) -> typing.Tuple[ASN1Value, bytes]:
"""Unpacks an ASN.1 TLV into each element.
Unpacks the raw ASN.1 value into a `ASN1Value` tuple and returns the remaining bytes that are not part of the
ASN.1 TLV.
Args:
b_data: The raw bytes to unpack as an ASN.1 TLV.
Returns:
ASN1Value: The ASN.1 value that is unpacked from the raw bytes passed in.
bytes: Any remaining bytes that are not part of the ASN1Value.
"""
octet1 = struct.unpack("B", b_data[:1])[0]
tag_class = TagClass((octet1 & 0b11000000) >> 6)
constructed = bool(octet1 & 0b00100000)
tag_number = octet1 & 0b00011111
length_offset = 1
if tag_number == 31:
tag_number, octet_count = _unpack_asn1_octet_number(b_data[1:])
length_offset += octet_count
if tag_class == TagClass.universal:
tag_number = TypeTagNumber(tag_number)
b_data = b_data[length_offset:]
length = struct.unpack("B", b_data[:1])[0]
length_octets = 1
if length & 0b10000000:
# If the MSB is set then the length octet just contains the number of octets that encodes the actual length.
length_octets += length & 0b01111111
length = 0
for idx in range(1, length_octets):
octet_val = struct.unpack("B", b_data[idx : idx + 1])[0]
length += octet_val << (8 * (length_octets - 1 - idx))
value = ASN1Value(
tag_class=tag_class,
constructed=constructed,
tag_number=tag_number,
b_data=b_data[length_octets : length_octets + length],
)
return value, b_data[length_octets + length :]
def unpack_asn1_bit_string(value: typing.Union[ASN1Value, bytes]) -> bytes:
"""Unpacks an ASN.1 BIT STRING value."""
b_data = extract_asn1_tlv(value, TagClass.universal, TypeTagNumber.bit_string)
# First octet is the number of unused bits in the last octet from the LSB.
unused_bits = struct.unpack("B", b_data[:1])[0]
last_octet = struct.unpack("B", b_data[-2:-1])[0]
last_octet = (last_octet >> unused_bits) << unused_bits
return b_data[1:-1] + struct.pack("B", last_octet)
def unpack_asn1_boolean(value: typing.Union[ASN1Value, bytes]) -> bool:
"""Unpacks an ASN.1 BOOLEAN value."""
b_data = extract_asn1_tlv(value, TagClass.universal, TypeTagNumber.boolean)
return b_data != b"\x00"
def unpack_asn1_enumerated(value: typing.Union[ASN1Value, bytes]) -> int:
"""Unpacks an ASN.1 ENUMERATED value."""
b_data = extract_asn1_tlv(value, TagClass.universal, TypeTagNumber.enumerated)
return unpack_asn1_integer(b_data)
def unpack_asn1_general_string(value: typing.Union[ASN1Value, bytes]) -> bytes:
"""Unpacks an ASN.1 GeneralString value."""
return extract_asn1_tlv(value, TagClass.universal, TypeTagNumber.general_string)
def unpack_asn1_generalized_time(value: typing.Union[ASN1Value, bytes]) -> datetime.datetime:
"""Unpacks an ASN.1 GeneralizedTime value."""
data = to_text(extract_asn1_tlv(value, TagClass.universal, TypeTagNumber.generalized_time))
# While ASN.1 can have a timezone encoded, KerberosTime is the only thing we use and it is always in UTC with the
# Z prefix. We strip out the Z because Python 2 doesn't support the %z identifier and add the UTC tz to the object.
# https://www.rfc-editor.org/rfc/rfc4120#section-5.2.3
if data.endswith("Z"):
data = data[:-1]
err = None
for datetime_format in ["%Y%m%d%H%M%S.%f", "%Y%m%d%H%M%S"]:
try:
dt = datetime.datetime.strptime(data, datetime_format)
return dt.replace(tzinfo=datetime.timezone.utc)
except ValueError as e:
err = e
else:
raise err # type: ignore
def unpack_asn1_integer(value: typing.Union[ASN1Value, bytes]) -> int:
"""Unpacks an ASN.1 INTEGER value."""
b_int = bytearray(extract_asn1_tlv(value, TagClass.universal, TypeTagNumber.integer))
is_negative = b_int[0] & 0b10000000
if is_negative:
# Get the two's compliment.
for i in range(len(b_int)):
b_int[i] = 0xFF - b_int[i]
for i in range(len(b_int) - 1, -1, -1):
if b_int[i] == 0xFF:
b_int[i - 1] += 1
b_int[i] = 0
break
else:
b_int[i] += 1
break
int_value = 0
for val in b_int:
int_value = (int_value << 8) | val
if is_negative:
int_value *= -1
return int_value
def unpack_asn1_object_identifier(value: typing.Union[ASN1Value, bytes]) -> str:
"""Unpacks an ASN.1 OBJECT IDENTIFIER value."""
b_data = extract_asn1_tlv(value, TagClass.universal, TypeTagNumber.object_identifier)
first_element = struct.unpack("B", b_data[:1])[0]
second_element = first_element % 40
ids = [(first_element - second_element) // 40, second_element]
idx = 1
while idx != len(b_data):
oid, octet_len = _unpack_asn1_octet_number(b_data[idx:])
ids.append(oid)
idx += octet_len
return ".".join([str(i) for i in ids])
def unpack_asn1_octet_string(value: typing.Union[ASN1Value, bytes]) -> bytes:
"""Unpacks an ASN.1 OCTET STRING value."""
return extract_asn1_tlv(value, TagClass.universal, TypeTagNumber.octet_string)
def unpack_asn1_sequence(value: typing.Union[ASN1Value, bytes]) -> typing.List[ASN1Value]:
"""Unpacks an ASN.1 SEQUENCE value."""
b_data = extract_asn1_tlv(value, TagClass.universal, TypeTagNumber.sequence)
values = []
while b_data:
v, b_data = unpack_asn1(b_data)
values.append(v)
return values
def unpack_asn1_tagged_sequence(value: typing.Union[ASN1Value, bytes]) -> typing.Dict[int, ASN1Value]:
"""Unpacks an ASN.1 SEQUENCE value as a dictionary."""
return dict([(e.tag_number, unpack_asn1(e.b_data)[0]) for e in unpack_asn1_sequence(value)])
def _unpack_asn1_octet_number(b_data: bytes) -> typing.Tuple[int, int]:
"""Unpacks an ASN.1 INTEGER value that can span across multiple octets."""
i = 0
idx = 0
while True:
element = struct.unpack("B", b_data[idx : idx + 1])[0]
idx += 1
i = (i << 7) + (element & 0b01111111)
if not element & 0b10000000:
break
return i, idx # int value and the number of octets used.
|
PypiClean
|
/h2o_pysparkling_2.4-3.42.0.2.post1.tar.gz/h2o_pysparkling_2.4-3.42.0.2.post1/h2o/estimators/rulefit.py
|
from h2o.utils.metaclass import deprecated_params, deprecated_property
import h2o
from h2o.estimators.estimator_base import H2OEstimator
from h2o.exceptions import H2OValueError
from h2o.frame import H2OFrame
from h2o.utils.typechecks import assert_is_type, Enum, numeric
class H2ORuleFitEstimator(H2OEstimator):
"""
RuleFit
Builds a RuleFit on a parsed dataset, for regression or
classification.
"""
algo = "rulefit"
supervised_learning = True
@deprecated_params({'Lambda': 'lambda_'})
def __init__(self,
model_id=None, # type: Optional[Union[None, str, H2OEstimator]]
training_frame=None, # type: Optional[Union[None, str, H2OFrame]]
validation_frame=None, # type: Optional[Union[None, str, H2OFrame]]
seed=-1, # type: int
response_column=None, # type: Optional[str]
ignored_columns=None, # type: Optional[List[str]]
algorithm="auto", # type: Literal["auto", "drf", "gbm"]
min_rule_length=3, # type: int
max_rule_length=3, # type: int
max_num_rules=-1, # type: int
model_type="rules_and_linear", # type: Literal["rules_and_linear", "rules", "linear"]
weights_column=None, # type: Optional[str]
distribution="auto", # type: Literal["auto", "bernoulli", "multinomial", "gaussian", "poisson", "gamma", "tweedie", "laplace", "quantile", "huber"]
rule_generation_ntrees=50, # type: int
auc_type="auto", # type: Literal["auto", "none", "macro_ovr", "weighted_ovr", "macro_ovo", "weighted_ovo"]
remove_duplicates=True, # type: bool
lambda_=None, # type: Optional[List[float]]
max_categorical_levels=10, # type: int
):
"""
:param model_id: Destination id for this model; auto-generated if not specified.
Defaults to ``None``.
:type model_id: Union[None, str, H2OEstimator], optional
:param training_frame: Id of the training data frame.
Defaults to ``None``.
:type training_frame: Union[None, str, H2OFrame], optional
:param validation_frame: Id of the validation data frame.
Defaults to ``None``.
:type validation_frame: Union[None, str, H2OFrame], optional
:param seed: Seed for pseudo random number generator (if applicable).
Defaults to ``-1``.
:type seed: int
:param response_column: Response variable column.
Defaults to ``None``.
:type response_column: str, optional
:param ignored_columns: Names of columns to ignore for training.
Defaults to ``None``.
:type ignored_columns: List[str], optional
:param algorithm: The algorithm to use to generate rules.
Defaults to ``"auto"``.
:type algorithm: Literal["auto", "drf", "gbm"]
:param min_rule_length: Minimum length of rules. Defaults to 3.
Defaults to ``3``.
:type min_rule_length: int
:param max_rule_length: Maximum length of rules. Defaults to 3.
Defaults to ``3``.
:type max_rule_length: int
:param max_num_rules: The maximum number of rules to return. defaults to -1 which means the number of rules is
selected
by diminishing returns in model deviance.
Defaults to ``-1``.
:type max_num_rules: int
:param model_type: Specifies type of base learners in the ensemble.
Defaults to ``"rules_and_linear"``.
:type model_type: Literal["rules_and_linear", "rules", "linear"]
:param weights_column: Column with observation weights. Giving some observation a weight of zero is equivalent
to excluding it from the dataset; giving an observation a relative weight of 2 is equivalent to repeating
that row twice. Negative weights are not allowed. Note: Weights are per-row observation weights and do
not increase the size of the data frame. This is typically the number of times a row is repeated, but
non-integer values are supported as well. During training, rows with higher weights matter more, due to
the larger loss function pre-factor. If you set weight = 0 for a row, the returned prediction frame at
that row is zero and this is incorrect. To get an accurate prediction, remove all rows with weight == 0.
Defaults to ``None``.
:type weights_column: str, optional
:param distribution: Distribution function
Defaults to ``"auto"``.
:type distribution: Literal["auto", "bernoulli", "multinomial", "gaussian", "poisson", "gamma", "tweedie", "laplace",
"quantile", "huber"]
:param rule_generation_ntrees: Specifies the number of trees to build in the tree model. Defaults to 50.
Defaults to ``50``.
:type rule_generation_ntrees: int
:param auc_type: Set default multinomial AUC type.
Defaults to ``"auto"``.
:type auc_type: Literal["auto", "none", "macro_ovr", "weighted_ovr", "macro_ovo", "weighted_ovo"]
:param remove_duplicates: Whether to remove rules which are identical to an earlier rule. Defaults to true.
Defaults to ``True``.
:type remove_duplicates: bool
:param lambda_: Lambda for LASSO regressor.
Defaults to ``None``.
:type lambda_: List[float], optional
:param max_categorical_levels: For every categorical feature, only use this many most frequent categorical
levels for model training. Only used for categorical_encoding == EnumLimited.
Defaults to ``10``.
:type max_categorical_levels: int
"""
super(H2ORuleFitEstimator, self).__init__()
self._parms = {}
self._id = self._parms['model_id'] = model_id
self.training_frame = training_frame
self.validation_frame = validation_frame
self.seed = seed
self.response_column = response_column
self.ignored_columns = ignored_columns
self.algorithm = algorithm
self.min_rule_length = min_rule_length
self.max_rule_length = max_rule_length
self.max_num_rules = max_num_rules
self.model_type = model_type
self.weights_column = weights_column
self.distribution = distribution
self.rule_generation_ntrees = rule_generation_ntrees
self.auc_type = auc_type
self.remove_duplicates = remove_duplicates
self.lambda_ = lambda_
self.max_categorical_levels = max_categorical_levels
@property
def training_frame(self):
"""
Id of the training data frame.
Type: ``Union[None, str, H2OFrame]``.
"""
return self._parms.get("training_frame")
@training_frame.setter
def training_frame(self, training_frame):
self._parms["training_frame"] = H2OFrame._validate(training_frame, 'training_frame')
@property
def validation_frame(self):
"""
Id of the validation data frame.
Type: ``Union[None, str, H2OFrame]``.
"""
return self._parms.get("validation_frame")
@validation_frame.setter
def validation_frame(self, validation_frame):
self._parms["validation_frame"] = H2OFrame._validate(validation_frame, 'validation_frame')
@property
def seed(self):
"""
Seed for pseudo random number generator (if applicable).
Type: ``int``, defaults to ``-1``.
"""
return self._parms.get("seed")
@seed.setter
def seed(self, seed):
assert_is_type(seed, None, int)
self._parms["seed"] = seed
@property
def response_column(self):
"""
Response variable column.
Type: ``str``.
"""
return self._parms.get("response_column")
@response_column.setter
def response_column(self, response_column):
assert_is_type(response_column, None, str)
self._parms["response_column"] = response_column
@property
def ignored_columns(self):
"""
Names of columns to ignore for training.
Type: ``List[str]``.
"""
return self._parms.get("ignored_columns")
@ignored_columns.setter
def ignored_columns(self, ignored_columns):
assert_is_type(ignored_columns, None, [str])
self._parms["ignored_columns"] = ignored_columns
@property
def algorithm(self):
"""
The algorithm to use to generate rules.
Type: ``Literal["auto", "drf", "gbm"]``, defaults to ``"auto"``.
"""
return self._parms.get("algorithm")
@algorithm.setter
def algorithm(self, algorithm):
assert_is_type(algorithm, None, Enum("auto", "drf", "gbm"))
self._parms["algorithm"] = algorithm
@property
def min_rule_length(self):
"""
Minimum length of rules. Defaults to 3.
Type: ``int``, defaults to ``3``.
"""
return self._parms.get("min_rule_length")
@min_rule_length.setter
def min_rule_length(self, min_rule_length):
assert_is_type(min_rule_length, None, int)
self._parms["min_rule_length"] = min_rule_length
@property
def max_rule_length(self):
"""
Maximum length of rules. Defaults to 3.
Type: ``int``, defaults to ``3``.
"""
return self._parms.get("max_rule_length")
@max_rule_length.setter
def max_rule_length(self, max_rule_length):
assert_is_type(max_rule_length, None, int)
self._parms["max_rule_length"] = max_rule_length
@property
def max_num_rules(self):
"""
The maximum number of rules to return. defaults to -1 which means the number of rules is selected
by diminishing returns in model deviance.
Type: ``int``, defaults to ``-1``.
"""
return self._parms.get("max_num_rules")
@max_num_rules.setter
def max_num_rules(self, max_num_rules):
assert_is_type(max_num_rules, None, int)
self._parms["max_num_rules"] = max_num_rules
@property
def model_type(self):
"""
Specifies type of base learners in the ensemble.
Type: ``Literal["rules_and_linear", "rules", "linear"]``, defaults to ``"rules_and_linear"``.
"""
return self._parms.get("model_type")
@model_type.setter
def model_type(self, model_type):
assert_is_type(model_type, None, Enum("rules_and_linear", "rules", "linear"))
self._parms["model_type"] = model_type
@property
def weights_column(self):
"""
Column with observation weights. Giving some observation a weight of zero is equivalent to excluding it from the
dataset; giving an observation a relative weight of 2 is equivalent to repeating that row twice. Negative
weights are not allowed. Note: Weights are per-row observation weights and do not increase the size of the data
frame. This is typically the number of times a row is repeated, but non-integer values are supported as well.
During training, rows with higher weights matter more, due to the larger loss function pre-factor. If you set
weight = 0 for a row, the returned prediction frame at that row is zero and this is incorrect. To get an
accurate prediction, remove all rows with weight == 0.
Type: ``str``.
"""
return self._parms.get("weights_column")
@weights_column.setter
def weights_column(self, weights_column):
assert_is_type(weights_column, None, str)
self._parms["weights_column"] = weights_column
@property
def distribution(self):
"""
Distribution function
Type: ``Literal["auto", "bernoulli", "multinomial", "gaussian", "poisson", "gamma", "tweedie", "laplace",
"quantile", "huber"]``, defaults to ``"auto"``.
"""
return self._parms.get("distribution")
@distribution.setter
def distribution(self, distribution):
assert_is_type(distribution, None, Enum("auto", "bernoulli", "multinomial", "gaussian", "poisson", "gamma", "tweedie", "laplace", "quantile", "huber"))
self._parms["distribution"] = distribution
@property
def rule_generation_ntrees(self):
"""
Specifies the number of trees to build in the tree model. Defaults to 50.
Type: ``int``, defaults to ``50``.
"""
return self._parms.get("rule_generation_ntrees")
@rule_generation_ntrees.setter
def rule_generation_ntrees(self, rule_generation_ntrees):
assert_is_type(rule_generation_ntrees, None, int)
self._parms["rule_generation_ntrees"] = rule_generation_ntrees
@property
def auc_type(self):
"""
Set default multinomial AUC type.
Type: ``Literal["auto", "none", "macro_ovr", "weighted_ovr", "macro_ovo", "weighted_ovo"]``, defaults to
``"auto"``.
"""
return self._parms.get("auc_type")
@auc_type.setter
def auc_type(self, auc_type):
assert_is_type(auc_type, None, Enum("auto", "none", "macro_ovr", "weighted_ovr", "macro_ovo", "weighted_ovo"))
self._parms["auc_type"] = auc_type
@property
def remove_duplicates(self):
"""
Whether to remove rules which are identical to an earlier rule. Defaults to true.
Type: ``bool``, defaults to ``True``.
"""
return self._parms.get("remove_duplicates")
@remove_duplicates.setter
def remove_duplicates(self, remove_duplicates):
assert_is_type(remove_duplicates, None, bool)
self._parms["remove_duplicates"] = remove_duplicates
@property
def lambda_(self):
"""
Lambda for LASSO regressor.
Type: ``List[float]``.
"""
return self._parms.get("lambda")
@lambda_.setter
def lambda_(self, lambda_):
assert_is_type(lambda_, None, numeric, [numeric])
self._parms["lambda"] = lambda_
@property
def max_categorical_levels(self):
"""
For every categorical feature, only use this many most frequent categorical levels for model training. Only used
for categorical_encoding == EnumLimited.
Type: ``int``, defaults to ``10``.
"""
return self._parms.get("max_categorical_levels")
@max_categorical_levels.setter
def max_categorical_levels(self, max_categorical_levels):
assert_is_type(max_categorical_levels, None, int)
self._parms["max_categorical_levels"] = max_categorical_levels
Lambda = deprecated_property('Lambda', lambda_)
def rule_importance(self):
"""
Retrieve rule importances for a Rulefit model
:return: H2OTwoDimTable
"""
if self._model_json["algo"] != "rulefit":
raise H2OValueError("This function is available for Rulefit models only")
kwargs = {}
kwargs["model_id"] = self.model_id
json = h2o.api("POST /3/SignificantRules", data=kwargs)
return json['significant_rules_table']
def predict_rules(self, frame, rule_ids):
"""
Evaluates validity of the given rules on the given data.
:param frame: H2OFrame on which rule validity is to be evaluated
:param rule_ids: string array of rule ids to be evaluated against the frame
:return: H2OFrame with a column per each input ruleId, representing a flag whether given rule is applied to the observation or not.
"""
from h2o.frame import H2OFrame
from h2o.utils.typechecks import assert_is_type
from h2o.expr import ExprNode
assert_is_type(frame, H2OFrame)
return H2OFrame._expr(expr=ExprNode("rulefit.predict.rules", self, frame, rule_ids))
|
PypiClean
|
/cmsplugin-form-handler-0.2.0.tar.gz/cmsplugin-form-handler-0.2.0/docs/reference/templatetags.rst
|
--------------------------------------------------------------
:mod:`cmsplugin_form_handler.templatetags.cmsplugin_form_tags`
--------------------------------------------------------------
.. module:: cmsplugin_form_handler.templatetags.cmsplugin_form_tags
This module contains template tags that are provided by this package.
.. templatetag:: form_action
form_action
-----------
This template tag provides the URL for the form action. It simply returns the
correct URL to use for submitting the form. It is roughly equivalent to: ::
{% url 'cmsplugin_form_handler:process_form' instance.pk %}
Although simple, the purpose of this tag is to encapsulate the implementation
details of cmsplugin-form-handler so that future changes can occur as necessary
without breaking existing projects.
:param int plugin_pk:
This can be used to specify the ID of the plugin that the view should
use to process the form. If the developer uses CMS development
conventions, this parameter should never be necessary. However, there
may be some cases where the ``render()`` method uses a variable other
than ``instance`` in its context. In these cases, it may be necessary to
use that variable in this template tag as follows: ::
# In this example, the context includes the variable ``plugin``
# that contains the plugin instance to render
{% load cmsplugin_form_tags %}
...
<form action="{% form_action plugin %}" method="post">
|
PypiClean
|
/baiduads_sdk_auto-2023.1.0-py3-none-any.whl/baiduads/platproduct/model/get_product_list_response_wrapper.py
|
import re # noqa: F401
import sys # noqa: F401
from baiduads.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel
)
from baiduads.exceptions import ApiAttributeError
def lazy_import():
from baiduads.common.model.api_response_header import ApiResponseHeader
from baiduads.platproduct.model.get_product_list_response_wrapper_body import GetProductListResponseWrapperBody
globals()['ApiResponseHeader'] = ApiResponseHeader
globals()['GetProductListResponseWrapperBody'] = GetProductListResponseWrapperBody
class GetProductListResponseWrapper(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'header': (ApiResponseHeader,), # noqa: E501
'body': (GetProductListResponseWrapperBody,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'header': 'header', # noqa: E501
'body': 'body', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs): # noqa: E501
"""GetProductListResponseWrapper - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
header (ApiResponseHeader): [optional] # noqa: E501
body (GetProductListResponseWrapperBody): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""GetProductListResponseWrapper - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
header (ApiResponseHeader): [optional] # noqa: E501
body (GetProductListResponseWrapperBody): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.")
|
PypiClean
|
/ftw.publisher.sender-2.14.6.tar.gz/ftw.publisher.sender-2.14.6/ftw/publisher/sender/workflows/subscribers.py
|
from Acquisition import aq_inner
from Acquisition import aq_parent
from ftw.publisher.core import belongs_to_parent
from ftw.publisher.sender.utils import is_temporary
from ftw.publisher.sender.workflows.interfaces import DELETE_ACTIONS
from ftw.publisher.sender.workflows.interfaces import IPublisherContextState
from ftw.publisher.sender.workflows.interfaces import IWorkflowConfigs
from ftw.publisher.sender.workflows.interfaces import PUSH_ACTIONS
from Products.CMFPlone.interfaces import IPloneSiteRoot
from zope.component import getMultiAdapter
from zope.component import getUtility
_marker = '_publisher_event_already_handled'
def publish_after_transition(context, event):
""" This event handler is executed after each transition and
publishes the object with ftw.publisher on certain transitions.
Also when retracting an object, the object will be published,
since we should not delete anything unless it's delete from the
sender instance too. This is necessary for preventing
inconsistency, which could occur when deleting a folder which
contains published objects on the reciever site.
"""
# the event handler will be run multiple times, so we need to
# remember which event we've already handled.
if getattr(event, _marker, False):
return
else:
setattr(event, _marker, True)
if not event.transition:
return
if is_temporary(context):
return
config = getUtility(IWorkflowConfigs).get_config_for(context)
if config is None:
return
transition = event.transition.__name__
action = config.transitions().get(transition, None)
if action is None:
return
if action in PUSH_ACTIONS:
context.restrictedTraverse('@@publisher.publish')()
elif action in DELETE_ACTIONS:
context.restrictedTraverse('@@publisher.delete')()
def handle_remove_event(context, event):
"""
Before a object is remvoed the event handler crates a remove job.
"""
# the event is notified for every subobject, but we only want to check
# the top object which the users tries to delete
if context is not event.object:
return
if belongs_to_parent(context):
# Do not delete objects belonging to the parent,
# they are deleted when the parent is published.
return
# Find the workflow object by walking up. We may be deleting a file
# within a file-block within a page, where file and file-block have no
# workflow and we check the page workflow.
obj = context
state = None
while not IPloneSiteRoot.providedBy(obj):
state = getMultiAdapter((obj, context.REQUEST),
IPublisherContextState)
if state.has_workflow():
break
else:
obj = aq_parent(aq_inner(obj))
if not state.has_workflow() or not state.has_publisher_config():
# plone site reached without finding a workflow, therefore
# the object was never published.
return
context.restrictedTraverse('@@publisher.delete')(no_response=True)
|
PypiClean
|
/static_data_dp-0.1.1.tar.gz/static_data_dp-0.1.1/static_data_dp/DataStore.py
|
class DataStore:
def __init__(self, data):
self.data = {
"image": data["image"],
"name": data["name"],
"id": data["image"]["full"].split(".")[0],
}
def setImageUrl(self, imageUrl):
self.imageUrl = imageUrl
@property
def image(self):
return (
self.imageUrl
+ self.data["image"]["group"]
+ "/"
+ self.data["image"]["full"]
)
@property
def sprite(self):
return (
self.imageUrl + "sprite/" + self.data["image"]["sprite"],
self.data["image"]["x"],
self.data["image"]["y"],
)
@property
def name(self):
return self.data["name"]
@property
def id(self):
return self.data["id"]
spellToKeyHelper = {0: "Q", 1: "W", 2: "E", 3: "R"}
class Champion(DataStore):
BASE_URL = "http://ddragon.leagueoflegends.com/cdn/"
def __init__(self, data):
self.data = {
"image": data["image"],
"name": data["name"],
"id": data["id"],
"key": data["key"],
"spells": [
{"image": s["image"], "name": s["name"], "id": s["id"]}
for s in data["spells"]
]
if "spells" in data
else [],
}
self.spellById = None
if "spells" in self.data:
self.loadSpells()
def loadSpells(self):
self.spellById = {}
self.spellByName = {}
self.spellByKey = {}
self.spellBySlot = {}
for k, s in enumerate(self.data["spells"]):
spell = Spell(s)
self.spellById[s["id"]] = spell
self.spellByName[s["name"]] = spell
self.spellByKey[spellToKeyHelper[k]] = spell
self.spellBySlot[k + 1] = spell
def spell(self, sp):
if isinstance(sp, int) or sp.isdigit():
return self.spellBySlot[int(sp)]
elif len(sp) == 1:
return self.spellByKey[sp]
elif sp in self.spellByName:
return self.spellByName[sp]
elif sp in self.spellById:
return self.spellById[sp]
def setImageUrl(self, imageUrl):
self.imageUrl = imageUrl
if not self.spellById == None:
for s in self.spellById:
self.spellById[s].setImageUrl(imageUrl)
@property
def key(self):
return self.data["key"]
class Item(DataStore):
pass
class Map(DataStore):
def __init__(self, data):
self.data = {"image": data["image"], "name": data["MapName"]}
class Summoner(DataStore):
def __init__(self, data):
self.data = {
"image": data["image"],
"name": data["name"],
"id": data["id"],
"key": data["key"],
}
@property
def key(self):
return self.data["key"]
class Icon(DataStore):
def __init__(self, data):
self.data = {"image": data["image"], "name": None}
class Spell(DataStore):
pass
class Rune:
def __init__(self, data):
self.data = {"icon": data["icon"], "name": data["name"], "id": data["id"]}
def setImageUrl(self, imageUrl):
self.imageUrl = imageUrl
@property
def image(self):
return self.imageUrl + self.data["icon"]
@property
def name(self):
return self.data["name"]
@property
def id(self):
return self.data["id"]
|
PypiClean
|
/django-fluent-contents-3.0.tar.gz/django-fluent-contents-3.0/fluent_contents/utils/html.py
|
import warnings
from html5lib import HTMLParser, treebuilders, treewalkers
from html5lib.serializer import HTMLSerializer
try:
from html5lib.sanitizer import HTMLSanitizer
except ImportError:
HTMLSanitizer = None
def clean_html(input, sanitize=False):
"""
Takes an HTML fragment and processes it using html5lib to ensure that the HTML is well-formed.
:param sanitize: Remove unwanted HTML tags and attributes.
>>> clean_html("<p>Foo<b>bar</b></p>")
u'<p>Foo<b>bar</b></p>'
>>> clean_html("<p>Foo<b>bar</b><i>Ooops!</p>")
u'<p>Foo<b>bar</b><i>Ooops!</i></p>'
>>> clean_html('<p>Foo<b>bar</b>& oops<a href="#foo&bar">This is a <>link</a></p>')
u'<p>Foo<b>bar</b>& oops<a href=#foo&bar>This is a <>link</a></p>'
"""
parser_kwargs = {}
serializer_kwargs = {}
if sanitize:
if HTMLSanitizer is None:
# new syntax as of 0.99999999/1.0b9 (Released on July 14, 2016)
serializer_kwargs["sanitize"] = True
else:
parser_kwargs["tokenizer"] = HTMLSanitizer
p = HTMLParser(tree=treebuilders.getTreeBuilder("dom"), **parser_kwargs)
dom_tree = p.parseFragment(input)
walker = treewalkers.getTreeWalker("dom")
stream = walker(dom_tree)
s = HTMLSerializer(omit_optional_tags=False, **serializer_kwargs)
return "".join(s.serialize(stream))
def sanitize_html(input):
"""
Removes any unwanted HTML tags and attributes, using html5lib.
.. versionchanged:: 1.1.5
Please use :func:`clean_html` instead with ``sanitize=True``.
>>> sanitize_html("foobar<p>adf<i></p>abc</i>")
u'foobar<p>adf<i></i></p><i>abc</i>'
>>> sanitize_html('foobar<p style="color:red; remove:me; background-image: url(http://example.com/test.php?query_string=bad);">adf<script>alert("Uhoh!")</script><i></p>abc</i>')
u'foobar<p style="color: red;">adf<script>alert("Uhoh!")</script><i></i></p><i>abc</i>'
"""
warnings.warn("Please use clean_html(input, sanitize=True) instead", DeprecationWarning)
return clean_html(input, sanitize=True)
|
PypiClean
|
/POT-0.9.1-cp39-cp39-win_amd64.whl/ot/plot.py
|
import numpy as np
import matplotlib.pylab as pl
from matplotlib import gridspec
def plot1D_mat(a, b, M, title=''):
r""" Plot matrix :math:`\mathbf{M}` with the source and target 1D distribution
Creates a subplot with the source distribution :math:`\mathbf{a}` on the left and
target distribution :math:`\mathbf{b}` on the top. The matrix :math:`\mathbf{M}` is shown in between.
Parameters
----------
a : ndarray, shape (na,)
Source distribution
b : ndarray, shape (nb,)
Target distribution
M : ndarray, shape (na, nb)
Matrix to plot
"""
na, nb = M.shape
gs = gridspec.GridSpec(3, 3)
xa = np.arange(na)
xb = np.arange(nb)
ax1 = pl.subplot(gs[0, 1:])
pl.plot(xb, b, 'r', label='Target distribution')
pl.yticks(())
pl.title(title)
ax2 = pl.subplot(gs[1:, 0])
pl.plot(a, xa, 'b', label='Source distribution')
pl.gca().invert_xaxis()
pl.gca().invert_yaxis()
pl.xticks(())
pl.subplot(gs[1:, 1:], sharex=ax1, sharey=ax2)
pl.imshow(M, interpolation='nearest')
pl.axis('off')
pl.xlim((0, nb))
pl.tight_layout()
pl.subplots_adjust(wspace=0., hspace=0.2)
def plot2D_samples_mat(xs, xt, G, thr=1e-8, **kwargs):
r""" Plot matrix :math:`\mathbf{G}` in 2D with lines using alpha values
Plot lines between source and target 2D samples with a color
proportional to the value of the matrix :math:`\mathbf{G}` between samples.
Parameters
----------
xs : ndarray, shape (ns,2)
Source samples positions
b : ndarray, shape (nt,2)
Target samples positions
G : ndarray, shape (na,nb)
OT matrix
thr : float, optional
threshold above which the line is drawn
**kwargs : dict
parameters given to the plot functions (default color is black if
nothing given)
"""
if ('color' not in kwargs) and ('c' not in kwargs):
kwargs['color'] = 'k'
mx = G.max()
if 'alpha' in kwargs:
scale = kwargs['alpha']
del kwargs['alpha']
else:
scale = 1
for i in range(xs.shape[0]):
for j in range(xt.shape[0]):
if G[i, j] / mx > thr:
pl.plot([xs[i, 0], xt[j, 0]], [xs[i, 1], xt[j, 1]],
alpha=G[i, j] / mx * scale, **kwargs)
|
PypiClean
|
/SentinelOne-1.2.1.tar.gz/SentinelOne-1.2.1/management/mgmtsdk_v2_1/entities/threat_action.py
|
class Enrichments(object):
def __init__(self, **kwargs):
self.id = kwargs.get('id', None)
self.createdAt = kwargs.get('createdAt', None)
self.updatedAt = kwargs.get('updatedAt', None)
self.interfaceType = kwargs.get('interfaceType', None)
self.interfaceGroupingKey = kwargs.get('interfaceGroupingKey', None)
self.interfaceGroupingKeyLogoId = kwargs.get('interfaceGroupingKeyLogoId', None)
self.content = kwargs.get('content', None)
class AvailableActions(object):
def __init__(self, **kwargs):
self.id = kwargs.get('id', None)
self.createdAt = kwargs.get('createdAt', None)
self.updatedAt = kwargs.get('updatedAt', None)
self.actionTitle = kwargs.get('actionTitle', None)
self.actionDescription = kwargs.get('actionDescription', None)
self.actionWarning = kwargs.get('actionWarning', None)
self.interfaceType = kwargs.get('interfaceType', None)
self.interfaceGroupingKey = kwargs.get('interfaceGroupingKey', None)
self.interfaceGroupingKeyLogoId = kwargs.get('interfaceGroupingKeyLogoId', None)
self.targetType = kwargs.get('targetType', None)
self.targetValue = kwargs.get('targetValue', None)
self.customData = kwargs.get('customData', None)
self.lastInitiatedBy = kwargs.get('lastInitiatedBy', None)
self.lastInitiatedByEmail = kwargs.get('lastInitiatedByEmail', None)
self.lastActionStatus = kwargs.get('lastActionStatus',None)
self.lastActionStatusMessage = kwargs.get('lastActionStatusMessage', None)
self.lastStatusUpdatedAt = kwargs.get('lastStatusUpdatedAt', None)
class AvailableActionsCount(object):
def __init__(self, **kwargs):
self.interfaceGroupingKey = kwargs.get('interfaceGroupingKey', None)
self.interfaceGroupingKeyLogoId = kwargs.get('interfaceGroupingKeyLogoId', None)
self.count = kwargs.get('count', None)
class ExecutedActions(object):
def __init__(self, **kwargs):
self.id = kwargs.get('id', None)
self.createdAt = kwargs.get('createdAt', None)
self.updatedAt = kwargs.get('updatedAt', None)
self.actionId = kwargs.get('actionId', None)
self.interfaceGroupingKey = kwargs.get('interfaceGroupingKey', None)
self.interfaceGroupingKeyLogoId = kwargs.get('interfaceGroupingKeyLogoId', None)
self.initiatedBy = kwargs.get('initiatedBy', None)
self.initiatedByEmail = kwargs.get('initiatedByEmail', None)
self.actionStatus = kwargs.get('actionStatus', None)
self.actionStatusMessage = kwargs.get('actionStatusMessage', None)
self.actionTitle = kwargs.get('actionTitle', None)
self.actionTargetName = kwargs.get('actionTargetName', None)
|
PypiClean
|
/bpy_cuda-2.82-cp37-cp37m-win_amd64.whl/bpy_cuda-2.82.data/scripts/2.82/scripts/addons/add_mesh_extra_objects/__init__.py
|
bl_info = {
"name": "Extra Objects",
"author": "Multiple Authors",
"version": (0, 3, 6),
"blender": (2, 80, 0),
"location": "View3D > Add > Mesh",
"description": "Add extra mesh object types",
"warning": "",
"wiki_url": "https://docs.blender.org/manual/en/dev/addons/"
"add_mesh/mesh_extra_objects.html",
"category": "Add Mesh",
}
# Note: Blocks has to be loaded before the WallFactory or the script
# will not work properly after (F8) reload
if "bpy" in locals():
import importlib
importlib.reload(add_mesh_star)
importlib.reload(add_mesh_twisted_torus)
importlib.reload(add_mesh_gemstones)
importlib.reload(add_mesh_gears)
importlib.reload(add_mesh_3d_function_surface)
importlib.reload(add_mesh_round_cube)
importlib.reload(add_mesh_supertoroid)
importlib.reload(add_mesh_pyramid)
importlib.reload(add_mesh_torusknot)
importlib.reload(add_mesh_honeycomb)
importlib.reload(add_mesh_teapot)
importlib.reload(add_mesh_pipe_joint)
importlib.reload(add_mesh_solid)
importlib.reload(add_mesh_round_brilliant)
importlib.reload(add_mesh_menger_sponge)
importlib.reload(add_mesh_vertex)
importlib.reload(add_empty_as_parent)
importlib.reload(add_mesh_beam_builder)
importlib.reload(Blocks)
importlib.reload(Wallfactory)
importlib.reload(add_mesh_triangles)
else:
from . import add_mesh_star
from . import add_mesh_twisted_torus
from . import add_mesh_gemstones
from . import add_mesh_gears
from . import add_mesh_3d_function_surface
from . import add_mesh_round_cube
from . import add_mesh_supertoroid
from . import add_mesh_pyramid
from . import add_mesh_torusknot
from . import add_mesh_honeycomb
from . import add_mesh_teapot
from . import add_mesh_pipe_joint
from . import add_mesh_solid
from . import add_mesh_round_brilliant
from . import add_mesh_menger_sponge
from . import add_mesh_vertex
from . import add_empty_as_parent
from . import add_mesh_beam_builder
from . import Blocks
from . import Wallfactory
from . import add_mesh_triangles
from .add_mesh_rocks import __init__
from .add_mesh_rocks import rockgen
import bpy
from bpy.types import Menu
class VIEW3D_MT_mesh_vert_add(Menu):
# Define the "Single Vert" menu
bl_idname = "VIEW3D_MT_mesh_vert_add"
bl_label = "Single Vert"
def draw(self, context):
layout = self.layout
layout.operator_context = 'INVOKE_REGION_WIN'
layout.operator("mesh.primitive_vert_add",
text="Add Single Vert")
layout.separator()
layout.operator("mesh.primitive_emptyvert_add",
text="Object Origin Only")
layout.operator("mesh.primitive_symmetrical_vert_add",
text="Origin & Vert Mirrored")
layout.operator("mesh.primitive_symmetrical_empty_add",
text="Object Origin Mirrored")
class VIEW3D_MT_mesh_gears_add(Menu):
# Define the "Gears" menu
bl_idname = "VIEW3D_MT_mesh_gears_add"
bl_label = "Gears"
def draw(self, context):
layout = self.layout
layout.operator_context = 'INVOKE_REGION_WIN'
oper = layout.operator("mesh.primitive_gear", text="Gear")
oper.change = False
oper = layout.operator("mesh.primitive_worm_gear", text="Worm")
oper.change = False
class VIEW3D_MT_mesh_diamonds_add(Menu):
# Define the "Diamonds" menu
bl_idname = "VIEW3D_MT_mesh_diamonds_add"
bl_label = "Diamonds"
def draw(self, context):
layout = self.layout
layout.operator_context = 'INVOKE_REGION_WIN'
oper = layout.operator("mesh.primitive_brilliant_add", text="Brilliant Diamond")
oper.change = False
oper = layout.operator("mesh.primitive_diamond_add", text="Diamond")
oper.change = False
oper = layout.operator("mesh.primitive_gem_add", text="Gem")
oper.change = False
class VIEW3D_MT_mesh_math_add(Menu):
# Define the "Math Function" menu
bl_idname = "VIEW3D_MT_mesh_math_add"
bl_label = "Math Functions"
def draw(self, context):
layout = self.layout
layout.operator_context = 'INVOKE_REGION_WIN'
layout.operator("mesh.primitive_z_function_surface",
text="Z Math Surface")
layout.operator("mesh.primitive_xyz_function_surface",
text="XYZ Math Surface")
self.layout.operator("mesh.primitive_solid_add", text="Regular Solid")
self.layout.operator("mesh.make_triangle")
class VIEW3D_MT_mesh_extras_add(Menu):
# Define the "Extra Objects" menu
bl_idname = "VIEW3D_MT_mesh_extras_add"
bl_label = "Extras"
def draw(self, context):
layout = self.layout
layout.operator_context = 'INVOKE_REGION_WIN'
oper = layout.operator("mesh.add_beam", text="Beam Builder")
oper.change = False
oper = layout.operator("mesh.wall_add", text="Wall Factory")
oper.change = False
layout.separator()
oper = layout.operator("mesh.primitive_star_add", text="Simple Star")
oper = layout.operator("mesh.primitive_steppyramid_add", text="Step Pyramid")
oper = layout.operator("mesh.honeycomb_add", text="Honeycomb")
oper = layout.operator("mesh.primitive_teapot_add", text="Teapot+")
oper = layout.operator("mesh.menger_sponge_add", text="Menger Sponge")
class VIEW3D_MT_mesh_torus_add(Menu):
# Define the "Torus Objects" menu
bl_idname = "VIEW3D_MT_mesh_torus_add"
bl_label = "Torus Objects"
def draw(self, context):
layout = self.layout
layout.operator_context = 'INVOKE_REGION_WIN'
layout.operator("mesh.primitive_twisted_torus_add", text="Twisted Torus")
layout.operator("mesh.primitive_supertoroid_add", text="Supertoroid")
layout.operator("mesh.primitive_torusknot_add", text="Torus Knot")
class VIEW3D_MT_mesh_pipe_joints_add(Menu):
# Define the "Pipe Joints" menu
bl_idname = "VIEW3D_MT_mesh_pipe_joints_add"
bl_label = "Pipe Joints"
def draw(self, context):
layout = self.layout
layout.operator_context = 'INVOKE_REGION_WIN'
layout.operator("mesh.primitive_elbow_joint_add", text="Pipe Elbow")
layout.operator("mesh.primitive_tee_joint_add", text="Pipe T-Joint")
layout.operator("mesh.primitive_wye_joint_add", text="Pipe Y-Joint")
layout.operator("mesh.primitive_cross_joint_add", text="Pipe Cross-Joint")
layout.operator("mesh.primitive_n_joint_add", text="Pipe N-Joint")
# Register all operators and panels
# Define "Extras" menu
def menu_func(self, context):
layout = self.layout
layout.operator_context = 'INVOKE_REGION_WIN'
layout.separator()
layout.menu("VIEW3D_MT_mesh_vert_add",
text="Single Vert", icon="DECORATE")
layout.operator("mesh.primitive_round_cube_add",
text="Round Cube", icon="SPHERE")
layout.menu("VIEW3D_MT_mesh_torus_add",
text="Torus Objects", icon="MESH_TORUS")
layout.separator()
layout.menu("VIEW3D_MT_mesh_math_add",
text="Math Function", icon="PACKAGE")
layout.menu("VIEW3D_MT_mesh_gears_add",
text="Gears", icon="PREFERENCES")
layout.menu("VIEW3D_MT_mesh_pipe_joints_add",
text="Pipe Joints", icon="EMPTY_DATA")
layout.separator()
layout.menu("VIEW3D_MT_mesh_diamonds_add", text="Diamonds")
layout.menu("VIEW3D_MT_mesh_extras_add",
text="Extras")
layout.separator()
layout.operator("object.parent_to_empty",
text="Parent To Empty")
def Extras_contex_menu(self, context):
bl_label = 'Change'
obj = context.object
layout = self.layout
if 'Gear' in obj.data.keys():
props = layout.operator("mesh.primitive_gear", text="Change Gear")
props.change = True
for prm in add_mesh_gears.GearParameters():
setattr(props, prm, obj.data[prm])
layout.separator()
if 'WormGear' in obj.data.keys():
props = layout.operator("mesh.primitive_worm_gear", text="Change WormGear")
props.change = True
for prm in add_mesh_gears.WormGearParameters():
setattr(props, prm, obj.data[prm])
layout.separator()
if 'Beam' in obj.data.keys():
props = layout.operator("mesh.add_beam", text="Change Beam")
props.change = True
for prm in add_mesh_beam_builder.BeamParameters():
setattr(props, prm, obj.data[prm])
layout.separator()
if 'Wall' in obj.data.keys():
props = layout.operator("mesh.wall_add", text="Change Wall")
props.change = True
for prm in Wallfactory.WallParameters():
setattr(props, prm, obj.data[prm])
layout.separator()
if 'ElbowJoint' in obj.data.keys():
props = layout.operator("mesh.primitive_elbow_joint_add", text="Change ElbowJoint")
props.change = True
for prm in add_mesh_pipe_joint.ElbowJointParameters():
setattr(props, prm, obj.data[prm])
layout.separator()
if 'TeeJoint' in obj.data.keys():
props = layout.operator("mesh.primitive_tee_joint_add", text="Change TeeJoint")
props.change = True
for prm in add_mesh_pipe_joint.TeeJointParameters():
setattr(props, prm, obj.data[prm])
layout.separator()
if 'WyeJoint' in obj.data.keys():
props = layout.operator("mesh.primitive_wye_joint_add", text="Change WyeJoint")
props.change = True
for prm in add_mesh_pipe_joint.WyeJointParameters():
setattr(props, prm, obj.data[prm])
layout.separator()
if 'CrossJoint' in obj.data.keys():
props = layout.operator("mesh.primitive_cross_joint_add", text="Change CrossJoint")
props.change = True
for prm in add_mesh_pipe_joint.CrossJointParameters():
setattr(props, prm, obj.data[prm])
layout.separator()
if 'NJoint' in obj.data.keys():
props = layout.operator("mesh.primitive_n_joint_add", text="Change NJoint")
props.change = True
for prm in add_mesh_pipe_joint.NJointParameters():
setattr(props, prm, obj.data[prm])
layout.separator()
if 'Diamond' in obj.data.keys():
props = layout.operator("mesh.primitive_diamond_add", text="Change Diamond")
props.change = True
for prm in add_mesh_gemstones.DiamondParameters():
setattr(props, prm, obj.data[prm])
layout.separator()
if 'Gem' in obj.data.keys():
props = layout.operator("mesh.primitive_gem_add", text="Change Gem")
props.change = True
for prm in add_mesh_gemstones.GemParameters():
setattr(props, prm, obj.data[prm])
layout.separator()
if 'Brilliant' in obj.data.keys():
props = layout.operator("mesh.primitive_brilliant_add", text="Change Brilliant")
props.change = True
for prm in add_mesh_round_brilliant.BrilliantParameters():
setattr(props, prm, obj.data[prm])
layout.separator()
# Register
classes = [
VIEW3D_MT_mesh_vert_add,
VIEW3D_MT_mesh_gears_add,
VIEW3D_MT_mesh_diamonds_add,
VIEW3D_MT_mesh_math_add,
VIEW3D_MT_mesh_extras_add,
VIEW3D_MT_mesh_torus_add,
VIEW3D_MT_mesh_pipe_joints_add,
add_mesh_star.AddStar,
add_mesh_twisted_torus.AddTwistedTorus,
add_mesh_gemstones.AddDiamond,
add_mesh_gemstones.AddGem,
add_mesh_gears.AddGear,
add_mesh_gears.AddWormGear,
add_mesh_3d_function_surface.AddZFunctionSurface,
add_mesh_3d_function_surface.AddXYZFunctionSurface,
add_mesh_round_cube.AddRoundCube,
add_mesh_supertoroid.add_supertoroid,
add_mesh_pyramid.AddPyramid,
add_mesh_torusknot.AddTorusKnot,
add_mesh_honeycomb.add_mesh_honeycomb,
add_mesh_teapot.AddTeapot,
add_mesh_pipe_joint.AddElbowJoint,
add_mesh_pipe_joint.AddTeeJoint,
add_mesh_pipe_joint.AddWyeJoint,
add_mesh_pipe_joint.AddCrossJoint,
add_mesh_pipe_joint.AddNJoint,
add_mesh_solid.Solids,
add_mesh_round_brilliant.MESH_OT_primitive_brilliant_add,
add_mesh_menger_sponge.AddMengerSponge,
add_mesh_vertex.AddVert,
add_mesh_vertex.AddEmptyVert,
add_mesh_vertex.AddSymmetricalEmpty,
add_mesh_vertex.AddSymmetricalVert,
add_empty_as_parent.P2E,
add_empty_as_parent.PreFix,
add_mesh_beam_builder.addBeam,
Wallfactory.add_mesh_wallb,
add_mesh_triangles.MakeTriangle,
]
def register():
from bpy.utils import register_class
for cls in classes:
register_class(cls)
add_mesh_rocks.register()
# Add "Extras" menu to the "Add Mesh" menu and context menu.
bpy.types.VIEW3D_MT_mesh_add.append(menu_func)
bpy.types.VIEW3D_MT_object_context_menu.prepend(Extras_contex_menu)
def unregister():
# Remove "Extras" menu from the "Add Mesh" menu and context menu.
bpy.types.VIEW3D_MT_object_context_menu.remove(Extras_contex_menu)
bpy.types.VIEW3D_MT_mesh_add.remove(menu_func)
from bpy.utils import unregister_class
for cls in reversed(classes):
unregister_class(cls)
add_mesh_rocks.unregister()
if __name__ == "__main__":
register()
|
PypiClean
|
/muarch-0.0.6.tar.gz/muarch-0.0.6/README.md
|
MUArch
======
###### Continuous Integration
[](https://travis-ci.com/DanielBok/muarch)
[](https://ci.appveyor.com/project/DanielBok/muarch)
###### Documentation
[](https://muarch.readthedocs.io/en/latest/?badge=latest)
###### Coverage
[](https://coveralls.io/github/DanielBok/muarch?branch=master)
## Installing
Install and update using [pip](https://pip.pypa.io/en/stable/quickstart/) and on conda.
This is a wrapper on top of Kevin Sheppard's [ARCH](https://github.com/bashtage/arch) package. The purpose of which are to:
1. Enable faster Monte Carlo simulation
2. Simulate innovations through copula marginals
In the package, there are 2 classes to aid you - `UArch` and `MUArch`. The `UArch` class can be defined using a similar API to `arch_model` in the original `arch` package. The `MUArch` is a collection of these `UArch` models.
Thus, if you have a function that generates uniform marginals, like a copula, you can create a dependence structure among the different marginals when simulating the GARCH processes.
If you need a copula package, I have one [here](https://github.com/DanielBok/copulae). :)
Example
-------
I'll list out a simple procedure to do AR-GARCH-Copula simulations.
```python
from muarch import MUArch, UArch
from muarch.datasets import load_etf
from copulae import NormalCopula
returns = load_etf() # load returns data
num_assets = returns.shape[1]
# sets up a MUArch model collection where each model defaults to
# mean: AR(1)
# vol: GARCH(1, 1)
# dist: normal
models = MUArch(num_assets, mean='AR', lags=1)
# set first model to AR(1)-GARCH(1, 1) with skewt innovations
models[0] = UArch('AR', lags=1, dist='skewt')
# fit model, if you get complaints regarding non-convergence, you can scale the data up
# using the scale parameter in the UArch or MUArch. i.e. UArch(..., scale=100). This will
# reduce numerical errors. Don't worry, I'll rescale the simulation values subsequently
models.fit(returns)
# Usually you'll want to fit the residuals to the copula, use the copula to generate the
# residuals and subsequently transform it back to returns
residuals = models.residuals() # defaults to return the standardized residuals
cop = NormalCopula(dim=num_assets) # use a normal copula, you could of course use a TCopula
cop.fit(residuals)
# simulate 10 steps into the future, over 4 repetitions. This will return a (10 x 4 x 3) array
models.simulate_mc(10, 4, custom_dist=cop.random)
```
Future Works
------------
This is actually a temporary hack so that others can do GARCH copula simulation. Another issue is that an ARFIMA mean model is not so easily specified (and simulated from) with the original `arch` package. You could specify an ARFIMA (or even just an ARMA model for the matter), fit it separately then use the residuals to fit a zero-mean model (pure GARCH). However, in such a way, the simulation is not so straightforward as you'll have to stitch the simulations from GARCH process and the mean model process back.
|
PypiClean
|
/gamification-engine-0.4.0.tar.gz/gamification-engine-0.4.0/gengine/app/jsscripts/node_modules/eslint/node_modules/inquirer/lib/objects/choices.js
|
'use strict';
var assert = require('assert');
var _ = require('lodash');
var Separator = require('./separator');
var Choice = require('./choice');
/**
* Choices collection
* Collection of multiple `choice` object
* @constructor
* @param {Array} choices All `choice` to keep in the collection
*/
var Choices = module.exports = function (choices, answers) {
this.choices = choices.map(function (val) {
if (val.type === 'separator') {
if (!(val instanceof Separator)) {
val = new Separator(val.line);
}
return val;
}
return new Choice(val, answers);
});
this.realChoices = this.choices
.filter(Separator.exclude)
.filter(function (item) {
return !item.disabled;
});
Object.defineProperty(this, 'length', {
get: function () {
return this.choices.length;
},
set: function (val) {
this.choices.length = val;
}
});
Object.defineProperty(this, 'realLength', {
get: function () {
return this.realChoices.length;
},
set: function () {
throw new Error('Cannot set `realLength` of a Choices collection');
}
});
};
/**
* Get a valid choice from the collection
* @param {Number} selector The selected choice index
* @return {Choice|Undefined} Return the matched choice or undefined
*/
Choices.prototype.getChoice = function (selector) {
assert(_.isNumber(selector));
return this.realChoices[selector];
};
/**
* Get a raw element from the collection
* @param {Number} selector The selected index value
* @return {Choice|Undefined} Return the matched choice or undefined
*/
Choices.prototype.get = function (selector) {
assert(_.isNumber(selector));
return this.choices[selector];
};
/**
* Match the valid choices against a where clause
* @param {Object} whereClause Lodash `where` clause
* @return {Array} Matching choices or empty array
*/
Choices.prototype.where = function (whereClause) {
return _.filter(this.realChoices, whereClause);
};
/**
* Pluck a particular key from the choices
* @param {String} propertyName Property name to select
* @return {Array} Selected properties
*/
Choices.prototype.pluck = function (propertyName) {
return _.map(this.realChoices, propertyName);
};
// Expose usual Array methods
Choices.prototype.indexOf = function () {
return this.choices.indexOf.apply(this.choices, arguments);
};
Choices.prototype.forEach = function () {
return this.choices.forEach.apply(this.choices, arguments);
};
Choices.prototype.filter = function () {
return this.choices.filter.apply(this.choices, arguments);
};
Choices.prototype.push = function () {
var objs = _.map(arguments, function (val) { return new Choice(val); });
this.choices.push.apply(this.choices, objs);
this.realChoices = this.choices.filter(Separator.exclude);
return this.choices;
};
|
PypiClean
|
/notebooksmz-7.0.0-py3-none-any.whl/notebook/static/components/codemirror/src/edit/commands.js
|
import { deleteNearSelection } from "./deleteNearSelection.js"
import { runInOp } from "../display/operations.js"
import { ensureCursorVisible } from "../display/scrolling.js"
import { endOfLine } from "../input/movement.js"
import { clipPos, Pos } from "../line/pos.js"
import { visualLine, visualLineEnd } from "../line/spans.js"
import { getLine, lineNo } from "../line/utils_line.js"
import { Range } from "../model/selection.js"
import { selectAll } from "../model/selection_updates.js"
import { countColumn, sel_dontScroll, sel_move, spaceStr } from "../util/misc.js"
import { getOrder } from "../util/bidi.js"
// Commands are parameter-less actions that can be performed on an
// editor, mostly used for keybindings.
export let commands = {
selectAll: selectAll,
singleSelection: cm => cm.setSelection(cm.getCursor("anchor"), cm.getCursor("head"), sel_dontScroll),
killLine: cm => deleteNearSelection(cm, range => {
if (range.empty()) {
let len = getLine(cm.doc, range.head.line).text.length
if (range.head.ch == len && range.head.line < cm.lastLine())
return {from: range.head, to: Pos(range.head.line + 1, 0)}
else
return {from: range.head, to: Pos(range.head.line, len)}
} else {
return {from: range.from(), to: range.to()}
}
}),
deleteLine: cm => deleteNearSelection(cm, range => ({
from: Pos(range.from().line, 0),
to: clipPos(cm.doc, Pos(range.to().line + 1, 0))
})),
delLineLeft: cm => deleteNearSelection(cm, range => ({
from: Pos(range.from().line, 0), to: range.from()
})),
delWrappedLineLeft: cm => deleteNearSelection(cm, range => {
let top = cm.charCoords(range.head, "div").top + 5
let leftPos = cm.coordsChar({left: 0, top: top}, "div")
return {from: leftPos, to: range.from()}
}),
delWrappedLineRight: cm => deleteNearSelection(cm, range => {
let top = cm.charCoords(range.head, "div").top + 5
let rightPos = cm.coordsChar({left: cm.display.lineDiv.offsetWidth + 100, top: top}, "div")
return {from: range.from(), to: rightPos }
}),
undo: cm => cm.undo(),
redo: cm => cm.redo(),
undoSelection: cm => cm.undoSelection(),
redoSelection: cm => cm.redoSelection(),
goDocStart: cm => cm.extendSelection(Pos(cm.firstLine(), 0)),
goDocEnd: cm => cm.extendSelection(Pos(cm.lastLine())),
goLineStart: cm => cm.extendSelectionsBy(range => lineStart(cm, range.head.line),
{origin: "+move", bias: 1}
),
goLineStartSmart: cm => cm.extendSelectionsBy(range => lineStartSmart(cm, range.head),
{origin: "+move", bias: 1}
),
goLineEnd: cm => cm.extendSelectionsBy(range => lineEnd(cm, range.head.line),
{origin: "+move", bias: -1}
),
goLineRight: cm => cm.extendSelectionsBy(range => {
let top = cm.cursorCoords(range.head, "div").top + 5
return cm.coordsChar({left: cm.display.lineDiv.offsetWidth + 100, top: top}, "div")
}, sel_move),
goLineLeft: cm => cm.extendSelectionsBy(range => {
let top = cm.cursorCoords(range.head, "div").top + 5
return cm.coordsChar({left: 0, top: top}, "div")
}, sel_move),
goLineLeftSmart: cm => cm.extendSelectionsBy(range => {
let top = cm.cursorCoords(range.head, "div").top + 5
let pos = cm.coordsChar({left: 0, top: top}, "div")
if (pos.ch < cm.getLine(pos.line).search(/\S/)) return lineStartSmart(cm, range.head)
return pos
}, sel_move),
goLineUp: cm => cm.moveV(-1, "line"),
goLineDown: cm => cm.moveV(1, "line"),
goPageUp: cm => cm.moveV(-1, "page"),
goPageDown: cm => cm.moveV(1, "page"),
goCharLeft: cm => cm.moveH(-1, "char"),
goCharRight: cm => cm.moveH(1, "char"),
goColumnLeft: cm => cm.moveH(-1, "column"),
goColumnRight: cm => cm.moveH(1, "column"),
goWordLeft: cm => cm.moveH(-1, "word"),
goGroupRight: cm => cm.moveH(1, "group"),
goGroupLeft: cm => cm.moveH(-1, "group"),
goWordRight: cm => cm.moveH(1, "word"),
delCharBefore: cm => cm.deleteH(-1, "char"),
delCharAfter: cm => cm.deleteH(1, "char"),
delWordBefore: cm => cm.deleteH(-1, "word"),
delWordAfter: cm => cm.deleteH(1, "word"),
delGroupBefore: cm => cm.deleteH(-1, "group"),
delGroupAfter: cm => cm.deleteH(1, "group"),
indentAuto: cm => cm.indentSelection("smart"),
indentMore: cm => cm.indentSelection("add"),
indentLess: cm => cm.indentSelection("subtract"),
insertTab: cm => cm.replaceSelection("\t"),
insertSoftTab: cm => {
let spaces = [], ranges = cm.listSelections(), tabSize = cm.options.tabSize
for (let i = 0; i < ranges.length; i++) {
let pos = ranges[i].from()
let col = countColumn(cm.getLine(pos.line), pos.ch, tabSize)
spaces.push(spaceStr(tabSize - col % tabSize))
}
cm.replaceSelections(spaces)
},
defaultTab: cm => {
if (cm.somethingSelected()) cm.indentSelection("add")
else cm.execCommand("insertTab")
},
// Swap the two chars left and right of each selection's head.
// Move cursor behind the two swapped characters afterwards.
//
// Doesn't consider line feeds a character.
// Doesn't scan more than one line above to find a character.
// Doesn't do anything on an empty line.
// Doesn't do anything with non-empty selections.
transposeChars: cm => runInOp(cm, () => {
let ranges = cm.listSelections(), newSel = []
for (let i = 0; i < ranges.length; i++) {
if (!ranges[i].empty()) continue
let cur = ranges[i].head, line = getLine(cm.doc, cur.line).text
if (line) {
if (cur.ch == line.length) cur = new Pos(cur.line, cur.ch - 1)
if (cur.ch > 0) {
cur = new Pos(cur.line, cur.ch + 1)
cm.replaceRange(line.charAt(cur.ch - 1) + line.charAt(cur.ch - 2),
Pos(cur.line, cur.ch - 2), cur, "+transpose")
} else if (cur.line > cm.doc.first) {
let prev = getLine(cm.doc, cur.line - 1).text
if (prev) {
cur = new Pos(cur.line, 1)
cm.replaceRange(line.charAt(0) + cm.doc.lineSeparator() +
prev.charAt(prev.length - 1),
Pos(cur.line - 1, prev.length - 1), cur, "+transpose")
}
}
}
newSel.push(new Range(cur, cur))
}
cm.setSelections(newSel)
}),
newlineAndIndent: cm => runInOp(cm, () => {
let sels = cm.listSelections()
for (let i = sels.length - 1; i >= 0; i--)
cm.replaceRange(cm.doc.lineSeparator(), sels[i].anchor, sels[i].head, "+input")
sels = cm.listSelections()
for (let i = 0; i < sels.length; i++)
cm.indentLine(sels[i].from().line, null, true)
ensureCursorVisible(cm)
}),
openLine: cm => cm.replaceSelection("\n", "start"),
toggleOverwrite: cm => cm.toggleOverwrite()
}
function lineStart(cm, lineN) {
let line = getLine(cm.doc, lineN)
let visual = visualLine(line)
if (visual != line) lineN = lineNo(visual)
return endOfLine(true, cm, visual, lineN, 1)
}
function lineEnd(cm, lineN) {
let line = getLine(cm.doc, lineN)
let visual = visualLineEnd(line)
if (visual != line) lineN = lineNo(visual)
return endOfLine(true, cm, line, lineN, -1)
}
function lineStartSmart(cm, pos) {
let start = lineStart(cm, pos.line)
let line = getLine(cm.doc, start.line)
let order = getOrder(line, cm.doc.direction)
if (!order || order[0].level == 0) {
let firstNonWS = Math.max(0, line.text.search(/\S/))
let inWS = pos.line == start.line && pos.ch <= firstNonWS && pos.ch
return Pos(start.line, inWS ? 0 : firstNonWS, start.sticky)
}
return start
}
|
PypiClean
|
/file_groups-0.1.0.tar.gz/file_groups-0.1.0/src/file_handler_compare.py
|
from __future__ import annotations
import os
from pathlib import Path
import re
import logging
from typing import Sequence
from .compare_files import CompareFiles
from .types import FsPath
from .file_handler import FileHandler
_LOG = logging.getLogger(__name__)
class FileHandlerCompare(FileHandler):
"""Extend `FileHandler` with a compare method
Arguments:
protect_dirs_seq, work_dirs_seq, protect_exclude, work_include: See `FileGroups` class.
dry_run, protected_regexes, delete_symlinks_instead_of_relinking: See `FileHandler` class.
fcmp: Object providing compare function.
"""
def __init__(
self,
protect_dirs_seq: Sequence[Path], work_dirs_seq: Sequence[Path], fcmp: CompareFiles,
*,
dry_run: bool,
protected_regexes: Sequence[re.Pattern],
protect_exclude: re.Pattern|None = None, work_include: re.Pattern|None = None,
delete_symlinks_instead_of_relinking=False):
super().__init__(
protect_dirs_seq=protect_dirs_seq,
work_dirs_seq=work_dirs_seq,
dry_run=dry_run,
protected_regexes=protected_regexes,
protect_exclude=protect_exclude,
work_include=work_include,
delete_symlinks_instead_of_relinking=delete_symlinks_instead_of_relinking)
self._fcmp = fcmp
def compare(self, fsp1: FsPath, fsp2: FsPath) -> bool:
"""Extends CompareFiles.compare with logic to handle 'renamed/moved' files during dry_run."""
if not self.dry_run:
if self._fcmp.compare(fsp1, fsp2):
_LOG.info("Duplicates: '%s' '%s'", fsp1, fsp2)
return True
return False
fsp1_abs = str(Path(fsp1).absolute())
existing_fsp1 = Path(self.moved_from.get(os.fspath(fsp1_abs), fsp1))
fsp2_abs = str(Path(fsp2).absolute())
existing_fsp2 = Path(self.moved_from.get(os.fspath(fsp2_abs), fsp2))
if self._fcmp.compare(existing_fsp1, existing_fsp2):
_LOG.info("Duplicates: '%s' '%s'", fsp1, fsp2)
return True
return False
|
PypiClean
|
/gautomator2-contrib-0.1.10.tar.gz/gautomator2-contrib-0.1.10/ga2/uiimage/tmplatematcher.py
|
import os
import cv2
import time
import logging
import numpy as np
from .utils import MatchResult,_getPosition,transparent,judge_SimpleColor
from . import cv2helper
_DEBUG_HANDLERS = {}
logger = logging.getLogger('cvmatcher')
logger.setLevel(logging.DEBUG)
_filter = logging.Filter(name='cvmatcher')
def addHandler(hdlr):
global _DEBUG_HANDLERS, logger
clsName = hdlr.__class__.__name__
if clsName not in _DEBUG_HANDLERS:
hdlr.addFilter(_filter)
logger.addHandler(hdlr)
_DEBUG_HANDLERS[clsName] = hdlr
def pHash(img):
"""get image pHash value"""
#调整为灰色
if len(img.shape) > 2:
img = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)
h, w = img.shape[:2]
if h > 32 and w > 32:
#调整图片为32x32灰度图片
img=cv2.resize(img,(32,32),interpolation=cv2.INTER_CUBIC)
#创建二维列表
h, w = img.shape[:2]
vis0 = np.zeros((h,w), np.float32)
elif h > 16 and w > 16:
#调整图片为32x32灰度图片
img=cv2.resize(img,(16,16),interpolation=cv2.INTER_CUBIC)
#创建二维列表
h, w = img.shape[:2]
vis0 = np.zeros((h,w), np.float32)
else:
#调整图片为32x32灰度图片
img=cv2.resize(img,(8,8),interpolation=cv2.INTER_CUBIC)
#创建二维列表
h, w = img.shape[:2]
vis0 = np.zeros((h,w), np.float32)
# #调整图片为32x32灰度图片
# img=cv2.resize(img,(32,32),interpolation=cv2.INTER_CUBIC)
#
# #创建二维列表
# h, w = img.shape[:2]
# vis0 = np.zeros((h,w), np.float32)
vis0[:h,:w] = img #填充数据
#二维Dct变换
vis1 = cv2.dct(cv2.dct(vis0))
#cv.SaveImage('a.jpg',cv.fromarray(vis0)) #保存图片
vis1.resize(8,8)
#把二维list变成一维list
img_list = vis1.flatten()#flatten(vis1.tolist())
#计算均值
avg = sum(img_list)*1./len(img_list)
avg_list = ['0' if i<avg else '1' for i in img_list]
#得到哈希值
return ''.join(['%x' % int(''.join(avg_list[x:x+4]),2) for x in range(0,64,4)])
def hamming(h1, h2):
'''汉明距离'''
cnt = len(h1)
total = 0
for i in range(cnt):
c1 = int(h1[i],16)
c2 = int(h2[i],16)
h, d = 0, c1 ^ c2
while d:
h += 1
d &= d - 1
#print h
total += h
return total
def compareByHamming(img1, img2, similarity=5):
if not os.path.exists(img1):
logger.error('%s not found' % img1)
return False
if not os.path.exists(img2):
logger.error('%s not found' % img2)
return False
imgData1 = cv2.imread(img1)
imgData2 = cv2.imread(img2)
h1 = pHash(imgData1)
h2 = pHash(imgData2)
dis = hamming(h1, h2)
if dis <= similarity:
return True
return False
class Hist(object):
def _getImageData(self, filePath, gray):
if not os.path.exists(filePath):
logger.error('file not found:%s' % filePath)
return None
imgData = cv2helper.imread(filePath)
if imgData is None:
logger.error('invalid image format!')
return None
channel = imgData.shape[2]
if not gray or (channel == 1 and gray):
return imgData
imgData = cv2helper.cvtColor(imgData)
if imgData is None:
logger.error('failed to covert to gray image')
return None
return imgData
def _getHist(self, grayImg, colorBin=8):
hist = cv2.calcHist([grayImg], [0], None, [colorBin], [0, 255])
return cv2.normalize(hist, None).flatten() # opencv 3.x
def compare(self, img1, img2, gray=1, method='CHISQR', **kwargs):
if gray != 1:
raise NotImplementedError('gray must be 1')
if method != 'CHISQR':
raise NotImplementedError('method must be CHISQR')
img1Data = None
if isinstance(img1, str):
img1Data = self._getImageData(img1, gray)
elif cv2helper.isNdarray(img1):
if gray and len(img1.shape) == 3:
img1Data = cv2helper.cvtColor(img1)
else:
img1Data = img1
else:
raise TypeError('unknown type:%s' % type(img1))
img2Data = None
if isinstance(img2, str):
img2Data = self._getImageData(img2, gray)
elif cv2helper.isNdarray(img2):
if gray and len(img2.shape) == 3:
img2Data = cv2helper.cvtColor(img2)
else:
img2Data = img2
else:
raise TypeError('unknown type:%s' % type(img2))
if img1Data is None or img2Data is None:
raise ValueError('empty data')
colorBin = 8
if 'colorBin' in kwargs:
colorBin = kwargs['colorBin']
hist1 = self._getHist(img1Data, colorBin=colorBin)
hist2 = self._getHist(img2Data, colorBin=colorBin)
# method - cv2.HISTCMP_CHISQR, cv2.HISTCMP_BHATTACHARYYA, cv2.HISTCMP_CORREL, cv2.HISTCMP_INTERSECT
rlt = cv2.compareHist(hist1, hist2, cv2.HISTCMP_CHISQR) # [0, max), 0为最匹配, 取个经验值,小于0.1为相似
# print(cv2.compareHist(hist1, hist2, cv2.HISTCMP_BHATTACHARYYA))
# print(cv2.compareHist(hist1, hist2, cv2.HISTCMP_CORREL))
# print(cv2.compareHist(hist1, hist2, cv2.HISTCMP_INTERSECT))
logger.info('hist result %s'%str(rlt))
return rlt
def similar(self, img1, img2, similarity=0.1, gray=1, method='CHISQR', **kwargs):
rlt = self.compare(img1, img2, gray, method)
return (rlt <= similarity)
class TemplateMatcher(object):
_SCALE_RATIO_CACHE = {}
_DEBUG_HANDLERS = {}
def __init__(self):
self._hist = Hist()
# if debug:
# selfpid = os.getpid()
# logDir = getEnv('WORKSPACE', os.getcwd())
# filePath=os.path.join(logDir, 'cvmatcher_{name}.log'.format(name=selfpid))
#
# mode = 'w'
# if os.path.exists(filePath):
# mode = 'a'
#
# if selfpid not in TemplateMatcher._DEBUG_HANDLERS: # 避免重复
# if int(getEnv('CVMATCHER_STDOUT', '0')):
# _stdout = handler.DebugStreamHandler()
# _stdout.addFilter(_filter)
# logger.addHandler(_stdout)
#
# _hdlr = handler.DebugFileHandler(filePath, mode)
# _hdlr.addFilter(_filter)
# logger.addHandler(_hdlr)
# TemplateMatcher._DEBUG_HANDLERS[selfpid] = _hdlr
def _reset(self, trustSimilarity, **kwargs):
self._maxHistSimilarity = 0
self._minHistSimilarity = 1000
self._maxSimilarity = 0
self._minSimilarity = 1000
self._matchCount = 0
self._similarity = 0
self._histSimilarity = 0
self._trustSimilarity = trustSimilarity
self._badTryCount = kwargs.get('badTryCount', 2)
self._zoom = kwargs.get('zoom', 0)
self._zoomOutDelta = kwargs.get('zoomOutDelta', 0.2)
self._zoomInDelta = kwargs.get('zoomInDelta', 0.2)
self._experienceSimilarity = kwargs.get('experienceSimilarity', 0.65)
if self._experienceSimilarity >= trustSimilarity - 0.05:
self._experienceSimilarity = trustSimilarity - 0.05
self._lessGoodSimilarity2 = kwargs.get('lessGoodSimilarity2', 0.6)
if self._lessGoodSimilarity2 >= trustSimilarity - 0.1:
self._lessGoodSimilarity2 = trustSimilarity - 0.1
self._untrustSimilarity = kwargs.get('untrustSimilarity', 0.3)
self._histTrustSimilarity = kwargs.get('histTrustSimilarity', 0.1)
self._histUntrustSimilarity = kwargs.get('histUntrustSimilarity', 10)
self._use_histSimilarity = True
def _getImageData(self, filePath, gray):
if not os.path.exists(filePath):
logger.error('file not found:%s' % filePath)
return None
imgData = cv2helper.imread(filePath)
if imgData is None:
logger.error('invalid image format!')
return None
channel = imgData.shape[2]
if not gray or (channel == 1 and gray):
return imgData
imgData = cv2helper.cvtColor(imgData)
if imgData is None:
logger.error('failed to covert to gray image')
return None
return imgData
def _getMultiResult(self, target, loc, distance=5):
"""获取多个相似对象"""
if loc[0].size < 2:
return [target]
st = time.time()
pts = zip(*loc[::-1])
tmp = [target]
for pt in pts:
if abs(pt[0] - target[0]) > distance or abs(pt[1] - target[1]) > distance:
tmp.append(pt)
rlt = []
for x in range(2):
tmp.sort(key=lambda pt: pt[x]) # 坐标点(x,y),先x排序,后y排序
rlt = tmp[:]
i = 0
while i < len(tmp) - 1:
pt_i = tmp[i]
j = i + 1
while j < len(tmp):
pt_j = tmp[j]
if abs(pt_j[0] - pt_i[0]) < distance and abs(pt_j[1] - pt_i[1]) < distance:
if pt_j in rlt:
if pt_j == target and pt_i in rlt:
rlt.remove(pt_i)
else:
rlt.remove(pt_j)
else:
break
j = j + 1
i = j
tmp = rlt
et = time.time()
logger.info('time cost:%s' % str(et - st))
return rlt
def _matchTemplateWithSeperateChannels(self, tplData, scnData, rect):
if len(tplData.shape) < 3:
# 已经是单通道,不再需要分通道
return True
tplImgHeight, tplImgWidth = tplData.shape[:2]
newTplImg = tplData.copy()
distance = min(tplImgHeight, tplImgWidth)
t = cv2.split(newTplImg)
s = cv2.split(scnData)
max_similarity = 0
max_rc = None
c = 0
for i in range(3):
rlt = cv2helper.matchTemplate(s[i], t[i])
_, max_val, _, max_loc = cv2helper.minMaxLoc(rlt)
if max_val > max_similarity:
max_similarity = max_val
max_rc = max_loc
c = i
logger.debug('rc:%s' % str(rect))
logger.debug('channel:%s' % c)
logger.debug('channel_rc:%s' % str(max_rc))
if abs(max_rc[0] - rect[0]) < distance and abs(max_rc[1] - rect[1]) < distance:
return True
return False
def _validate(self, img1, img2, pt, print_log=False):
img1 = img1.copy()
h, w = img1.shape[:2]
img2 = cv2helper.roi(img2.copy(), (pt[0], pt[1], pt[0] + w, pt[1] + h))
# 直方图256 bins验证, 在遇到23_t_1时判断失败
histValue = Hist().compare(img1, img2, colorBin=256) # 10 for 256, 0.1 for 8 (color bin)
if print_log:
# print('==============>hist: %s' % (histValue))
logger.debug('----------pt:%s--' % str(pt))
logger.debug('----------hist check--:%s--' % histValue)
if histValue > 50:
return 2
elif histValue <= 10:
return 0
else:
return 1
# phash验证
# p1 = pHash(img1)
# p2 = pHash(img2)
# pHM = hamming(p1, p2)
# if print_log:
# # print('============>hamming distance: %s' % (pHM))
# logger.debug('---------->pt:%s--phash and hamming distance check--:%s<---------' % (str(pt), pHM))
#
# if pHM > 15:
# return 2
#
# if histValue <= 10 and pHM <= 15:
# return 0
#
# return 1
# if rlt < 10:
# return 0
# elif rlt >= 10 and rlt < 50:
# return 1
# else:
# return 2
def _matchTemplate(self, tplImg, scnImg, scale=1.0, similarity=0.7, index=-1):
tplImgHeight, tplImgWidth = tplImg.shape[:2]
scnImgHeight, scnImgWidth = scnImg.shape[:2]
newTplImg = tplImg.copy()
if scale != 1.0:
tplImgHeight = int(tplImgHeight * scale)
tplImgWidth = int(tplImgWidth * scale)
newTplImg = cv2helper.resize(tplImg, tplImgWidth, tplImgHeight)
if tplImgHeight > scnImgHeight or tplImgWidth > scnImgWidth:
return (0, None)
self._matchCount = self._matchCount + 1
rlt = cv2helper.matchTemplate(scnImg.copy(), newTplImg.copy())
_, max_val, _, max_loc = cv2helper.minMaxLoc(rlt)
logger.debug('scale:%s' % scale)
logger.debug('height:%s' % tplImgHeight)
logger.debug('width:%s' % tplImgWidth)
logger.debug('searched similarity:%s' % max_val)
logger.debug('searched point:%s' % str(max_loc))
self._similarity = max_val
if max_val > self._maxSimilarity:
self._maxSimilarity = max_val
if max_val < self._minSimilarity:
self._minSimilarity = max_val
if max_val > similarity:
need_validation = True
if isinstance(index, int) and index >= 0:
loc = cv2helper.npwhere(rlt >= similarity)
cnt = loc[0].size
if cnt > 1:
distance = min(tplImgHeight, tplImgWidth)
logger.debug('found count:%s' % cnt)
logger.debug('distance:%s' % distance)
tmpPoints = self._getMultiResult(max_loc, loc, distance)
logger.debug('distance:%s' % distance)
# todo: check whether it is real suitable
pts = []
for pt in tmpPoints:
# # phash 验证
# p1 = pHash(newTplImg.copy())
# roiImg = cv2helper.roi(scnImg.copy(), (pt[0], pt[1], pt[0]+tplImgWidth, pt[1]+tplImgHeight))
# p2 = pHash(roiImg)
# pHM = hamming(p1, p2)
# logger.debug('pt:%s--hamming distance--:%s' % (str(pt),pHM))
# # 三通道模板匹配验证
# matched = self._matchTemplateWithSeperateChannels(newTplImg.copy(), scnImg.copy(), (pt[0],pt[1],0,0))
# logger.debug('pt:%s--hist check--:%s' % (str(pt),matched))
# if matched:
# pts.append(pt)
# 直方图验证
validation_rlt = self._validate(newTplImg, scnImg, pt)
if validation_rlt == 0:
pts.append(pt)
finalCount = len(pts)
if finalCount > index:
tmp = pts[index]
if tmp == max_loc:
need_validation = False
max_loc = tmp
logger.info('final count:%s' % finalCount)
logger.info('index:%s' % index)
logger.info('point:%s' % str(max_loc))
# todo: 找到后需要再次通过直方图确认,如果相似度很高,则不必再验证
if need_validation and max_val < 0.8:
if self._validate(newTplImg, scnImg, max_loc, True) > 1:
# 很大可能不相似
logger.info('not similarity after validation')
max_loc = None
# max_val = 0
rect = None
if max_loc:
rect = (max_loc[0], max_loc[1], tplImgWidth, tplImgHeight)
return (max_val, rect)
def _roi(self, tpl, img, rc, scaleRatioDelta):
th, tw = tpl.shape[:2]
ih, iw = img.shape[:2]
l, t, w, h = rc
delta = int(max(th * scaleRatioDelta, tw * scaleRatioDelta, 20)) + 1
_l = l - delta
_t = t - delta
_r = l + w + delta
_b = t + h + delta
if _l < 0:
_l = 0
if _t < 0:
_t = 0
if _r > iw:
_r = iw - 1
if _b > ih:
_b = ih - 1
newImg = img[_t:_b, _l:_r].copy()
return (newImg, (_l, _t))
def _getHistSimilarity(self, tpl, scene, scale, rc):
if not self._use_histSimilarity:
return 1000
tplImgHeight, tplImgWidth = tpl.shape[:2]
newTplImg = tpl.copy()
if scale != 1.0:
tplImgHeight = int(tplImgHeight * scale)
tplImgWidth = int(tplImgWidth * scale)
newTplImg = cv2helper.resize(tpl, tplImgWidth, tplImgHeight)
l, t, w, h = rc
newSceneImg = scene[t:(t + h), l:(l + w)].copy()
histSimilarity = self._hist.compare(newTplImg, newSceneImg)
self._histSimilarity = histSimilarity
if histSimilarity > self._maxHistSimilarity:
self._maxHistSimilarity = histSimilarity
if histSimilarity < self._minHistSimilarity:
self._minHistSimilarity = histSimilarity
logger.debug('hist similarity %s'%histSimilarity)
return histSimilarity
def _scale_direction(self, templ, image, direction, zoomMax, scale, lastMaxVal, rocPoint):
logger.debug('scale direction, direction:%s' % direction)
logger.debug('zoomMax:%s' % zoomMax)
logger.debug('scale:%s' % scale)
logger.debug('lastMaxValue:%s' % lastMaxVal)
rocImage = image.copy()
tryCount = 0
while abs(1.0 - scale) < zoomMax:
if lastMaxVal > self._experienceSimilarity:
delta = 0.01
elif lastMaxVal > self._lessGoodSimilarity2:
delta = 0.02
else:
delta = 0.03
scale = round(scale + direction * delta, 3)
max_val, rect = self._matchTemplate(templ, rocImage, scale, self._trustSimilarity)
if rect is None:
break
if max_val >= self._lessGoodSimilarity2:
# 相似度必须大于某一个阈值,直方图比较才作为参考
histSimilarity = self._getHistSimilarity(templ, image, scale, rect)
if histSimilarity <= self._histTrustSimilarity:
if max_val >= self._experienceSimilarity:
# 如果相似度 > experienceSimilarity 并且 直方图比较结果 < histTrustSimilarity, 则认为很相似,直接返回
return scale, max_val, self._calcRect(rect, rocPoint)
if rocPoint is None:
rocImage, rocPoint = self._roi(templ, image, rect, zoomMax)
time.sleep(0.05) # 必须加,释放CPU资源
if max_val < self._trustSimilarity:
if max_val < lastMaxVal and max_val < self._experienceSimilarity:
# 比上次匹配结果更差,但是相似度小于experienceSimilarity
tryCount += 1
if tryCount > self._badTryCount:
break
else:
tryCount = 1
lastMaxVal = max_val
continue
return scale, max_val, self._calcRect(rect, rocPoint)
return scale, None, None
def _calcRect(self, rect, rocPoint):
if rocPoint is None:
rocPoint = (0, 0)
l, t, w, h = rect
x, y = rocPoint
rect = (l + x, t + y, w, h)
return rect
def _scaleMatch(self, templ, image, similarity, maxValue, maxRect, **kwargs):
lastMaxVal = maxValue
scale = 1.0
rocPoint = None
rocImage = image.copy()
if maxValue >= self._lessGoodSimilarity2:
# 相似度必须大于某一个阈值,直方图比较才作为参考
histSimilarity = self._getHistSimilarity(templ, image, scale, maxRect)
if histSimilarity <= self._histTrustSimilarity:
if maxValue >= self._experienceSimilarity:
# 如果相似度 > experienceSimilarity 并且 直方图比较结果 < histTrustSimilarity, 则认为很相似,直接返回
return scale, maxValue, maxRect
rocImage, rocPoint = self._roi(templ, image, maxRect, self._zoomOutDelta)
zoomAttr = {1: [0, 0, 0], -1: [0, 0, 0]}
if self._zoom == 0:
# 计算该先往哪方面缩放
delta = 0.03
if lastMaxVal > self._experienceSimilarity:
delta = 0.01
elif lastMaxVal > self._lessGoodSimilarity2:
delta = 0.02
zoom_out_scale = 1.0 + delta
zoom_out_max_val, zoom_out_rect = self._matchTemplate(templ, rocImage, zoom_out_scale,
self._trustSimilarity)
if zoom_out_rect is None:
return 1.0, None, None
if zoom_out_max_val >= self._trustSimilarity:
return zoom_out_scale, zoom_out_max_val, self._calcRect(zoom_out_rect, rocPoint)
zoom_in_scale = 1.0 - delta
zoom_in_max_val, zoom_in_rect = self._matchTemplate(templ, rocImage, zoom_in_scale, self._trustSimilarity)
if zoom_in_rect is None:
return 1.0, None, None
if zoom_in_max_val >= self._trustSimilarity:
return zoom_in_scale, zoom_out_max_val, self._calcRect(zoom_in_rect, rocPoint)
zoomAttr[1] = [self._zoomOutDelta, zoom_out_scale, zoom_out_max_val]
zoomAttr[-1] = [self._zoomInDelta, zoom_in_scale, zoom_in_max_val]
if zoom_out_max_val >= zoom_in_max_val:
direction = 1
else:
direction = -1
else:
direction = self._zoom
scale = 1.0
zoomAttr[direction][1] = 1.0
zoomAttr[direction][2] = lastMaxVal
if direction == 1:
zoomAttr[direction][0] = self._zoomOutDelta
else:
zoomAttr[direction][0] = self._zoomInDelta
logger.debug('direction:%s' %direction)
logger.debug('zoom:%s' % self._zoom)
logger.debug('zoom attrs:%s' % str(zoomAttr))
scale, max_val, rect = self._scale_direction(templ, rocImage, direction, zoomAttr[direction][0],
zoomAttr[direction][1], zoomAttr[direction][2], rocPoint)
logger.debug('direction:%s' % direction)
logger.debug('scale:%s' % scale)
logger.debug('max_val:%s' % max_val)
logger.debug('rect:%s' % str(rect))
if rect is not None:
return scale, max_val, rect
if self._zoom == 0:
direction = direction * -1
scale, max_val, rect = self._scale_direction(templ, rocImage, direction, zoomAttr[direction][0],
zoomAttr[direction][1], zoomAttr[direction][2], rocPoint)
if rect is not None:
return scale, max_val, rect
return 1.0, None, None
def _searchImage(self, tplData, sceneData, similarity, **kwargs):
sceneImg = sceneData.copy()
position = kwargs.get('position')
logger.debug('templateImg position:%s' % str(position))
is_transparent = kwargs.get('is_transparent')
logger.debug('templateImg is_transparent:%s' % is_transparent)
if position and len(position) == 4:
logger.info('TemplateImg position is locked,crop Img!')
crop_position = _getPosition(tplData, sceneData, position)
crop_x_min, crop_y_min, crop_x_max, crop_y_max = crop_position
sceneData = sceneData[crop_y_min:crop_y_max, crop_x_min:crop_x_max]
else:
logger.info('TemplateImg position is unlocked')
crop_x_min, crop_y_min, crop_x_max, crop_y_max = 0, 0, 0, 0
templ_h, templ_w = tplData.shape[:2]
sceneImage_h, sceneImage_w = sceneData.shape[:2]
if templ_h > sceneImage_h or templ_w > sceneImage_w:
logger.error('tpl image size is smaller than screen' )
return (None, None)
res_simpleColor=judge_SimpleColor(tplData)
scaleRatio = 1.0
if is_transparent==1 or res_simpleColor:
sim,rect=transparent(tplData,sceneData)
if position and len(position) == 4:
if sim>=0.6:
return ((rect[0]+crop_x_min, rect[1]+crop_y_min, rect[0] + rect[2]+crop_x_min, rect[1] + rect[3]+crop_y_min), scaleRatio)
else:
if sim>=0.8:
return ((rect[0] + crop_x_min, rect[1] + crop_y_min, rect[0] + rect[2] + crop_x_min,
rect[1] + rect[3] + crop_y_min), scaleRatio)
if 'scaleRatio' in kwargs:
scaleRatio = kwargs.pop('scaleRatio')
max_val, rect = self._matchTemplate(tplData, sceneData, scaleRatio, similarity)
else:
max_val, rect = self._matchTemplate(tplData, sceneData, 1.0, similarity)
if rect is None:
return (None, None)
if max_val < similarity:
if 'scaleMatch' in kwargs and kwargs['scaleMatch']:
scaleRatio, max_val, rect = self._scaleMatch(tplData, sceneData, similarity, max_val, rect, **kwargs)
if rect:
x, y, w, h = rect
return ((x+crop_x_min, y+crop_y_min, x + w+crop_x_min, y + h+crop_y_min), scaleRatio)
return (None, None)
return ((rect[0]+crop_x_min, rect[1]+crop_y_min, rect[0] + rect[2]+crop_x_min, rect[1] + rect[3]+crop_y_min), scaleRatio)
def searchImage(self, templateImage, sceneImage, similarity=0.7, gray=False, **kwargs):
result = MatchResult()
result.method = 'template'
st = time.time()
title = ''
self._reset(similarity, **kwargs)
tplData = None
if isinstance(templateImage, str):
title = os.path.splitext(os.path.split(templateImage)[-1])[0]
tplData = self._getImageData(templateImage, gray)
elif cv2helper.isNdarray(templateImage):
if gray and len(tplData.shape) == 3:
tplData = cv2helper.cvtColor(templateImage)
else:
tplData = templateImage
else:
raise TypeError('unknown type:%s' % type(templateImage))
sceneData = None
if isinstance(sceneImage, str):
title = '%s_%s' % (title, os.path.splitext(os.path.split(sceneImage)[-1])[0])
sceneData = self._getImageData(sceneImage, gray)
elif cv2helper.isNdarray(sceneImage):
if gray and len(sceneImage.shape) == 3:
sceneData = cv2helper.cvtColor(sceneImage)
else:
sceneData = sceneImage
else:
raise TypeError('unknown type:%s' % type(sceneImage))
if tplData is None or sceneData is None:
et = time.time()
time_cost = et - st
logger.debug('failed to get image data')
result.time=time_cost
result.parameters = kwargs
return result
if isinstance(templateImage, str) and int(kwargs.get('cache', '1')) and \
templateImage in TemplateMatcher._SCALE_RATIO_CACHE:
kwargs['scaleRatio'] = TemplateMatcher._SCALE_RATIO_CACHE[templateImage]
rect, scaleRatio = self._searchImage(tplData, sceneData, similarity, **kwargs)
if rect:
index = kwargs.get('index', -1)
if index >= 0:
logger.debug('index:%s' % index)
finalSimilarity = self._similarity
if finalSimilarity > similarity:
# 取标准相似度值,否则结果只有1个
finalSimilarity = similarity
_, _rect = self._matchTemplate(tplData, sceneData, scaleRatio, finalSimilarity, index)
if rect:
l, t, w, h = _rect
rect = (l, t, l + w, t + h)
if rect:
if isinstance(templateImage, str) and int(kwargs.get('cache', '1')) and \
scaleRatio != 1.0 and templateImage not in TemplateMatcher._SCALE_RATIO_CACHE:
TemplateMatcher._SCALE_RATIO_CACHE[templateImage] = scaleRatio
if isinstance(templateImage, str):
et = time.time()
time_cost = et - st
logger.debug('found, matched count:%s' % self._matchCount)
logger.debug('rect: %s' % str(rect))
logger.debug('final scaleRatio:%s' % scaleRatio)
logger.debug('max hist:%s' % self._maxHistSimilarity)
logger.debug('min similarity:%s' % self._minSimilarity)
logger.debug('histSimilarity:%s' % self._histSimilarity)
logger.debug('similarity:%s' % self._similarity)
logger.debug('time cost:%s' % time_cost)
logger.debug('matched count:%s' % self._matchCount)
if 'showResult' in kwargs and kwargs['showResult']:
self._showResult(tplData, sceneData, [rect], "OK_%s" % title)
result.rect = rect
et = time.time()
time_cost = et - st
result.time = time_cost
result.scale_ratio = scaleRatio
result.parameters = kwargs
return result
if isinstance(templateImage, str):
et = time.time()
time_cost = et - st
result.time = time_cost
result.parameters = kwargs
logger.debug('none, matched count:%s' % self._matchCount)
logger.debug('rect: %s' % str(rect))
logger.debug('max hist:%s' % self._maxHistSimilarity)
logger.debug('min similarity:%s' % self._minSimilarity)
logger.debug('time cost:%s' % time_cost)
logger.debug('matched count:%s' % self._matchCount)
return result
def _showResult(self, templateData, sceneData, rects, title='show'):
for rect in rects:
l, t, r, b = rect
cv2helper.eval2('cv2.rectangle')(sceneData, (l, t), (r, b), (0, 0, 255), 2)
h1, w1 = templateData.shape[:2]
h2, w2 = sceneData.shape[:2]
if len(sceneData.shape) > 2:
vis = cv2helper.eval2('np.zeros')((max(h1, h2), w1 + w2, 3))
else:
vis = cv2helper.eval2('np.zeros')((max(h1, h2), w1 + w2))
vis[:h1, :w1] = templateData
vis[:h2, w1:w1 + w2] = sceneData
fshow = '%s.jpg' % title
cv2.imwrite(fshow, vis)
tmp = cv2.imread(fshow)
cv2helper.show(tmp, title)
os.remove(fshow)
|
PypiClean
|
/lbt-recipes-0.26.0.tar.gz/lbt-recipes-0.26.0/lbt_recipes/cumulative_radiation/flow/main_49ddb174.py
|
import luigi
import pathlib
from queenbee_local import QueenbeeTask
from queenbee_local import load_input_param as qb_load_input_param
from . import _queenbee_status_lock_
from .dependencies.cumulative_radiation_postprocess import _CumulativeRadiationPostprocess_49ddb174Orchestrator as CumulativeRadiationPostprocess_49ddb174Workerbee
from .dependencies.cumulative_radiation_prepare_folder import _CumulativeRadiationPrepareFolder_49ddb174Orchestrator as CumulativeRadiationPrepareFolder_49ddb174Workerbee
_default_inputs = { 'cpu_count': 50,
'grid_filter': '*',
'min_sensor_count': 500,
'model': None,
'north': 0.0,
'params_folder': '__params',
'radiance_parameters': '-ab 2 -ad 5000 -lw 2e-05',
'simulation_folder': '.',
'sky_density': 1,
'timestep': 1,
'wea': None}
class PrepareFolderCumulativeRadiation(QueenbeeTask):
"""No description is provided."""
# DAG Input parameters
_input_params = luigi.DictParameter()
_status_lock = _queenbee_status_lock_
# Task inputs
@property
def timestep(self):
return self._input_params['timestep']
@property
def north(self):
return self._input_params['north']
@property
def cpu_count(self):
return self._input_params['cpu_count']
@property
def min_sensor_count(self):
return self._input_params['min_sensor_count']
@property
def grid_filter(self):
return self._input_params['grid_filter']
@property
def sky_density(self):
return self._input_params['sky_density']
radiance_parameters = luigi.Parameter(default='-ab 2 -ad 5000 -lw 2e-05')
@property
def model(self):
value = pathlib.Path(self._input_params['model'])
return value.as_posix() if value.is_absolute() \
else pathlib.Path(self.initiation_folder, value).resolve().as_posix()
@property
def wea(self):
value = pathlib.Path(self._input_params['wea'])
return value.as_posix() if value.is_absolute() \
else pathlib.Path(self.initiation_folder, value).resolve().as_posix()
@property
def execution_folder(self):
return pathlib.Path(self._input_params['simulation_folder']).as_posix()
@property
def initiation_folder(self):
return pathlib.Path(self._input_params['simulation_folder']).as_posix()
@property
def params_folder(self):
return pathlib.Path(self.execution_folder, self._input_params['params_folder']).resolve().as_posix()
@property
def map_dag_inputs(self):
"""Map task inputs to DAG inputs."""
inputs = {
'simulation_folder': self.execution_folder,
'timestep': self.timestep,
'north': self.north,
'cpu_count': self.cpu_count,
'min_sensor_count': self.min_sensor_count,
'grid_filter': self.grid_filter,
'sky_density': self.sky_density,
'model': self.model,
'wea': self.wea
}
try:
inputs['__debug__'] = self._input_params['__debug__']
except KeyError:
# not debug mode
pass
return inputs
def run(self):
yield [CumulativeRadiationPrepareFolder_49ddb174Workerbee(_input_params=self.map_dag_inputs)]
pathlib.Path(self.execution_folder).mkdir(parents=True, exist_ok=True)
self._copy_output_artifacts(self.execution_folder)
self._copy_output_parameters(self.execution_folder)
pathlib.Path(self.execution_folder, 'prepare_folder_cumulative_radiation.done').write_text('done!')
def output(self):
return {
'model_folder': luigi.LocalTarget(
pathlib.Path(self.execution_folder, 'model').resolve().as_posix()
),
'resources': luigi.LocalTarget(
pathlib.Path(self.execution_folder, 'resources').resolve().as_posix()
),
'results': luigi.LocalTarget(
pathlib.Path(self.execution_folder, 'results').resolve().as_posix()
),
'initial_results': luigi.LocalTarget(
pathlib.Path(self.execution_folder, 'initial_results').resolve().as_posix()
),
'sensor_grids': luigi.LocalTarget(
pathlib.Path(
self.params_folder,
'resources/grid/_info.json').resolve().as_posix()
),
'grids_info': luigi.LocalTarget(
pathlib.Path(
self.params_folder,
'results/average_irradiance/grids_info.json').resolve().as_posix()
),
'is_done': luigi.LocalTarget(pathlib.Path(self.execution_folder, 'prepare_folder_cumulative_radiation.done').resolve().as_posix())
}
@property
def output_artifacts(self):
return [
{
'name': 'model-folder', 'from': 'model',
'to': pathlib.Path(self.execution_folder, 'model').resolve().as_posix(),
'optional': False,
'type': 'folder'
},
{
'name': 'resources', 'from': 'resources',
'to': pathlib.Path(self.execution_folder, 'resources').resolve().as_posix(),
'optional': False,
'type': 'folder'
},
{
'name': 'results', 'from': 'results',
'to': pathlib.Path(self.execution_folder, 'results').resolve().as_posix(),
'optional': False,
'type': 'folder'
},
{
'name': 'initial-results', 'from': 'initial_results',
'to': pathlib.Path(self.execution_folder, 'initial_results').resolve().as_posix(),
'optional': False,
'type': 'folder'
}]
@property
def output_parameters(self):
return [{'name': 'sensor-grids', 'from': 'resources/grid/_info.json', 'to': pathlib.Path(self.params_folder, 'resources/grid/_info.json').resolve().as_posix()},
{'name': 'grids-info', 'from': 'results/average_irradiance/grids_info.json', 'to': pathlib.Path(self.params_folder, 'results/average_irradiance/grids_info.json').resolve().as_posix()}]
class SkyRadiationRaytracingLoop(QueenbeeTask):
"""Calculate daylight coefficient for a grid of sensors from a sky matrix."""
# DAG Input parameters
_input_params = luigi.DictParameter()
_status_lock = _queenbee_status_lock_
# Task inputs
@property
def radiance_parameters(self):
return self._input_params['radiance_parameters']
@property
def fixed_radiance_parameters(self):
return '-aa 0.0 -I -c 1'
@property
def sensor_count(self):
return self.item['count']
@property
def conversion(self):
return '0.265 0.670 0.065'
@property
def output_format(self):
return 'a'
header = luigi.Parameter(default='keep')
order_by = luigi.Parameter(default='sensor')
@property
def sky_dome(self):
value = pathlib.Path(self.input()['PrepareFolderCumulativeRadiation']['resources'].path, 'sky.dome')
return value.as_posix() if value.is_absolute() \
else pathlib.Path(self.initiation_folder, value).resolve().as_posix()
@property
def sky_matrix(self):
value = pathlib.Path(self.input()['PrepareFolderCumulativeRadiation']['resources'].path, 'sky.mtx')
return value.as_posix() if value.is_absolute() \
else pathlib.Path(self.initiation_folder, value).resolve().as_posix()
@property
def scene_file(self):
value = pathlib.Path(self.input()['PrepareFolderCumulativeRadiation']['resources'].path, 'scene.oct')
return value.as_posix() if value.is_absolute() \
else pathlib.Path(self.initiation_folder, value).resolve().as_posix()
@property
def sensor_grid(self):
value = pathlib.Path(self.input()['PrepareFolderCumulativeRadiation']['resources'].path, 'grid/{item_full_id}.pts'.format(item_full_id=self.item['full_id']))
return value.as_posix() if value.is_absolute() \
else pathlib.Path(self.initiation_folder, value).resolve().as_posix()
@property
def bsdf_folder(self):
try:
pathlib.Path(self.input()['PrepareFolderCumulativeRadiation']['model_folder'].path, 'bsdf')
except TypeError:
# optional artifact
return None
value = pathlib.Path(self.input()['PrepareFolderCumulativeRadiation']['model_folder'].path, 'bsdf')
return value.as_posix() if value.is_absolute() \
else pathlib.Path(self.initiation_folder, value).resolve().as_posix()
# get item for loop
try:
item = luigi.DictParameter()
except Exception:
item = luigi.Parameter()
@property
def execution_folder(self):
return pathlib.Path(self._input_params['simulation_folder'], 'initial_results/{item_full_id}'.format(item_full_id=self.item['full_id'])).resolve().as_posix()
@property
def initiation_folder(self):
return pathlib.Path(self._input_params['simulation_folder']).as_posix()
@property
def params_folder(self):
return pathlib.Path(self.execution_folder, self._input_params['params_folder']).resolve().as_posix()
@property
def __script__(self):
return pathlib.Path(__file__).parent.joinpath('scripts', 'sky_radiation_raytracing.py').resolve()
@property
def is_script(self):
return False
def command(self):
return 'honeybee-radiance dc scoeff scene.oct grid.pts sky.dome sky.mtx --sensor-count {sensor_count} --output results.ill --rad-params "{radiance_parameters}" --rad-params-locked "{fixed_radiance_parameters}" --conversion "{conversion}" --output-format {output_format} --order-by-{order_by} --{header}-header'.format(header=self.header, output_format=self.output_format, radiance_parameters=self.radiance_parameters, conversion=self.conversion, order_by=self.order_by, sensor_count=self.sensor_count, fixed_radiance_parameters=self.fixed_radiance_parameters)
def requires(self):
return {'PrepareFolderCumulativeRadiation': PrepareFolderCumulativeRadiation(_input_params=self._input_params)}
def output(self):
return {
'result_file': luigi.LocalTarget(
pathlib.Path(self.execution_folder, '../{item_name}.res'.format(item_name=self.item['name'])).resolve().as_posix()
)
}
@property
def input_artifacts(self):
return [
{'name': 'sky_dome', 'to': 'sky.dome', 'from': self.sky_dome, 'optional': False},
{'name': 'sky_matrix', 'to': 'sky.mtx', 'from': self.sky_matrix, 'optional': False},
{'name': 'scene_file', 'to': 'scene.oct', 'from': self.scene_file, 'optional': False},
{'name': 'sensor_grid', 'to': 'grid.pts', 'from': self.sensor_grid, 'optional': False},
{'name': 'bsdf_folder', 'to': 'model/bsdf', 'from': self.bsdf_folder, 'optional': True}]
@property
def output_artifacts(self):
return [
{
'name': 'result-file', 'from': 'results.ill',
'to': pathlib.Path(self.execution_folder, '../{item_name}.res'.format(item_name=self.item['name'])).resolve().as_posix(),
'optional': False,
'type': 'file'
}]
@property
def input_parameters(self):
return {
'radiance_parameters': self.radiance_parameters,
'fixed_radiance_parameters': self.fixed_radiance_parameters,
'sensor_count': self.sensor_count,
'conversion': self.conversion,
'output_format': self.output_format,
'header': self.header,
'order_by': self.order_by}
@property
def task_image(self):
return 'docker.io/ladybugtools/honeybee-radiance:1.65.32'
@property
def image_workdir(self):
return '/home/ladybugbot/run'
class SkyRadiationRaytracing(luigi.Task):
"""Calculate daylight coefficient for a grid of sensors from a sky matrix."""
# global parameters
_input_params = luigi.DictParameter()
@property
def sensor_grids(self):
value = pathlib.Path(self.input()['PrepareFolderCumulativeRadiation']['sensor_grids'].path)
return value.as_posix() if value.is_absolute() \
else pathlib.Path(self.initiation_folder, value).resolve().as_posix()
@property
def items(self):
try:
# assume the input is a file
return qb_load_input_param(self.sensor_grids)
except:
# it is a parameter
return self.input()['PrepareFolderCumulativeRadiation']['sensor_grids'].path
def run(self):
yield [SkyRadiationRaytracingLoop(item=item, _input_params=self._input_params) for item in self.items]
done_file = pathlib.Path(self.execution_folder, 'sky_radiation_raytracing.done')
done_file.parent.mkdir(parents=True, exist_ok=True)
done_file.write_text('done!')
@property
def initiation_folder(self):
return pathlib.Path(self._input_params['simulation_folder']).as_posix()
@property
def execution_folder(self):
return pathlib.Path(self._input_params['simulation_folder']).as_posix()
@property
def params_folder(self):
return pathlib.Path(self.execution_folder, self._input_params['params_folder']).resolve().as_posix()
def requires(self):
return {'PrepareFolderCumulativeRadiation': PrepareFolderCumulativeRadiation(_input_params=self._input_params)}
def output(self):
return {
'is_done': luigi.LocalTarget(pathlib.Path(self.execution_folder, 'sky_radiation_raytracing.done').resolve().as_posix())
}
@property
def task_image(self):
return 'docker.io/ladybugtools/honeybee-radiance:1.65.32'
@property
def image_workdir(self):
return '/home/ladybugbot/run'
class RestructureResults(QueenbeeTask):
"""Restructure files in a distributed folder."""
# DAG Input parameters
_input_params = luigi.DictParameter()
_status_lock = _queenbee_status_lock_
# Task inputs
@property
def extension(self):
return 'res'
@property
def input_folder(self):
value = pathlib.Path('initial_results')
return value.as_posix() if value.is_absolute() \
else pathlib.Path(self.initiation_folder, value).resolve().as_posix()
@property
def execution_folder(self):
return pathlib.Path(self._input_params['simulation_folder']).as_posix()
@property
def initiation_folder(self):
return pathlib.Path(self._input_params['simulation_folder']).as_posix()
@property
def params_folder(self):
return pathlib.Path(self.execution_folder, self._input_params['params_folder']).resolve().as_posix()
@property
def __script__(self):
return pathlib.Path(__file__).parent.joinpath('scripts', 'restructure_results.py').resolve()
@property
def is_script(self):
return False
def command(self):
return 'honeybee-radiance grid merge-folder ./input_folder ./output_folder {extension} --dist-info dist_info.json'.format(extension=self.extension)
def requires(self):
return {'SkyRadiationRaytracing': SkyRadiationRaytracing(_input_params=self._input_params)}
def output(self):
return {
'output_folder': luigi.LocalTarget(
pathlib.Path(self.execution_folder, 'results/average_irradiance').resolve().as_posix()
)
}
@property
def input_artifacts(self):
return [
{'name': 'input_folder', 'to': 'input_folder', 'from': self.input_folder, 'optional': False}]
@property
def output_artifacts(self):
return [
{
'name': 'output-folder', 'from': 'output_folder',
'to': pathlib.Path(self.execution_folder, 'results/average_irradiance').resolve().as_posix(),
'optional': False,
'type': 'folder'
}]
@property
def input_parameters(self):
return {
'extension': self.extension}
@property
def task_image(self):
return 'docker.io/ladybugtools/honeybee-radiance:1.65.32'
@property
def image_workdir(self):
return '/home/ladybugbot/run'
class CumulativeRadiationPostprocessLoop(luigi.Task):
"""No description is provided."""
# DAG Input parameters
_input_params = luigi.DictParameter()
_status_lock = _queenbee_status_lock_
# Task inputs
@property
def grid_name(self):
return self.item['full_id']
@property
def timestep(self):
return self._input_params['timestep']
@property
def average_irradiance(self):
value = pathlib.Path(self.input()['RestructureResults']['output_folder'].path, '{item_full_id}.res'.format(item_full_id=self.item['full_id']))
return value.as_posix() if value.is_absolute() \
else pathlib.Path(self.initiation_folder, value).resolve().as_posix()
@property
def wea(self):
value = pathlib.Path(self._input_params['wea'])
return value.as_posix() if value.is_absolute() \
else pathlib.Path(self.initiation_folder, value).resolve().as_posix()
# get item for loop
try:
item = luigi.DictParameter()
except Exception:
item = luigi.Parameter()
@property
def execution_folder(self):
return pathlib.Path(self._input_params['simulation_folder']).as_posix()
@property
def initiation_folder(self):
return pathlib.Path(self._input_params['simulation_folder']).as_posix()
@property
def params_folder(self):
return pathlib.Path(self.execution_folder, self._input_params['params_folder']).resolve().as_posix()
@property
def map_dag_inputs(self):
"""Map task inputs to DAG inputs."""
inputs = {
'simulation_folder': self.execution_folder,
'grid_name': self.grid_name,
'average_irradiance': self.average_irradiance,
'wea': self.wea,
'timestep': self.timestep
}
try:
inputs['__debug__'] = self._input_params['__debug__']
except KeyError:
# not debug mode
pass
return inputs
def run(self):
yield [CumulativeRadiationPostprocess_49ddb174Workerbee(_input_params=self.map_dag_inputs)]
done_file = pathlib.Path(self.execution_folder, 'cumulative_radiation_postprocess.done')
done_file.parent.mkdir(parents=True, exist_ok=True)
done_file.write_text('done!')
def requires(self):
return {'PrepareFolderCumulativeRadiation': PrepareFolderCumulativeRadiation(_input_params=self._input_params), 'RestructureResults': RestructureResults(_input_params=self._input_params)}
def output(self):
return {
'is_done': luigi.LocalTarget(pathlib.Path(self.execution_folder, 'cumulative_radiation_postprocess.done').resolve().as_posix())
}
class CumulativeRadiationPostprocess(luigi.Task):
"""No description is provided."""
# global parameters
_input_params = luigi.DictParameter()
@property
def grids_info(self):
value = pathlib.Path(self.input()['PrepareFolderCumulativeRadiation']['grids_info'].path)
return value.as_posix() if value.is_absolute() \
else pathlib.Path(self.initiation_folder, value).resolve().as_posix()
@property
def items(self):
try:
# assume the input is a file
return qb_load_input_param(self.grids_info)
except:
# it is a parameter
return self.input()['PrepareFolderCumulativeRadiation']['grids_info'].path
def run(self):
yield [CumulativeRadiationPostprocessLoop(item=item, _input_params=self._input_params) for item in self.items]
done_file = pathlib.Path(self.execution_folder, 'cumulative_radiation_postprocess.done')
done_file.parent.mkdir(parents=True, exist_ok=True)
done_file.write_text('done!')
@property
def initiation_folder(self):
return pathlib.Path(self._input_params['simulation_folder']).as_posix()
@property
def execution_folder(self):
return pathlib.Path(self._input_params['simulation_folder']).as_posix()
@property
def params_folder(self):
return pathlib.Path(self.execution_folder, self._input_params['params_folder']).resolve().as_posix()
def requires(self):
return {'PrepareFolderCumulativeRadiation': PrepareFolderCumulativeRadiation(_input_params=self._input_params), 'RestructureResults': RestructureResults(_input_params=self._input_params)}
def output(self):
return {
'is_done': luigi.LocalTarget(pathlib.Path(self.execution_folder, 'cumulative_radiation_postprocess.done').resolve().as_posix())
}
class _Main_49ddb174Orchestrator(luigi.WrapperTask):
"""Runs all the tasks in this module."""
# user input for this module
_input_params = luigi.DictParameter()
@property
def input_values(self):
params = dict(_default_inputs)
params.update(dict(self._input_params))
return params
def requires(self):
yield [CumulativeRadiationPostprocess(_input_params=self.input_values)]
|
PypiClean
|
/xs_transformers-1.0.7-py3-none-any.whl/xs_transformers/models/squeezebert/tokenization_squeezebert_fast.py
|
"""Tokenization classes for SqueezeBERT."""
import json
from typing import List, Optional, Tuple
from tokenizers import normalizers
from ...tokenization_utils_fast import PreTrainedTokenizerFast
from ...utils import logging
from .tokenization_squeezebert import SqueezeBertTokenizer
logger = logging.get_logger(__name__)
VOCAB_FILES_NAMES = {"vocab_file": "vocab.txt", "tokenizer_file": "tokenizer.json"}
PRETRAINED_VOCAB_FILES_MAP = {
"vocab_file": {
"squeezebert/squeezebert-uncased": (
"https://huggingface.co/squeezebert/squeezebert-uncased/resolve/main/vocab.txt"
),
"squeezebert/squeezebert-mnli": "https://huggingface.co/squeezebert/squeezebert-mnli/resolve/main/vocab.txt",
"squeezebert/squeezebert-mnli-headless": (
"https://huggingface.co/squeezebert/squeezebert-mnli-headless/resolve/main/vocab.txt"
),
},
"tokenizer_file": {
"squeezebert/squeezebert-uncased": (
"https://huggingface.co/squeezebert/squeezebert-uncased/resolve/main/tokenizer.json"
),
"squeezebert/squeezebert-mnli": (
"https://huggingface.co/squeezebert/squeezebert-mnli/resolve/main/tokenizer.json"
),
"squeezebert/squeezebert-mnli-headless": (
"https://huggingface.co/squeezebert/squeezebert-mnli-headless/resolve/main/tokenizer.json"
),
},
}
PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES = {
"squeezebert/squeezebert-uncased": 512,
"squeezebert/squeezebert-mnli": 512,
"squeezebert/squeezebert-mnli-headless": 512,
}
PRETRAINED_INIT_CONFIGURATION = {
"squeezebert/squeezebert-uncased": {"do_lower_case": True},
"squeezebert/squeezebert-mnli": {"do_lower_case": True},
"squeezebert/squeezebert-mnli-headless": {"do_lower_case": True},
}
# Copied from transformers.models.bert.tokenization_bert_fast.BertTokenizerFast with Bert->SqueezeBert,BERT->SqueezeBERT
class SqueezeBertTokenizerFast(PreTrainedTokenizerFast):
r"""
Construct a "fast" SqueezeBERT tokenizer (backed by HuggingFace's *tokenizers* library). Based on WordPiece.
This tokenizer inherits from [`PreTrainedTokenizerFast`] which contains most of the main methods. Users should
refer to this superclass for more information regarding those methods.
Args:
vocab_file (`str`):
File containing the vocabulary.
do_lower_case (`bool`, *optional*, defaults to `True`):
Whether or not to lowercase the input when tokenizing.
unk_token (`str`, *optional*, defaults to `"[UNK]"`):
The unknown token. A token that is not in the vocabulary cannot be converted to an ID and is set to be this
token instead.
sep_token (`str`, *optional*, defaults to `"[SEP]"`):
The separator token, which is used when building a sequence from multiple sequences, e.g. two sequences for
sequence classification or for a text and a question for question answering. It is also used as the last
token of a sequence built with special tokens.
pad_token (`str`, *optional*, defaults to `"[PAD]"`):
The token used for padding, for example when batching sequences of different lengths.
cls_token (`str`, *optional*, defaults to `"[CLS]"`):
The classifier token which is used when doing sequence classification (classification of the whole sequence
instead of per-token classification). It is the first token of the sequence when built with special tokens.
mask_token (`str`, *optional*, defaults to `"[MASK]"`):
The token used for masking values. This is the token used when training this model with masked language
modeling. This is the token which the model will try to predict.
clean_text (`bool`, *optional*, defaults to `True`):
Whether or not to clean the text before tokenization by removing any control characters and replacing all
whitespaces by the classic one.
tokenize_chinese_chars (`bool`, *optional*, defaults to `True`):
Whether or not to tokenize Chinese characters. This should likely be deactivated for Japanese (see [this
issue](https://github.com/huggingface/transformers/issues/328)).
strip_accents (`bool`, *optional*):
Whether or not to strip all accents. If this option is not specified, then it will be determined by the
value for `lowercase` (as in the original SqueezeBERT).
wordpieces_prefix (`str`, *optional*, defaults to `"##"`):
The prefix for subwords.
"""
vocab_files_names = VOCAB_FILES_NAMES
pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP
pretrained_init_configuration = PRETRAINED_INIT_CONFIGURATION
max_model_input_sizes = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES
slow_tokenizer_class = SqueezeBertTokenizer
def __init__(
self,
vocab_file=None,
tokenizer_file=None,
do_lower_case=True,
unk_token="[UNK]",
sep_token="[SEP]",
pad_token="[PAD]",
cls_token="[CLS]",
mask_token="[MASK]",
tokenize_chinese_chars=True,
strip_accents=None,
**kwargs
):
super().__init__(
vocab_file,
tokenizer_file=tokenizer_file,
do_lower_case=do_lower_case,
unk_token=unk_token,
sep_token=sep_token,
pad_token=pad_token,
cls_token=cls_token,
mask_token=mask_token,
tokenize_chinese_chars=tokenize_chinese_chars,
strip_accents=strip_accents,
**kwargs,
)
normalizer_state = json.loads(self.backend_tokenizer.normalizer.__getstate__())
if (
normalizer_state.get("lowercase", do_lower_case) != do_lower_case
or normalizer_state.get("strip_accents", strip_accents) != strip_accents
or normalizer_state.get("handle_chinese_chars", tokenize_chinese_chars)
!= tokenize_chinese_chars
):
normalizer_class = getattr(normalizers, normalizer_state.pop("type"))
normalizer_state["lowercase"] = do_lower_case
normalizer_state["strip_accents"] = strip_accents
normalizer_state["handle_chinese_chars"] = tokenize_chinese_chars
self.backend_tokenizer.normalizer = normalizer_class(**normalizer_state)
self.do_lower_case = do_lower_case
def build_inputs_with_special_tokens(self, token_ids_0, token_ids_1=None):
"""
Build model inputs from a sequence or a pair of sequence for sequence classification tasks by concatenating and
adding special tokens. A SqueezeBERT sequence has the following format:
- single sequence: `[CLS] X [SEP]`
- pair of sequences: `[CLS] A [SEP] B [SEP]`
Args:
token_ids_0 (`List[int]`):
List of IDs to which the special tokens will be added.
token_ids_1 (`List[int]`, *optional*):
Optional second list of IDs for sequence pairs.
Returns:
`List[int]`: List of [input IDs](../glossary#input-ids) with the appropriate special tokens.
"""
output = [self.cls_token_id] + token_ids_0 + [self.sep_token_id]
if token_ids_1:
output += token_ids_1 + [self.sep_token_id]
return output
def create_token_type_ids_from_sequences(
self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None
) -> List[int]:
"""
Create a mask from the two sequences passed to be used in a sequence-pair classification task. A SqueezeBERT
sequence pair mask has the following format:
```
0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1
| first sequence | second sequence |
```
If `token_ids_1` is `None`, this method only returns the first portion of the mask (0s).
Args:
token_ids_0 (`List[int]`):
List of IDs.
token_ids_1 (`List[int]`, *optional*):
Optional second list of IDs for sequence pairs.
Returns:
`List[int]`: List of [token type IDs](../glossary#token-type-ids) according to the given sequence(s).
"""
sep = [self.sep_token_id]
cls = [self.cls_token_id]
if token_ids_1 is None:
return len(cls + token_ids_0 + sep) * [0]
return len(cls + token_ids_0 + sep) * [0] + len(token_ids_1 + sep) * [1]
def save_vocabulary(
self, save_directory: str, filename_prefix: Optional[str] = None
) -> Tuple[str]:
files = self._tokenizer.model.save(save_directory, name=filename_prefix)
return tuple(files)
|
PypiClean
|
/codat-sync-for-commerce-0.31.0.tar.gz/codat-sync-for-commerce-0.31.0/src/codatsynccommerce/models/shared/companysyncstatus.py
|
from __future__ import annotations
import dataclasses
from codatsynccommerce import utils
from dataclasses_json import Undefined, dataclass_json
from typing import Optional
@dataclass_json(undefined=Undefined.EXCLUDE)
@dataclasses.dataclass
class CompanySyncStatus:
r"""Success"""
company_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('companyId'), 'exclude': lambda f: f is None }})
r"""Unique identifier for your SMB in Codat."""
data_pushed: Optional[bool] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dataPushed'), 'exclude': lambda f: f is None }})
r"""Boolean of whether the sync resulted in data being pushed."""
error_message: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('errorMessage'), 'exclude': lambda f: f is None }})
r"""Error message of the sync."""
sync_exception_message: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('syncExceptionMessage'), 'exclude': lambda f: f is None }})
r"""Exception message of the sync."""
sync_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('syncId'), 'exclude': lambda f: f is None }})
r"""Unique identifier of the sync."""
sync_status: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('syncStatus'), 'exclude': lambda f: f is None }})
r"""Text status of the sync."""
sync_status_code: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('syncStatusCode'), 'exclude': lambda f: f is None }})
r"""Status code of the sync."""
sync_utc: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('syncUtc'), 'exclude': lambda f: f is None }})
r"""In Codat's data model, dates and times are represented using the <a class=\\"external\\" href=\\"https://en.wikipedia.org/wiki/ISO_8601\\" target=\\"_blank\\">ISO 8601 standard</a>. Date and time fields are formatted as strings; for example:
```
2020-10-08T22:40:50Z
2021-01-01T00:00:00
```
When syncing data that contains `DateTime` fields from Codat, make sure you support the following cases when reading time information:
- Coordinated Universal Time (UTC): `2021-11-15T06:00:00Z`
- Unqualified local time: `2021-11-15T01:00:00`
- UTC time offsets: `2021-11-15T01:00:00-05:00`
> Time zones
>
> Not all dates from Codat will contain information about time zones.
> Where it is not available from the underlying platform, Codat will return these as times local to the business whose data has been synced.
"""
|
PypiClean
|
/ydk-models-cisco-ios-xr-6.6.3.tar.gz/ydk-models-cisco-ios-xr-6.6.3/ydk/models/cisco_ios_xr/_meta/_Cisco_IOS_XR_ipv4_hsrp_cfg.py
|
from ydk._core._dm_meta_info import _MetaInfoClassMember, _MetaInfoClass, _MetaInfoEnum
from ydk._core._dm_meta_info import ATTRIBUTE, REFERENCE_LIST, REFERENCE_LEAFLIST, REFERENCE_BITS, REFERENCE_UNION
from ydk._core._dm_meta_info import REFERENCE_CLASS, REFERENCE_IDENTITY_CLASS, REFERENCE_ENUM_CLASS, ANYXML_CLASS
from ydk._core._importer import _yang_ns
_meta_table = {
'HsrpLinklocal' : _MetaInfoEnum('HsrpLinklocal',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'HsrpLinklocal',
'''Hsrp linklocal''',
{
'manual':'manual',
'auto':'auto',
'legacy':'legacy',
}, 'Cisco-IOS-XR-ipv4-hsrp-cfg', _yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg']),
'Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group.Bfd' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group.Bfd', REFERENCE_CLASS,
'''Enable use of Bidirectional Forwarding
Detection''',
False,
[
_MetaInfoClassMember('address', ATTRIBUTE, 'str', 'inet:ipv6-address-no-zone',
None, None,
[], [b'((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\p{N}\\p{L}]+)?'],
''' Enable BFD for this remote IP
''',
'address',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str', 'xr:Interface-name',
None, None,
[], [b'[a-zA-Z0-9._/-]+'],
''' Interface name to run BFD
''',
'interface_name',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'bfd',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group.TrackedInterfaces.TrackedInterface' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group.TrackedInterfaces.TrackedInterface', REFERENCE_LIST,
'''Interface being tracked''',
False,
[
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str', 'xr:Interface-name',
None, None,
[], [b'[a-zA-Z0-9._/-]+'],
''' Interface being tracked
''',
'interface_name',
'Cisco-IOS-XR-ipv4-hsrp-cfg', True),
_MetaInfoClassMember('priority-decrement', ATTRIBUTE, 'int', 'uint32',
None, None,
[('1', '255')], [],
''' Priority decrement
''',
'priority_decrement',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False, is_mandatory=True),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'tracked-interface',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group.TrackedInterfaces' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group.TrackedInterfaces', REFERENCE_CLASS,
'''The HSRP tracked interface configuration
table''',
False,
[
_MetaInfoClassMember('tracked-interface', REFERENCE_LIST, 'TrackedInterface', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group.TrackedInterfaces.TrackedInterface',
[], [],
''' Interface being tracked
''',
'tracked_interface',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'tracked-interfaces',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group.TrackedObjects.TrackedObject' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group.TrackedObjects.TrackedObject', REFERENCE_LIST,
'''Object being tracked''',
False,
[
_MetaInfoClassMember('object-name', ATTRIBUTE, 'str', 'xr:Cisco-ios-xr-string',
None, None,
[], [b'[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Interface being tracked
''',
'object_name',
'Cisco-IOS-XR-ipv4-hsrp-cfg', True),
_MetaInfoClassMember('priority-decrement', ATTRIBUTE, 'int', 'uint32',
None, None,
[('1', '255')], [],
''' Priority decrement
''',
'priority_decrement',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False, is_mandatory=True),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'tracked-object',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group.TrackedObjects' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group.TrackedObjects', REFERENCE_CLASS,
'''The HSRP tracked interface configuration
table''',
False,
[
_MetaInfoClassMember('tracked-object', REFERENCE_LIST, 'TrackedObject', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group.TrackedObjects.TrackedObject',
[], [],
''' Object being tracked
''',
'tracked_object',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'tracked-objects',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group.Timers' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group.Timers', REFERENCE_CLASS,
'''Hello and hold timers''',
False,
[
_MetaInfoClassMember('hello-msec-flag', ATTRIBUTE, 'bool', 'boolean',
None, None,
[], [],
''' TRUE - Hello time configured in
milliseconds, FALSE - Hello time
configured in seconds
''',
'hello_msec_flag',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False, default_value='False'),
_MetaInfoClassMember('hello-msec', ATTRIBUTE, 'int', 'uint32',
None, None,
[('100', '3000')], [],
''' Hello time in msecs
''',
'hello_msec',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('hello-sec', ATTRIBUTE, 'int', 'uint32',
None, None,
[('1', '255')], [],
''' Hello time in seconds
''',
'hello_sec',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False, default_value="3"),
_MetaInfoClassMember('hold-msec-flag', ATTRIBUTE, 'bool', 'boolean',
None, None,
[], [],
''' TRUE - Hold time configured in
milliseconds, FALSE - Hold time
configured in seconds
''',
'hold_msec_flag',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False, default_value='False'),
_MetaInfoClassMember('hold-msec', ATTRIBUTE, 'int', 'uint32',
None, None,
[('100', '3000')], [],
''' Hold time in msecs
''',
'hold_msec',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('hold-sec', ATTRIBUTE, 'int', 'uint32',
None, None,
[('1', '255')], [],
''' Hold time in seconds
''',
'hold_sec',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False, default_value="10"),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'timers',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group.LinkLocalIpv6Address' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group.LinkLocalIpv6Address', REFERENCE_CLASS,
'''The HSRP IPv6 virtual linklocal address''',
False,
[
_MetaInfoClassMember('address', ATTRIBUTE, 'str', 'inet:ipv6-address-no-zone',
None, None,
[], [b'((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\p{N}\\p{L}]+)?'],
''' HSRP IPv6 virtual linklocal address
''',
'address',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False, has_when=True),
_MetaInfoClassMember('auto-configure', REFERENCE_ENUM_CLASS, 'HsrpLinklocal', 'Hsrp-linklocal',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'HsrpLinklocal',
[], [],
''' Linklocal Configuration Type
''',
'auto_configure',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False, default_value='Cisco_IOS_XR_ipv4_hsrp_cfg.HsrpLinklocal.manual'),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'link-local-ipv6-address',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group.GlobalIpv6Addresses.GlobalIpv6Address' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group.GlobalIpv6Addresses.GlobalIpv6Address', REFERENCE_LIST,
'''A HSRP virtual global IPv6 IP address''',
False,
[
_MetaInfoClassMember('address', ATTRIBUTE, 'str', 'inet:ipv6-address-no-zone',
None, None,
[], [b'((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\p{N}\\p{L}]+)?'],
''' HSRP virtual global IPv6 address
''',
'address',
'Cisco-IOS-XR-ipv4-hsrp-cfg', True),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'global-ipv6-address',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group.GlobalIpv6Addresses' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group.GlobalIpv6Addresses', REFERENCE_CLASS,
'''The table of HSRP virtual global IPv6
addresses''',
False,
[
_MetaInfoClassMember('global-ipv6-address', REFERENCE_LIST, 'GlobalIpv6Address', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group.GlobalIpv6Addresses.GlobalIpv6Address',
[], [],
''' A HSRP virtual global IPv6 IP address
''',
'global_ipv6_address',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'global-ipv6-addresses',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group', REFERENCE_LIST,
'''The HSRP group being configured''',
False,
[
_MetaInfoClassMember('group-number', ATTRIBUTE, 'int', 'uint32',
None, None,
[('0', '4095')], [],
''' HSRP group number
''',
'group_number',
'Cisco-IOS-XR-ipv4-hsrp-cfg', True),
_MetaInfoClassMember('bfd', REFERENCE_CLASS, 'Bfd', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group.Bfd',
[], [],
''' Enable use of Bidirectional Forwarding
Detection
''',
'bfd',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('tracked-interfaces', REFERENCE_CLASS, 'TrackedInterfaces', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group.TrackedInterfaces',
[], [],
''' The HSRP tracked interface configuration
table
''',
'tracked_interfaces',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('tracked-objects', REFERENCE_CLASS, 'TrackedObjects', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group.TrackedObjects',
[], [],
''' The HSRP tracked interface configuration
table
''',
'tracked_objects',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('timers', REFERENCE_CLASS, 'Timers', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group.Timers',
[], [],
''' Hello and hold timers
''',
'timers',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('link-local-ipv6-address', REFERENCE_CLASS, 'LinkLocalIpv6Address', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group.LinkLocalIpv6Address',
[], [],
''' The HSRP IPv6 virtual linklocal address
''',
'link_local_ipv6_address',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('global-ipv6-addresses', REFERENCE_CLASS, 'GlobalIpv6Addresses', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group.GlobalIpv6Addresses',
[], [],
''' The table of HSRP virtual global IPv6
addresses
''',
'global_ipv6_addresses',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('priority', ATTRIBUTE, 'int', 'uint32',
None, None,
[('0', '255')], [],
''' Priority value
''',
'priority',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False, default_value="100"),
_MetaInfoClassMember('preempt', ATTRIBUTE, 'int', 'uint32',
None, None,
[('0', '4294967295')], [],
''' Force active if higher priority
''',
'preempt',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False, default_value="0"),
_MetaInfoClassMember('session-name', ATTRIBUTE, 'str', 'xr:Cisco-ios-xr-string',
None, None,
[(1, 16)], [],
''' HSRP Session name (for MGO)
''',
'session_name',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('virtual-mac-address', ATTRIBUTE, 'str', 'yang:mac-address',
None, None,
[], [b'[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' HSRP MAC address
''',
'virtual_mac_address',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'group',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv6.Version2.Groups' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv6.Version2.Groups', REFERENCE_CLASS,
'''The HSRP group configuration table''',
False,
[
_MetaInfoClassMember('group', REFERENCE_LIST, 'Group', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group',
[], [],
''' The HSRP group being configured
''',
'group',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'groups',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv6.Version2' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv6.Version2', REFERENCE_CLASS,
'''Version 2 HSRP configuration''',
False,
[
_MetaInfoClassMember('groups', REFERENCE_CLASS, 'Groups', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv6.Version2.Groups',
[], [],
''' The HSRP group configuration table
''',
'groups',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'version2',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv6.SlaveGroups.SlaveGroup.LinkLocalIpv6Address' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv6.SlaveGroups.SlaveGroup.LinkLocalIpv6Address', REFERENCE_CLASS,
'''The HSRP IPv6 virtual linklocal address''',
False,
[
_MetaInfoClassMember('address', ATTRIBUTE, 'str', 'inet:ipv6-address-no-zone',
None, None,
[], [b'((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\p{N}\\p{L}]+)?'],
''' HSRP IPv6 virtual linklocal address
''',
'address',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False, has_when=True),
_MetaInfoClassMember('auto-configure', REFERENCE_ENUM_CLASS, 'HsrpLinklocal', 'Hsrp-linklocal',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'HsrpLinklocal',
[], [],
''' Linklocal Configuration Type
''',
'auto_configure',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False, default_value='Cisco_IOS_XR_ipv4_hsrp_cfg.HsrpLinklocal.manual'),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'link-local-ipv6-address',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv6.SlaveGroups.SlaveGroup.GlobalIpv6Addresses.GlobalIpv6Address' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv6.SlaveGroups.SlaveGroup.GlobalIpv6Addresses.GlobalIpv6Address', REFERENCE_LIST,
'''A HSRP virtual global IPv6 IP address''',
False,
[
_MetaInfoClassMember('address', ATTRIBUTE, 'str', 'inet:ipv6-address-no-zone',
None, None,
[], [b'((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\p{N}\\p{L}]+)?'],
''' HSRP virtual global IPv6 address
''',
'address',
'Cisco-IOS-XR-ipv4-hsrp-cfg', True),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'global-ipv6-address',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv6.SlaveGroups.SlaveGroup.GlobalIpv6Addresses' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv6.SlaveGroups.SlaveGroup.GlobalIpv6Addresses', REFERENCE_CLASS,
'''The table of HSRP virtual global IPv6
addresses''',
False,
[
_MetaInfoClassMember('global-ipv6-address', REFERENCE_LIST, 'GlobalIpv6Address', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv6.SlaveGroups.SlaveGroup.GlobalIpv6Addresses.GlobalIpv6Address',
[], [],
''' A HSRP virtual global IPv6 IP address
''',
'global_ipv6_address',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'global-ipv6-addresses',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv6.SlaveGroups.SlaveGroup' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv6.SlaveGroups.SlaveGroup', REFERENCE_LIST,
'''The HSRP slave group being configured''',
False,
[
_MetaInfoClassMember('slave-group-number', ATTRIBUTE, 'int', 'uint32',
None, None,
[('0', '4095')], [],
''' HSRP group number
''',
'slave_group_number',
'Cisco-IOS-XR-ipv4-hsrp-cfg', True),
_MetaInfoClassMember('link-local-ipv6-address', REFERENCE_CLASS, 'LinkLocalIpv6Address', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv6.SlaveGroups.SlaveGroup.LinkLocalIpv6Address',
[], [],
''' The HSRP IPv6 virtual linklocal address
''',
'link_local_ipv6_address',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('global-ipv6-addresses', REFERENCE_CLASS, 'GlobalIpv6Addresses', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv6.SlaveGroups.SlaveGroup.GlobalIpv6Addresses',
[], [],
''' The table of HSRP virtual global IPv6
addresses
''',
'global_ipv6_addresses',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('follow', ATTRIBUTE, 'str', 'string',
None, None,
[], [],
''' HSRP Group name for this slave to follow
''',
'follow',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('virtual-mac-address', ATTRIBUTE, 'str', 'yang:mac-address',
None, None,
[], [b'[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' HSRP MAC address
''',
'virtual_mac_address',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'slave-group',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv6.SlaveGroups' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv6.SlaveGroups', REFERENCE_CLASS,
'''The HSRP slave group configuration table''',
False,
[
_MetaInfoClassMember('slave-group', REFERENCE_LIST, 'SlaveGroup', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv6.SlaveGroups.SlaveGroup',
[], [],
''' The HSRP slave group being configured
''',
'slave_group',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'slave-groups',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv6' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv6', REFERENCE_CLASS,
'''IPv6 HSRP configuration''',
False,
[
_MetaInfoClassMember('version2', REFERENCE_CLASS, 'Version2', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv6.Version2',
[], [],
''' Version 2 HSRP configuration
''',
'version2',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('slave-groups', REFERENCE_CLASS, 'SlaveGroups', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv6.SlaveGroups',
[], [],
''' The HSRP slave group configuration table
''',
'slave_groups',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'ipv6',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Bfd' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Bfd', REFERENCE_CLASS,
'''BFD configuration''',
False,
[
_MetaInfoClassMember('detection-multiplier', ATTRIBUTE, 'int', 'uint32',
None, None,
[('2', '50')], [],
''' Detection multiplier for BFD sessions created
by hsrp
''',
'detection_multiplier',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('interval', ATTRIBUTE, 'int', 'uint32',
None, None,
[('3', '30000')], [],
''' Hello interval for BFD sessions created by
hsrp
''',
'interval',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'bfd',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Delay' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Delay', REFERENCE_CLASS,
'''Minimum and Reload Delay''',
False,
[
_MetaInfoClassMember('minimum-delay', ATTRIBUTE, 'int', 'uint32',
None, None,
[('0', '10000')], [],
''' Minimum delay in seconds
''',
'minimum_delay',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False, is_mandatory=True),
_MetaInfoClassMember('reload-delay', ATTRIBUTE, 'int', 'uint32',
None, None,
[('0', '10000')], [],
''' Reload delay in seconds
''',
'reload_delay',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False, is_mandatory=True),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'delay',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
is_presence=True,
),
},
'Hsrp.Interfaces.Interface.Ipv4.SlaveGroups.SlaveGroup.SecondaryIpv4Addresses.SecondaryIpv4Address' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv4.SlaveGroups.SlaveGroup.SecondaryIpv4Addresses.SecondaryIpv4Address', REFERENCE_LIST,
'''Secondary HSRP IP address''',
False,
[
_MetaInfoClassMember('address', ATTRIBUTE, 'str', 'inet:ipv4-address-no-zone',
None, None,
[], [b'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' HSRP IP address
''',
'address',
'Cisco-IOS-XR-ipv4-hsrp-cfg', True),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'secondary-ipv4-address',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv4.SlaveGroups.SlaveGroup.SecondaryIpv4Addresses' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv4.SlaveGroups.SlaveGroup.SecondaryIpv4Addresses', REFERENCE_CLASS,
'''Secondary HSRP IP address Table''',
False,
[
_MetaInfoClassMember('secondary-ipv4-address', REFERENCE_LIST, 'SecondaryIpv4Address', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv4.SlaveGroups.SlaveGroup.SecondaryIpv4Addresses.SecondaryIpv4Address',
[], [],
''' Secondary HSRP IP address
''',
'secondary_ipv4_address',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'secondary-ipv4-addresses',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv4.SlaveGroups.SlaveGroup' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv4.SlaveGroups.SlaveGroup', REFERENCE_LIST,
'''The HSRP slave group being configured''',
False,
[
_MetaInfoClassMember('slave-group-number', ATTRIBUTE, 'int', 'uint32',
None, None,
[('0', '4095')], [],
''' HSRP group number
''',
'slave_group_number',
'Cisco-IOS-XR-ipv4-hsrp-cfg', True),
_MetaInfoClassMember('secondary-ipv4-addresses', REFERENCE_CLASS, 'SecondaryIpv4Addresses', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv4.SlaveGroups.SlaveGroup.SecondaryIpv4Addresses',
[], [],
''' Secondary HSRP IP address Table
''',
'secondary_ipv4_addresses',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('follow', ATTRIBUTE, 'str', 'string',
None, None,
[], [],
''' HSRP Group name for this slave to follow
''',
'follow',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('virtual-mac-address', ATTRIBUTE, 'str', 'yang:mac-address',
None, None,
[], [b'[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' HSRP MAC address
''',
'virtual_mac_address',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('primary-ipv4-address', ATTRIBUTE, 'str', 'inet:ipv4-address-no-zone',
None, None,
[], [b'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Primary HSRP IP address
''',
'primary_ipv4_address',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'slave-group',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv4.SlaveGroups' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv4.SlaveGroups', REFERENCE_CLASS,
'''The HSRP slave group configuration table''',
False,
[
_MetaInfoClassMember('slave-group', REFERENCE_LIST, 'SlaveGroup', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv4.SlaveGroups.SlaveGroup',
[], [],
''' The HSRP slave group being configured
''',
'slave_group',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'slave-groups',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group.TrackedInterfaces.TrackedInterface' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group.TrackedInterfaces.TrackedInterface', REFERENCE_LIST,
'''Interface being tracked''',
False,
[
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str', 'xr:Interface-name',
None, None,
[], [b'[a-zA-Z0-9._/-]+'],
''' Interface being tracked
''',
'interface_name',
'Cisco-IOS-XR-ipv4-hsrp-cfg', True),
_MetaInfoClassMember('priority-decrement', ATTRIBUTE, 'int', 'uint32',
None, None,
[('1', '255')], [],
''' Priority decrement
''',
'priority_decrement',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False, is_mandatory=True),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'tracked-interface',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group.TrackedInterfaces' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group.TrackedInterfaces', REFERENCE_CLASS,
'''The HSRP tracked interface configuration
table''',
False,
[
_MetaInfoClassMember('tracked-interface', REFERENCE_LIST, 'TrackedInterface', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group.TrackedInterfaces.TrackedInterface',
[], [],
''' Interface being tracked
''',
'tracked_interface',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'tracked-interfaces',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group.Bfd' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group.Bfd', REFERENCE_CLASS,
'''Enable use of Bidirectional Forwarding
Detection''',
False,
[
_MetaInfoClassMember('address', ATTRIBUTE, 'str', 'inet:ipv4-address-no-zone',
None, None,
[], [b'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Enable BFD for this remote IP
''',
'address',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str', 'xr:Interface-name',
None, None,
[], [b'[a-zA-Z0-9._/-]+'],
''' Interface name to run BFD
''',
'interface_name',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'bfd',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group.TrackedObjects.TrackedObject' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group.TrackedObjects.TrackedObject', REFERENCE_LIST,
'''Object being tracked''',
False,
[
_MetaInfoClassMember('object-name', ATTRIBUTE, 'str', 'xr:Cisco-ios-xr-string',
None, None,
[], [b'[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Interface being tracked
''',
'object_name',
'Cisco-IOS-XR-ipv4-hsrp-cfg', True),
_MetaInfoClassMember('priority-decrement', ATTRIBUTE, 'int', 'uint32',
None, None,
[('1', '255')], [],
''' Priority decrement
''',
'priority_decrement',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False, is_mandatory=True),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'tracked-object',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group.TrackedObjects' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group.TrackedObjects', REFERENCE_CLASS,
'''The HSRP tracked interface configuration
table''',
False,
[
_MetaInfoClassMember('tracked-object', REFERENCE_LIST, 'TrackedObject', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group.TrackedObjects.TrackedObject',
[], [],
''' Object being tracked
''',
'tracked_object',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'tracked-objects',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group.Timers' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group.Timers', REFERENCE_CLASS,
'''Hello and hold timers''',
False,
[
_MetaInfoClassMember('hello-msec-flag', ATTRIBUTE, 'bool', 'boolean',
None, None,
[], [],
''' TRUE - Hello time configured in
milliseconds, FALSE - Hello time
configured in seconds
''',
'hello_msec_flag',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False, default_value='False'),
_MetaInfoClassMember('hello-msec', ATTRIBUTE, 'int', 'uint32',
None, None,
[('100', '3000')], [],
''' Hello time in msecs
''',
'hello_msec',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('hello-sec', ATTRIBUTE, 'int', 'uint32',
None, None,
[('1', '255')], [],
''' Hello time in seconds
''',
'hello_sec',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False, default_value="3"),
_MetaInfoClassMember('hold-msec-flag', ATTRIBUTE, 'bool', 'boolean',
None, None,
[], [],
''' TRUE - Hold time configured in
milliseconds, FALSE - Hold time
configured in seconds
''',
'hold_msec_flag',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False, default_value='False'),
_MetaInfoClassMember('hold-msec', ATTRIBUTE, 'int', 'uint32',
None, None,
[('100', '3000')], [],
''' Hold time in msecs
''',
'hold_msec',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('hold-sec', ATTRIBUTE, 'int', 'uint32',
None, None,
[('1', '255')], [],
''' Hold time in seconds
''',
'hold_sec',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False, default_value="10"),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'timers',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group.PrimaryIpv4Address' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group.PrimaryIpv4Address', REFERENCE_CLASS,
'''Primary HSRP IP address''',
False,
[
_MetaInfoClassMember('virtual-ip-learn', ATTRIBUTE, 'bool', 'boolean',
None, None,
[], [],
''' TRUE if the HSRP protocol is to learn the
virtual IP address it is to use
''',
'virtual_ip_learn',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('address', ATTRIBUTE, 'str', 'inet:ipv4-address-no-zone',
None, None,
[], [b'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' HSRP IP address.
''',
'address',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False, has_when=True),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'primary-ipv4-address',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group.SecondaryIpv4Addresses.SecondaryIpv4Address' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group.SecondaryIpv4Addresses.SecondaryIpv4Address', REFERENCE_LIST,
'''Secondary HSRP IP address''',
False,
[
_MetaInfoClassMember('address', ATTRIBUTE, 'str', 'inet:ipv4-address-no-zone',
None, None,
[], [b'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' HSRP IP address
''',
'address',
'Cisco-IOS-XR-ipv4-hsrp-cfg', True),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'secondary-ipv4-address',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group.SecondaryIpv4Addresses' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group.SecondaryIpv4Addresses', REFERENCE_CLASS,
'''Secondary HSRP IP address Table''',
False,
[
_MetaInfoClassMember('secondary-ipv4-address', REFERENCE_LIST, 'SecondaryIpv4Address', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group.SecondaryIpv4Addresses.SecondaryIpv4Address',
[], [],
''' Secondary HSRP IP address
''',
'secondary_ipv4_address',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'secondary-ipv4-addresses',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group', REFERENCE_LIST,
'''The HSRP group being configured''',
False,
[
_MetaInfoClassMember('group-number', ATTRIBUTE, 'int', 'uint32',
None, None,
[('0', '255')], [],
''' HSRP group number
''',
'group_number',
'Cisco-IOS-XR-ipv4-hsrp-cfg', True),
_MetaInfoClassMember('tracked-interfaces', REFERENCE_CLASS, 'TrackedInterfaces', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group.TrackedInterfaces',
[], [],
''' The HSRP tracked interface configuration
table
''',
'tracked_interfaces',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('bfd', REFERENCE_CLASS, 'Bfd', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group.Bfd',
[], [],
''' Enable use of Bidirectional Forwarding
Detection
''',
'bfd',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('tracked-objects', REFERENCE_CLASS, 'TrackedObjects', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group.TrackedObjects',
[], [],
''' The HSRP tracked interface configuration
table
''',
'tracked_objects',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('timers', REFERENCE_CLASS, 'Timers', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group.Timers',
[], [],
''' Hello and hold timers
''',
'timers',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('primary-ipv4-address', REFERENCE_CLASS, 'PrimaryIpv4Address', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group.PrimaryIpv4Address',
[], [],
''' Primary HSRP IP address
''',
'primary_ipv4_address',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('secondary-ipv4-addresses', REFERENCE_CLASS, 'SecondaryIpv4Addresses', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group.SecondaryIpv4Addresses',
[], [],
''' Secondary HSRP IP address Table
''',
'secondary_ipv4_addresses',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('authentication', ATTRIBUTE, 'str', 'string',
None, None,
[(1, 8)], [],
''' Authentication string
''',
'authentication',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False, default_value="'cisco'"),
_MetaInfoClassMember('session-name', ATTRIBUTE, 'str', 'xr:Cisco-ios-xr-string',
None, None,
[(1, 16)], [],
''' HSRP Session name (for MGO)
''',
'session_name',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('priority', ATTRIBUTE, 'int', 'uint32',
None, None,
[('0', '255')], [],
''' Priority value
''',
'priority',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False, default_value="100"),
_MetaInfoClassMember('preempt', ATTRIBUTE, 'int', 'uint32',
None, None,
[('0', '4294967295')], [],
''' Force active if higher priority
''',
'preempt',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False, default_value="0"),
_MetaInfoClassMember('virtual-mac-address', ATTRIBUTE, 'str', 'yang:mac-address',
None, None,
[], [b'[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' HSRP MAC address
''',
'virtual_mac_address',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'group',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv4.Version1.Groups' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv4.Version1.Groups', REFERENCE_CLASS,
'''The HSRP group configuration table''',
False,
[
_MetaInfoClassMember('group', REFERENCE_LIST, 'Group', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group',
[], [],
''' The HSRP group being configured
''',
'group',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'groups',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv4.Version1' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv4.Version1', REFERENCE_CLASS,
'''Version 1 HSRP configuration''',
False,
[
_MetaInfoClassMember('groups', REFERENCE_CLASS, 'Groups', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv4.Version1.Groups',
[], [],
''' The HSRP group configuration table
''',
'groups',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'version1',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group.SecondaryIpv4Addresses.SecondaryIpv4Address' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group.SecondaryIpv4Addresses.SecondaryIpv4Address', REFERENCE_LIST,
'''Secondary HSRP IP address''',
False,
[
_MetaInfoClassMember('address', ATTRIBUTE, 'str', 'inet:ipv4-address-no-zone',
None, None,
[], [b'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' HSRP IP address
''',
'address',
'Cisco-IOS-XR-ipv4-hsrp-cfg', True),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'secondary-ipv4-address',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group.SecondaryIpv4Addresses' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group.SecondaryIpv4Addresses', REFERENCE_CLASS,
'''Secondary HSRP IP address Table''',
False,
[
_MetaInfoClassMember('secondary-ipv4-address', REFERENCE_LIST, 'SecondaryIpv4Address', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group.SecondaryIpv4Addresses.SecondaryIpv4Address',
[], [],
''' Secondary HSRP IP address
''',
'secondary_ipv4_address',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'secondary-ipv4-addresses',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group.Bfd' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group.Bfd', REFERENCE_CLASS,
'''Enable use of Bidirectional Forwarding
Detection''',
False,
[
_MetaInfoClassMember('address', ATTRIBUTE, 'str', 'inet:ipv4-address-no-zone',
None, None,
[], [b'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Enable BFD for this remote IP
''',
'address',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str', 'xr:Interface-name',
None, None,
[], [b'[a-zA-Z0-9._/-]+'],
''' Interface name to run BFD
''',
'interface_name',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'bfd',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group.PrimaryIpv4Address' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group.PrimaryIpv4Address', REFERENCE_CLASS,
'''Primary HSRP IP address''',
False,
[
_MetaInfoClassMember('virtual-ip-learn', ATTRIBUTE, 'bool', 'boolean',
None, None,
[], [],
''' TRUE if the HSRP protocol is to learn the
virtual IP address it is to use
''',
'virtual_ip_learn',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('address', ATTRIBUTE, 'str', 'inet:ipv4-address-no-zone',
None, None,
[], [b'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' HSRP IP address.
''',
'address',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False, has_when=True),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'primary-ipv4-address',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group.TrackedObjects.TrackedObject' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group.TrackedObjects.TrackedObject', REFERENCE_LIST,
'''Object being tracked''',
False,
[
_MetaInfoClassMember('object-name', ATTRIBUTE, 'str', 'xr:Cisco-ios-xr-string',
None, None,
[], [b'[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Interface being tracked
''',
'object_name',
'Cisco-IOS-XR-ipv4-hsrp-cfg', True),
_MetaInfoClassMember('priority-decrement', ATTRIBUTE, 'int', 'uint32',
None, None,
[('1', '255')], [],
''' Priority decrement
''',
'priority_decrement',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False, is_mandatory=True),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'tracked-object',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group.TrackedObjects' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group.TrackedObjects', REFERENCE_CLASS,
'''The HSRP tracked interface configuration
table''',
False,
[
_MetaInfoClassMember('tracked-object', REFERENCE_LIST, 'TrackedObject', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group.TrackedObjects.TrackedObject',
[], [],
''' Object being tracked
''',
'tracked_object',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'tracked-objects',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group.TrackedInterfaces.TrackedInterface' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group.TrackedInterfaces.TrackedInterface', REFERENCE_LIST,
'''Interface being tracked''',
False,
[
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str', 'xr:Interface-name',
None, None,
[], [b'[a-zA-Z0-9._/-]+'],
''' Interface being tracked
''',
'interface_name',
'Cisco-IOS-XR-ipv4-hsrp-cfg', True),
_MetaInfoClassMember('priority-decrement', ATTRIBUTE, 'int', 'uint32',
None, None,
[('1', '255')], [],
''' Priority decrement
''',
'priority_decrement',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False, is_mandatory=True),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'tracked-interface',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group.TrackedInterfaces' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group.TrackedInterfaces', REFERENCE_CLASS,
'''The HSRP tracked interface configuration
table''',
False,
[
_MetaInfoClassMember('tracked-interface', REFERENCE_LIST, 'TrackedInterface', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group.TrackedInterfaces.TrackedInterface',
[], [],
''' Interface being tracked
''',
'tracked_interface',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'tracked-interfaces',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group.Timers' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group.Timers', REFERENCE_CLASS,
'''Hello and hold timers''',
False,
[
_MetaInfoClassMember('hello-msec-flag', ATTRIBUTE, 'bool', 'boolean',
None, None,
[], [],
''' TRUE - Hello time configured in
milliseconds, FALSE - Hello time
configured in seconds
''',
'hello_msec_flag',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False, default_value='False'),
_MetaInfoClassMember('hello-msec', ATTRIBUTE, 'int', 'uint32',
None, None,
[('100', '3000')], [],
''' Hello time in msecs
''',
'hello_msec',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('hello-sec', ATTRIBUTE, 'int', 'uint32',
None, None,
[('1', '255')], [],
''' Hello time in seconds
''',
'hello_sec',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False, default_value="3"),
_MetaInfoClassMember('hold-msec-flag', ATTRIBUTE, 'bool', 'boolean',
None, None,
[], [],
''' TRUE - Hold time configured in
milliseconds, FALSE - Hold time
configured in seconds
''',
'hold_msec_flag',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False, default_value='False'),
_MetaInfoClassMember('hold-msec', ATTRIBUTE, 'int', 'uint32',
None, None,
[('100', '3000')], [],
''' Hold time in msecs
''',
'hold_msec',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('hold-sec', ATTRIBUTE, 'int', 'uint32',
None, None,
[('1', '255')], [],
''' Hold time in seconds
''',
'hold_sec',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False, default_value="10"),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'timers',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group', REFERENCE_LIST,
'''The HSRP group being configured''',
False,
[
_MetaInfoClassMember('group-number', ATTRIBUTE, 'int', 'uint32',
None, None,
[('0', '4095')], [],
''' HSRP group number
''',
'group_number',
'Cisco-IOS-XR-ipv4-hsrp-cfg', True),
_MetaInfoClassMember('secondary-ipv4-addresses', REFERENCE_CLASS, 'SecondaryIpv4Addresses', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group.SecondaryIpv4Addresses',
[], [],
''' Secondary HSRP IP address Table
''',
'secondary_ipv4_addresses',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('bfd', REFERENCE_CLASS, 'Bfd', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group.Bfd',
[], [],
''' Enable use of Bidirectional Forwarding
Detection
''',
'bfd',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('primary-ipv4-address', REFERENCE_CLASS, 'PrimaryIpv4Address', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group.PrimaryIpv4Address',
[], [],
''' Primary HSRP IP address
''',
'primary_ipv4_address',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('tracked-objects', REFERENCE_CLASS, 'TrackedObjects', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group.TrackedObjects',
[], [],
''' The HSRP tracked interface configuration
table
''',
'tracked_objects',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('tracked-interfaces', REFERENCE_CLASS, 'TrackedInterfaces', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group.TrackedInterfaces',
[], [],
''' The HSRP tracked interface configuration
table
''',
'tracked_interfaces',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('timers', REFERENCE_CLASS, 'Timers', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group.Timers',
[], [],
''' Hello and hold timers
''',
'timers',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('preempt', ATTRIBUTE, 'int', 'uint32',
None, None,
[('0', '4294967295')], [],
''' Force active if higher priority
''',
'preempt',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False, default_value="0"),
_MetaInfoClassMember('priority', ATTRIBUTE, 'int', 'uint32',
None, None,
[('0', '255')], [],
''' Priority value
''',
'priority',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False, default_value="100"),
_MetaInfoClassMember('virtual-mac-address', ATTRIBUTE, 'str', 'yang:mac-address',
None, None,
[], [b'[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' HSRP MAC address
''',
'virtual_mac_address',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('session-name', ATTRIBUTE, 'str', 'xr:Cisco-ios-xr-string',
None, None,
[(1, 16)], [],
''' HSRP Session name (for MGO)
''',
'session_name',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'group',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv4.Version2.Groups' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv4.Version2.Groups', REFERENCE_CLASS,
'''The HSRP group configuration table''',
False,
[
_MetaInfoClassMember('group', REFERENCE_LIST, 'Group', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group',
[], [],
''' The HSRP group being configured
''',
'group',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'groups',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv4.Version2' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv4.Version2', REFERENCE_CLASS,
'''Version 2 HSRP configuration''',
False,
[
_MetaInfoClassMember('groups', REFERENCE_CLASS, 'Groups', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv4.Version2.Groups',
[], [],
''' The HSRP group configuration table
''',
'groups',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'version2',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface.Ipv4' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface.Ipv4', REFERENCE_CLASS,
'''IPv4 HSRP configuration''',
False,
[
_MetaInfoClassMember('slave-groups', REFERENCE_CLASS, 'SlaveGroups', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv4.SlaveGroups',
[], [],
''' The HSRP slave group configuration table
''',
'slave_groups',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('version1', REFERENCE_CLASS, 'Version1', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv4.Version1',
[], [],
''' Version 1 HSRP configuration
''',
'version1',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('version2', REFERENCE_CLASS, 'Version2', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv4.Version2',
[], [],
''' Version 2 HSRP configuration
''',
'version2',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'ipv4',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces.Interface' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces.Interface', REFERENCE_LIST,
'''Per-interface HSRP configuration''',
False,
[
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str', 'xr:Interface-name',
None, None,
[], [b'[a-zA-Z0-9._/-]+'],
''' Interface name
''',
'interface_name',
'Cisco-IOS-XR-ipv4-hsrp-cfg', True),
_MetaInfoClassMember('ipv6', REFERENCE_CLASS, 'Ipv6', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv6',
[], [],
''' IPv6 HSRP configuration
''',
'ipv6',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('bfd', REFERENCE_CLASS, 'Bfd', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Bfd',
[], [],
''' BFD configuration
''',
'bfd',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('delay', REFERENCE_CLASS, 'Delay', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Delay',
[], [],
''' Minimum and Reload Delay
''',
'delay',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False, is_presence=True),
_MetaInfoClassMember('ipv4', REFERENCE_CLASS, 'Ipv4', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface.Ipv4',
[], [],
''' IPv4 HSRP configuration
''',
'ipv4',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('mac-refresh', ATTRIBUTE, 'int', 'uint32',
None, None,
[('0', '10000')], [],
''' HSRP MGO slave MAC refresh rate
''',
'mac_refresh',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False, default_value="60"),
_MetaInfoClassMember('use-bia', ATTRIBUTE, 'Empty', 'empty',
None, None,
[], [],
''' Use burned-in address
''',
'use_bia',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('redirects-disable', ATTRIBUTE, 'Empty', 'empty',
None, None,
[], [],
''' Disable HSRP filtered ICMP redirects
''',
'redirects_disable',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'interface',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Interfaces' : {
'meta_info' : _MetaInfoClass('Hsrp.Interfaces', REFERENCE_CLASS,
'''Interface Table for HSRP configuration''',
False,
[
_MetaInfoClassMember('interface', REFERENCE_LIST, 'Interface', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces.Interface',
[], [],
''' Per-interface HSRP configuration
''',
'interface',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'interfaces',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp.Logging' : {
'meta_info' : _MetaInfoClass('Hsrp.Logging', REFERENCE_CLASS,
'''HSRP logging options''',
False,
[
_MetaInfoClassMember('state-change-disable', ATTRIBUTE, 'Empty', 'empty',
None, None,
[], [],
''' HSRP state change IOS messages disable
''',
'state_change_disable',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'logging',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
'Hsrp' : {
'meta_info' : _MetaInfoClass('Hsrp', REFERENCE_CLASS,
'''HSRP configuration''',
False,
[
_MetaInfoClassMember('interfaces', REFERENCE_CLASS, 'Interfaces', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Interfaces',
[], [],
''' Interface Table for HSRP configuration
''',
'interfaces',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
_MetaInfoClassMember('logging', REFERENCE_CLASS, 'Logging', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg', 'Hsrp.Logging',
[], [],
''' HSRP logging options
''',
'logging',
'Cisco-IOS-XR-ipv4-hsrp-cfg', False),
],
'Cisco-IOS-XR-ipv4-hsrp-cfg',
'hsrp',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-ipv4-hsrp-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_cfg',
),
},
}
_meta_table['Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group.TrackedInterfaces.TrackedInterface']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group.TrackedInterfaces']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group.TrackedObjects.TrackedObject']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group.TrackedObjects']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group.GlobalIpv6Addresses.GlobalIpv6Address']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group.GlobalIpv6Addresses']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group.Bfd']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group.TrackedInterfaces']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group.TrackedObjects']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group.Timers']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group.LinkLocalIpv6Address']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group.GlobalIpv6Addresses']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv6.Version2.Groups.Group']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv6.Version2.Groups']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv6.Version2.Groups']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv6.Version2']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv6.SlaveGroups.SlaveGroup.GlobalIpv6Addresses.GlobalIpv6Address']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv6.SlaveGroups.SlaveGroup.GlobalIpv6Addresses']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv6.SlaveGroups.SlaveGroup.LinkLocalIpv6Address']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv6.SlaveGroups.SlaveGroup']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv6.SlaveGroups.SlaveGroup.GlobalIpv6Addresses']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv6.SlaveGroups.SlaveGroup']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv6.SlaveGroups.SlaveGroup']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv6.SlaveGroups']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv6.Version2']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv6']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv6.SlaveGroups']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv6']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv4.SlaveGroups.SlaveGroup.SecondaryIpv4Addresses.SecondaryIpv4Address']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv4.SlaveGroups.SlaveGroup.SecondaryIpv4Addresses']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv4.SlaveGroups.SlaveGroup.SecondaryIpv4Addresses']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv4.SlaveGroups.SlaveGroup']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv4.SlaveGroups.SlaveGroup']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv4.SlaveGroups']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group.TrackedInterfaces.TrackedInterface']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group.TrackedInterfaces']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group.TrackedObjects.TrackedObject']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group.TrackedObjects']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group.SecondaryIpv4Addresses.SecondaryIpv4Address']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group.SecondaryIpv4Addresses']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group.TrackedInterfaces']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group.Bfd']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group.TrackedObjects']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group.Timers']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group.PrimaryIpv4Address']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group.SecondaryIpv4Addresses']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version1.Groups.Group']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version1.Groups']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version1.Groups']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version1']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group.SecondaryIpv4Addresses.SecondaryIpv4Address']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group.SecondaryIpv4Addresses']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group.TrackedObjects.TrackedObject']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group.TrackedObjects']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group.TrackedInterfaces.TrackedInterface']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group.TrackedInterfaces']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group.SecondaryIpv4Addresses']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group.Bfd']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group.PrimaryIpv4Address']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group.TrackedObjects']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group.TrackedInterfaces']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group.Timers']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version2.Groups.Group']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version2.Groups']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version2.Groups']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version2']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv4.SlaveGroups']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv4']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version1']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv4']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv4.Version2']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface.Ipv4']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv6']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Bfd']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Delay']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface']['meta_info']
_meta_table['Hsrp.Interfaces.Interface.Ipv4']['meta_info'].parent =_meta_table['Hsrp.Interfaces.Interface']['meta_info']
_meta_table['Hsrp.Interfaces.Interface']['meta_info'].parent =_meta_table['Hsrp.Interfaces']['meta_info']
_meta_table['Hsrp.Interfaces']['meta_info'].parent =_meta_table['Hsrp']['meta_info']
_meta_table['Hsrp.Logging']['meta_info'].parent =_meta_table['Hsrp']['meta_info']
|
PypiClean
|
/tmlt_core-0.11.0-cp38-cp38-macosx_11_0_x86_64.whl/tmlt/core/transformations/chaining.py
|
# SPDX-License-Identifier: Apache-2.0
# Copyright Tumult Labs 2023
from typing import Any, Callable, Optional
from typeguard import typechecked
from tmlt.core.exceptions import DomainMismatchError, MetricMismatchError
from tmlt.core.transformations.base import Transformation
class ChainTT(Transformation):
"""Transformation constructed by chaining two transformations."""
@typechecked
def __init__(
self,
transformation1: Transformation,
transformation2: Transformation,
hint: Optional[Callable[[Any, Any], Any]] = None,
):
"""Constructor.
Args:
transformation1: Transformation to apply first.
transformation2: Transformation to apply second.
hint: An optional function to compute the intermediate metric value
(after the first transformation, but before the second) for
:meth:`~.stability_relation`. It takes in the same inputs
as :meth:`~.stability_relation`, and is only required if
the transformation's :meth:`~.Transformation.stability_function` raises
:class:`NotImplementedError`.
"""
if transformation1.output_domain != transformation2.input_domain:
raise DomainMismatchError(
(transformation1.output_domain, transformation2.input_domain),
"Can not chain transformations: Mismatching domains.",
)
if transformation1.output_metric != transformation2.input_metric:
raise MetricMismatchError(
(transformation1.output_metric, transformation2.input_metric),
"Can not chain transformations: Mismatching metrics.",
)
super().__init__(
input_domain=transformation1.input_domain,
input_metric=transformation1.input_metric,
output_domain=transformation2.output_domain,
output_metric=transformation2.output_metric,
)
self._transformation1 = transformation1
self._transformation2 = transformation2
self._hint = hint
@typechecked
def stability_function(self, d_in: Any) -> Any:
"""Returns the smallest d_out satisfied by the transformation.
Returns M.privacy_function(T.stability_function(d_in)).
where:
* T1 is the first transformation applied (:attr:`~.transformation1`)
* T2 is the second transformation applied (:attr:`~.transformation2`)
Args:
d_in: Distance between inputs under input_metric.
Raises:
NotImplementedError: If T2.stability_function(T1.stability_function(d_in))
raises :class:`NotImplementedError`.
"""
return self.transformation2.stability_function(
self.transformation1.stability_function(d_in)
)
@typechecked
def stability_relation(self, d_in: Any, d_out: Any) -> bool:
"""Returns True only if outputs are close under close inputs.
Let d_mid = T1.stability_function(d_in), or hint(d_in, d_out) if
T1.stability_function raises :class:`NotImplementedError`.
This returns True only if the following hold:
(1) T1.stability_relation(d_in, d_mid)
(2) T2.stability_relation(d_mid, d_out)
where:
* T1 is the first transformation applied (:attr:`~.transformation1`)
* T2 is the second transformation applied (:attr:`~.transformation2`)
* hint is the hint passed to the constructor.
Args:
d_in: Distance between inputs under input_metric.
d_out: Distance between outputs under output_metric.
"""
self.input_metric.validate(d_in)
self.output_metric.validate(d_out)
try:
d_mid = self.transformation1.stability_function(d_in)
except NotImplementedError as e:
if self._hint is None:
raise ValueError(
"A hint is needed to check this privacy relation, because the "
"stability_relation of self.transformation1 raised a "
f"NotImplementedError: {e}"
) from e
d_mid = self._hint(d_in, d_out)
return self.transformation1.stability_relation(
d_in, d_mid
) and self.transformation2.stability_relation(d_mid, d_out)
@property
def transformation1(self) -> Transformation:
"""Returns the first transformation being applied."""
return self._transformation1
@property
def transformation2(self) -> Transformation:
"""Returns the second transformation being applied."""
return self._transformation2
def __call__(self, data: Any) -> Any:
"""Performs transformation1 followed by transformation2."""
return self._transformation2(self._transformation1(data))
|
PypiClean
|
/acsoo-3.3.0.tar.gz/acsoo-3.3.0/CHANGES.rst
|
Changes
~~~~~~~
.. Future (?)
.. ----------
.. -
3.1.0 (2021-01-04)
------------------
- pr-status: detect PR status from github pull requests URLs found in requirement files;
useful to detect them in comments
- Various improvements to the project template (in gitlab-ci.yml, and
pre-commit config, mostly) `#75 <https://github.com/acsone/acsoo/pull/75>`_,
`#77 <https://github.com/acsone/acsoo/pull/77>`_.
- In acsoo tag, do not complain about empty directories `#76
<https://github.com/acsone/acsoo/pull/76>`_.
- Deprecate ``acsoo freeze`` in favor of ``pip-deepfreeze``.
- Deprecate ``acsoo addons`` in favor of ``manifestoo``.
- Add license and development status check to project template.
- Remove bumpversion from acsoo dependencies. This project is now replaced by
bump2versions, and it's better to install it separately with pipx.
3.0.2 (2020-10-14)
------------------
- Lift setuptools version restriction.
It has https issues with pypi.org since 2020-10-14.
his means odoo-autodiscover>=2 must be used
on Odoo<=10 projects.
See also https://github.com/acsone/setuptools-odoo/issues/10.
3.0.1 (2020-07-29)
------------------
- [REM] Remove mrbob dependency as pip >= 20.2 make install crash
3.0.0 (2020-07-01)
------------------
- [DEL] drop python 2 support (previous versions of acsoo are still available
on PyPI, and for regular use, the python 3 version works for Odoo 8, 9, 10
projects too)
- [ADD] acsoo freeze to limit pip freeze output to dependencies of a given
distribution
- [ADD] acsoo pr-status to print the status of GitHub pull requests
found in requirement files with revision of the form refs/pull/NNN/head
- [DEL] deprecate ``acsoo wheel`` (now supported by pip natively) and
``acsoo release`` (which is automated in GitLab CI)
- [IMP] project template: ci.skip when pushing translation updates
2.1.0 (2020-05-25)
------------------
- [IMP] project template: better and simpler isort config
- [IMP] project template: merge request templates
- [IMP] support non-editable VCS requirements in tag_requirements command
- [DEL] remove --force option of tag_requirements command
as it does nothing useful, since all we want of this command
is to make sure that a tag is present
- [MNT] declarative setuptools configuration
- [IMP] pin flake8 to version 3.7.9 (reminder: acsoo flake8 is deprecated,
use pre-commit instead)
- [IMP] pin pylint-odoo to version 3.1.0 (reminder: acsoo pylint is deprecated,
use pre-commit instead)
2.0.0 (2020-01-15)
------------------
- [IMP] project template: publish html coverage in gitlab-ci
- [IMP] project template: branch coverage in gitlab-ci
- [IMP] project template: pre-commit cache in gitlab-ci
- [DEL] deprecate acsoo pylint in favor of pre-commit and per project .pylintrc
- [IMP] Odoo 13 support
- [IMP] project template: rename requirements-dev.txt to requirements.txt.in,
better reflecting that these are input requirements and not requirements
for the development environment
- [IMP] project template: update copyright year
- [IMP] project template: remove module_auto_update, use click-odoo-update instead
1.9.0 (2019-02-28)
------------------
- [IMP] project template: use pre-commit (black, isort, flake8)
- [FIX] project template: fail on click-odoo-update error
- [FIX] project template: fix deploy log file
- [FIX] acsoo pylint: compatibility with pylint 2
1.8.3 (2019-01-22)
------------------
- [FIX] acsoo pylint: Adapt config to also work with pytlint-odoo 2.0.1
- [IMP] project template: use click-odoo-update
1.8.2 (2018-11-05)
------------------
- [IMP] project template: better way to declare python version
in .gitlab-ci.yml
- Fix acsoo tag for Odoo 12
1.8.1 (2018-10-30)
------------------
- [IMP] ignore pylint C0303 (https://github.com/PyCQA/pylint/issues/289)
1.8.0 (2018-10-29)
------------------
- [IMP] acsoo wheel: add --no-deps, so we can build requirements.txt without
fetching dependencies, and later install the project with --no-index and
--find-links=release/ so as to detect missing dependencies (#38)
- [IMP] acsoo wheel: add --exclude-project option (to build requirements.txt
without the current project), in preparation of #44
- [IMP] acsoo wheel: use a cache of editable git dependencies
- [IMP] acsoo wheel: use pip wheel -e . to build project instead of
setup.py bdist_wheel, since the reason we were doing that has apparently
been resolved in recent pip version (pip issue 3499 referred in a comment
is apparently unrelated unfortunately, so I'm not sure why we were
doing that exactly, probably https://github.com/pypa/pip/issues/3500)
- [IMP] flake8: ignore W503 and W504 by default (line break around logical
operators)
- [IMP] project template: Odoo 12 support
- [IMP] project template: pin acsoo version
- [IMP] project template: acsoo wheel --no-deps, so, combined with
pip install --no-index in the test stage, it verifies that all dependencies
are included in requirements.txt
1.7.1 (2018-07-15)
------------------
- [IMP] project template: add makepot in .gitlab-ci.yml
- [IMP] pylint: whitelist lxml c library
1.7.0 (2018-06-04)
------------------
- [IMP] more python 3 and Odoo 11 support
- [IMP] project template: build stage in gitlab-ci
- [IMP] project template: new style deploy / upgrade
(using checksum upgrades and click-odoo-upgrade script)
- [IMP] project template: enforce odoo-autodiscover>=2 and do not use it
for Odoo >= 11
- [IMP] add --dry-run option to acsoo tag and tag_requirements
- [IMP] make the list of places where tag_requirements can push
configurable
- [IMP] project template: on demand installation of acsoo and ssh-agent
- [IMP] project template: use click-odoo-initdb in gitlab-ci
1.6.0 (2018-02-16)
------------------
- [IMP] checklog: add --no-err-if-empty option
- [IMP] python 3 support
- [IMP] preliminary Odoo 11 support
- [IMP] project template: various improvements
- [IMP] refactoring of get_installable_addons() method for better reusability
1.5.0 (2017-09-19)
------------------
- [IMP] tag_requirements: fetch more aggressively; this solves the errors
trying to write ref with non existent object
- [IMP] tag: always tag requirements when doing acsoo tag
- [IMP] tag: tag requirements before tagging project, so if something fails
when tagging the requirements the project is not tagged and the release
build is not triggered.
- [ADD] addons: add --separator option (and fix tests that were not testing much)
- [IMP] addons: consider current dir as addons dir candidate
- [IMP] pylint: look for module to test in current dir by default, using the
same algorithm as ``addons list``
- [IMP] pylint: support python 3 style odoo/addons namespace (without __init__.py)
1.4.3 (2017-06-16)
------------------
- [IMP] checklog: consider ignore lines starting with # as comments
- [FIX] checklog: the previous release broke checklog color output
1.4.2 (2017-06-16)
------------------
- [IMP] checklog: fail if no log record found in input
- [IMP] checklog: echo with click to be less sensitive to unicode issues
1.4.1 (2017-06-14)
------------------
- [FIX] regression in acsoo release
1.4.0 (2017-06-13)
------------------
- [IMP] colored logging
- [IMP] major change to acsoo tag and tag_editable_requirements. These changes
make it easier to work with a CI-driven release process that is triggered on
new tags. The usual manual ``acsoo release`` process should be mostly unimpacted by
these changes.
- ``tag_editable_requirements`` is now ``tag_requirements``.
- the tags structure has changed from ``{series}-{trigram}_{version}`` to
``{series}-{trigram}-{req_sha}-{egg}``, where ``{req_sha}`` is the sha of the
last change to ``requirements.txt``.
- ``tag_requirements`` includes the egg name in the tag so different commits
in the same repo can be tagged (before, all addons in a given dependency repo had
to be on the same commit).
- when a tag for the given series, trigram and egg already exists on the
dependency commit, ``tag_requirements`` does not attempt to create another
tag (this avoids creating useless tags or forced tags) and
this is sufficient because the sole purpose of these dependency tags is
to avoid commits to be garbage collected.
- ``acsoo tag`` now invokes ``tag_requirements``. In most cases however this
will not place additional tags on dependencies, because the normal workflow
is to invoke ``tag_requirements`` as soon as ``requirements.txt`` is updated.
- ``tag_requirements`` automatically transforms http(s) urls into ssh urls
for the purpose of pushing tags. This allows to maximize the use of http(s)
urls in requirements so CI and scripts do not require ssh access
to the public dependencies. This currently only works for the acsone organization
on github but the mechanism is easy to extend, should the need arise.
1.3.0 (2017-06-04)
------------------
- [IMP] flake8: read additional ``flake8-options`` in acsoo configuration file.
- [IMP] template: series-dependent odoo command in ``.gitlab.ci.yml``.
- [IMP] template: createdb in ``.gitlab-ci.yml`` because Odoo 8 does not do it by
itself.
- [ADD] addons list-depends: ``--exclude`` option
1.2.2 (2017-05-30)
------------------
- [FIX] regression in ``tag``, ``tag_editable_requirements`` and ``release`` commands.
1.2.1 (2017-05-27)
------------------
- [IMP] add possibility to provide main config file as option.
- [IMP] checklog: read default options from ``[checklog]`` section of config file.
- [IMP] pylint: read default options from ``[pylint]`` section of config file.
- [IMP] pylint: the module or package to lint may be provided with ``-m``.
- [IMP] flake8: read default options from ``[flake8]`` section of config file.
The only option so far is ``config`` to provide an alternate flake8
configuration file. This is useful so developer only need to type
``acsoo flake8`` locally, even when a specific configuration is needed,
so it's trivial to run locally with the same config as in CI.
1.1.0 (2017-05-25)
------------------
- [IMP] pylint: BREAKING the package to test must be provided explicitly,
as soon as additional pylint options are provided,
so as to enable easy local testing of a subset of a project. Examples:
``acsoo pylint -- -d some-message odoo``, ``acsoo pylint -- odoo.addons.xyz``;
- [IMP] pylint: disable more code complexity errors: ``too-many-nested-blocks``,
``too-many-return-statements``.
- [IMP] pylint: display messages causing failure last, so emails from CI.
that show the last lines of the log are more relevant.
- [IMP] pylint: display summary of messages that did not cause failure, also
when there is no failure.
- [ADD] ``acsoo addons list`` and ``acsoo addons list-depends``.
- [ADD] ``acsoo checklog``.
1.0.1 (2017-05-21)
------------------
- First public release.
|
PypiClean
|
/async_dash-0.1.0a0-py3-none-any.whl/dash/html/Details.py
|
from dash.development.base_component import Component, _explicitize_args
class Details(Component):
"""A Details component.
Details is a wrapper for the <details> HTML5 element.
For detailed attribute info see:
https://developer.mozilla.org/en-US/docs/Web/HTML/Element/details
Keyword arguments:
- children (a list of or a singular dash component, string or number; optional):
The children of this component.
- id (string; optional):
The ID of this component, used to identify dash components in
callbacks. The ID needs to be unique across all of the components
in an app.
- accessKey (string; optional):
Keyboard shortcut to activate or add focus to the element.
- aria-* (string; optional):
A wildcard aria attribute.
- className (string; optional):
Often used with CSS to style elements with common properties.
- contentEditable (string; optional):
Indicates whether the element's content is editable.
- contextMenu (string; optional):
Defines the ID of a <menu> element which will serve as the
element's context menu.
- data-* (string; optional):
A wildcard data attribute.
- dir (string; optional):
Defines the text direction. Allowed values are ltr (Left-To-Right)
or rtl (Right-To-Left).
- draggable (string; optional):
Defines whether the element can be dragged.
- hidden (a value equal to: 'hidden', 'HIDDEN' | boolean; optional):
Prevents rendering of given element, while keeping child elements,
e.g. script elements, active.
- key (string; optional):
A unique identifier for the component, used to improve performance
by React.js while rendering components See
https://reactjs.org/docs/lists-and-keys.html for more info.
- lang (string; optional):
Defines the language used in the element.
- loading_state (dict; optional):
Object that holds the loading state object coming from
dash-renderer.
`loading_state` is a dict with keys:
- component_name (string; optional):
Holds the name of the component that is loading.
- is_loading (boolean; optional):
Determines if the component is loading or not.
- prop_name (string; optional):
Holds which property is loading.
- n_clicks (number; default 0):
An integer that represents the number of times that this element
has been clicked on.
- n_clicks_timestamp (number; default -1):
An integer that represents the time (in ms since 1970) at which
n_clicks changed. This can be used to tell which button was
changed most recently.
- open (a value equal to: 'open', 'OPEN' | boolean; optional):
Indicates whether the the contents are currently visible (in the
case of a <details> element) or whether the dialog is active and
can be interacted with (in the case of a <dialog> element).
- role (string; optional):
The ARIA role attribute.
- spellCheck (string; optional):
Indicates whether spell checking is allowed for the element.
- style (dict; optional):
Defines CSS styles which will override styles previously set.
- tabIndex (string; optional):
Overrides the browser's default tab order and follows the one
specified instead.
- title (string; optional):
Text to be displayed in a tooltip when hovering over the element."""
@_explicitize_args
def __init__(self, children=None, id=Component.UNDEFINED, n_clicks=Component.UNDEFINED, n_clicks_timestamp=Component.UNDEFINED, key=Component.UNDEFINED, role=Component.UNDEFINED, open=Component.UNDEFINED, accessKey=Component.UNDEFINED, className=Component.UNDEFINED, contentEditable=Component.UNDEFINED, contextMenu=Component.UNDEFINED, dir=Component.UNDEFINED, draggable=Component.UNDEFINED, hidden=Component.UNDEFINED, lang=Component.UNDEFINED, spellCheck=Component.UNDEFINED, style=Component.UNDEFINED, tabIndex=Component.UNDEFINED, title=Component.UNDEFINED, loading_state=Component.UNDEFINED, **kwargs):
self._prop_names = ['children', 'id', 'accessKey', 'aria-*', 'className', 'contentEditable', 'contextMenu', 'data-*', 'dir', 'draggable', 'hidden', 'key', 'lang', 'loading_state', 'n_clicks', 'n_clicks_timestamp', 'open', 'role', 'spellCheck', 'style', 'tabIndex', 'title']
self._type = 'Details'
self._namespace = 'dash_html_components'
self._valid_wildcard_attributes = ['data-', 'aria-']
self.available_properties = ['children', 'id', 'accessKey', 'aria-*', 'className', 'contentEditable', 'contextMenu', 'data-*', 'dir', 'draggable', 'hidden', 'key', 'lang', 'loading_state', 'n_clicks', 'n_clicks_timestamp', 'open', 'role', 'spellCheck', 'style', 'tabIndex', 'title']
self.available_wildcard_properties = ['data-', 'aria-']
_explicit_args = kwargs.pop('_explicit_args')
_locals = locals()
_locals.update(kwargs) # For wildcard attrs
args = {k: _locals[k] for k in _explicit_args if k != 'children'}
for k in []:
if k not in args:
raise TypeError(
'Required argument `' + k + '` was not specified.')
super(Details, self).__init__(children=children, **args)
|
PypiClean
|
/kis-0.10.0.tar.gz/kis-0.10.0/keras_segmentation/data_utils/augmentation.py
|
import numpy as np
try:
import imgaug as ia
from imgaug import augmenters as iaa
except ImportError:
print("Error in loading augmentation, can't import imgaug."
"Please make sure it is installed.")
IMAGE_AUGMENTATION_SEQUENCE = None
IMAGE_AUGMENTATION_NUM_TRIES = 10
loaded_augmentation_name = ""
def _load_augmentation_aug_geometric():
return iaa.OneOf([
iaa.Sequential([iaa.Fliplr(0.5), iaa.Flipud(0.2)]),
iaa.CropAndPad(percent=(-0.05, 0.1),
pad_mode='constant',
pad_cval=(0, 255)),
iaa.Crop(percent=(0.0, 0.1)),
iaa.Crop(percent=(0.3, 0.5)),
iaa.Crop(percent=(0.3, 0.5)),
iaa.Crop(percent=(0.3, 0.5)),
iaa.Sequential([
iaa.Affine(
# scale images to 80-120% of their size,
# individually per axis
scale={"x": (0.8, 1.2), "y": (0.8, 1.2)},
# translate by -20 to +20 percent (per axis)
translate_percent={"x": (-0.2, 0.2), "y": (-0.2, 0.2)},
rotate=(-45, 45), # rotate by -45 to +45 degrees
shear=(-16, 16), # shear by -16 to +16 degrees
# use nearest neighbour or bilinear interpolation (fast)
order=[0, 1],
# if mode is constant, use a cval between 0 and 255
mode='constant',
cval=(0, 255),
# use any of scikit-image's warping modes
# (see 2nd image from the top for examples)
),
iaa.Sometimes(0.3, iaa.Crop(percent=(0.3, 0.5)))])
])
def _load_augmentation_aug_non_geometric():
return iaa.Sequential([
iaa.Sometimes(0.3, iaa.Multiply((0.5, 1.5), per_channel=0.5)),
iaa.Sometimes(0.2, iaa.JpegCompression(compression=(70, 99))),
iaa.Sometimes(0.2, iaa.GaussianBlur(sigma=(0, 3.0))),
iaa.Sometimes(0.2, iaa.MotionBlur(k=15, angle=[-45, 45])),
iaa.Sometimes(0.2, iaa.MultiplyHue((0.5, 1.5))),
iaa.Sometimes(0.2, iaa.MultiplySaturation((0.5, 1.5))),
iaa.Sometimes(0.34, iaa.MultiplyHueAndSaturation((0.5, 1.5),
per_channel=True)),
iaa.Sometimes(0.34, iaa.Grayscale(alpha=(0.0, 1.0))),
iaa.Sometimes(0.2, iaa.ChangeColorTemperature((1100, 10000))),
iaa.Sometimes(0.1, iaa.GammaContrast((0.5, 2.0))),
iaa.Sometimes(0.2, iaa.SigmoidContrast(gain=(3, 10),
cutoff=(0.4, 0.6))),
iaa.Sometimes(0.1, iaa.CLAHE()),
iaa.Sometimes(0.1, iaa.HistogramEqualization()),
iaa.Sometimes(0.2, iaa.LinearContrast((0.5, 2.0), per_channel=0.5)),
iaa.Sometimes(0.1, iaa.Emboss(alpha=(0, 1.0), strength=(0, 2.0)))
])
def _load_augmentation_aug_all2():
return iaa.Sequential([
iaa.Sometimes(0.65, _load_augmentation_aug_non_geometric()),
iaa.Sometimes(0.65, _load_augmentation_aug_geometric())
])
def _load_augmentation_aug_all():
""" Load image augmentation model """
def sometimes(aug):
return iaa.Sometimes(0.5, aug)
return iaa.Sequential(
[
# apply the following augmenters to most images
iaa.Fliplr(0.5), # horizontally flip 50% of all images
iaa.Flipud(0.2), # vertically flip 20% of all images
# crop images by -5% to 10% of their height/width
sometimes(iaa.CropAndPad(
percent=(-0.05, 0.1),
pad_mode='constant',
pad_cval=(0, 255)
)),
sometimes(iaa.Affine(
# scale images to 80-120% of their size, individually per axis
scale={"x": (0.8, 1.2), "y": (0.8, 1.2)},
# translate by -20 to +20 percent (per axis)
translate_percent={"x": (-0.2, 0.2), "y": (-0.2, 0.2)},
rotate=(-45, 45), # rotate by -45 to +45 degrees
shear=(-16, 16), # shear by -16 to +16 degrees
# use nearest neighbour or bilinear interpolation (fast)
order=[0, 1],
# if mode is constant, use a cval between 0 and 255
cval=(0, 255),
# use any of scikit-image's warping modes
# (see 2nd image from the top for examples)
mode='constant'
)),
# execute 0 to 5 of the following (less important) augmenters per
# image don't execute all of them, as that would often be way too
# strong
iaa.SomeOf((0, 5),
[
# convert images into their superpixel representation
sometimes(iaa.Superpixels(
p_replace=(0, 1.0), n_segments=(20, 200))),
iaa.OneOf([
# blur images with a sigma between 0 and 3.0
iaa.GaussianBlur((0, 3.0)),
# blur image using local means with kernel sizes
# between 2 and 7
iaa.AverageBlur(k=(2, 7)),
# blur image using local medians with kernel sizes
# between 2 and 7
iaa.MedianBlur(k=(3, 11)),
]),
iaa.Sharpen(alpha=(0, 1.0), lightness=(
0.75, 1.5)), # sharpen images
iaa.Emboss(alpha=(0, 1.0), strength=(
0, 2.0)), # emboss images
# search either for all edges or for directed edges,
# blend the result with the original image using a blobby mask
iaa.SimplexNoiseAlpha(iaa.OneOf([
iaa.EdgeDetect(alpha=(0.5, 1.0)),
iaa.DirectedEdgeDetect(
alpha=(0.5, 1.0), direction=(0.0, 1.0)),
])),
# add gaussian noise to images
iaa.AdditiveGaussianNoise(loc=0, scale=(
0.0, 0.05*255), per_channel=0.5),
iaa.OneOf([
# randomly remove up to 10% of the pixels
iaa.Dropout((0.01, 0.1), per_channel=0.5),
iaa.CoarseDropout((0.03, 0.15), size_percent=(
0.02, 0.05), per_channel=0.2),
]),
# invert color channels
iaa.Invert(0.05, per_channel=True),
# change brightness of images (by -10 to 10 of original value)
iaa.Add((-10, 10), per_channel=0.5),
# change hue and saturation
iaa.AddToHueAndSaturation((-20, 20)),
# either change the brightness of the whole image (sometimes
# per channel) or change the brightness of subareas
iaa.OneOf([
iaa.Multiply(
(0.5, 1.5), per_channel=0.5),
iaa.FrequencyNoiseAlpha(
exponent=(-4, 0),
first=iaa.Multiply(
(0.5, 1.5), per_channel=True),
second=iaa.ContrastNormalization(
(0.5, 2.0))
)
]),
# improve or worsen the contrast
iaa.ContrastNormalization((0.5, 2.0), per_channel=0.5),
iaa.Grayscale(alpha=(0.0, 1.0)),
# move pixels locally around (with random strengths)
sometimes(iaa.ElasticTransformation(
alpha=(0.5, 3.5), sigma=0.25)),
# sometimes move parts of the image around
sometimes(iaa.PiecewiseAffine(scale=(0.01, 0.05))),
sometimes(iaa.PerspectiveTransform(scale=(0.01, 0.1)))
],
random_order=True
)
],
random_order=True
)
augmentation_functions = {
"aug_all": _load_augmentation_aug_all,
"aug_all2": _load_augmentation_aug_all2,
"aug_geometric": _load_augmentation_aug_geometric,
"aug_non_geometric": _load_augmentation_aug_non_geometric
}
def _load_augmentation(augmentation_name="aug_all"):
global IMAGE_AUGMENTATION_SEQUENCE
if augmentation_name not in augmentation_functions:
raise ValueError("Augmentation name not supported")
IMAGE_AUGMENTATION_SEQUENCE = augmentation_functions[augmentation_name]()
def _augment_seg(img, seg, augmentation_name="aug_all"):
global loaded_augmentation_name
if (not IMAGE_AUGMENTATION_SEQUENCE) or\
(augmentation_name != loaded_augmentation_name):
_load_augmentation(augmentation_name)
loaded_augmentation_name = augmentation_name
# Create a deterministic augmentation from the random one
aug_det = IMAGE_AUGMENTATION_SEQUENCE.to_deterministic()
# Augment the input image
image_aug = aug_det.augment_image(img)
segmap = ia.SegmentationMapOnImage(
seg, nb_classes=np.max(seg) + 1, shape=img.shape)
segmap_aug = aug_det.augment_segmentation_maps(segmap)
segmap_aug = segmap_aug.get_arr_int()
return image_aug, segmap_aug
def _try_n_times(fn, n, *args, **kargs):
""" Try a function N times """
attempts = 0
while attempts < n:
try:
return fn(*args, **kargs)
except Exception:
attempts += 1
return fn(*args, **kargs)
def augment_seg(img, seg, augmentation_name="aug_all"):
return _try_n_times(_augment_seg, IMAGE_AUGMENTATION_NUM_TRIES,
img, seg, augmentation_name=augmentation_name)
|
PypiClean
|
/lino-xl-23.9.1.tar.gz/lino-xl-23.9.1/lino_xl/lib/extensible/static/extensible-1.0.1/examples/calendar/remote.js
|
Ext.onReady(function(){
var today = new Date().clearTime();
apiRoot = 'remote/php/app.php/events/';
Ext.Msg.minWidth = 300;
// Let's load the calendar store remotely also. All you have to do to get
// color-coding is include this store with the CalendarPanel.
var calendarStore = new Ext.data.JsonStore({
storeId: 'calendarStore',
url: 'data/calendars.json',
root: 'calendars',
idProperty: Ext.ensible.cal.CalendarMappings.CalendarId.mapping || 'id',
fields: Ext.ensible.cal.CalendarRecord.prototype.fields.getRange(),
remoteSort: true,
sortInfo: {
field: Ext.ensible.cal.CalendarMappings.Title.name,
direction: 'ASC'
}
});
// Make sure this loads first so that the calendar records are available
// when the event store loads and triggers the view to render
calendarStore.load();
var proxy = new Ext.data.HttpProxy({
disableCaching: false, // no need for cache busting when loading via Ajax
api: {
read: apiRoot+'view',
create: apiRoot+'create',
update: apiRoot+'update',
destroy: apiRoot+'destroy'
},
listeners: {
exception: function(proxy, type, action, o, res, arg){
var msg = res.message ? res.message : Ext.decode(res.responseText).message;
// ideally an app would provide a less intrusive message display
Ext.Msg.alert('Server Error', msg);
}
}
});
var reader = new Ext.data.JsonReader({
totalProperty: 'total',
successProperty: 'success',
idProperty: 'id',
root: 'data',
messageProperty: 'message',
fields: Ext.ensible.cal.EventRecord.prototype.fields.getRange()
});
var writer = new Ext.data.JsonWriter({
encode: true,
writeAllFields: false
});
var store = new Ext.ensible.cal.EventStore({
id: 'event-store',
restful: true,
proxy: proxy,
reader: reader,
writer: writer,
// the view will automatically set start / end date params for you. You can
// also pass a valid config object as specified by Ext.data.Store.load()
// and the start / end params will be appended to it.
autoLoad: true,
// It's easy to provide generic CRUD messaging without having to handle events on every individual view.
// Note that while the store provides individual add, update and remove events, those fire BEFORE the
// remote transaction returns from the server -- they only signify that records were added to the store,
// NOT that your changes were actually persisted correctly in the back end. The 'write' event is the best
// option for generically messaging after CRUD persistence has succeeded.
listeners: {
'write': function(store, action, data, resp, rec){
var title = Ext.value(rec.data[Ext.ensible.cal.EventMappings.Title.name], '(No title)');
switch(action){
case 'create':
Ext.ensible.sample.msg('Add', 'Added "' + title + '"');
break;
case 'update':
Ext.ensible.sample.msg('Update', 'Updated "' + title + '"');
break;
case 'destroy':
Ext.ensible.sample.msg('Delete', 'Deleted "' + title + '"');
break;
}
}
}
});
var cp = new Ext.ensible.cal.CalendarPanel({
id: 'calendar-remote',
eventStore: store,
calendarStore: calendarStore,
renderTo: 'cal',
title: 'Remote Calendar',
width: 900,
height: 700
});
// You can optionally call load() here if you prefer instead of using the
// autoLoad config. Note that as long as you call load AFTER the store
// has been passed into the CalendarPanel the default start and end date parameters
// will be set for you automatically (same thing with autoLoad:true). However, if
// you call load manually BEFORE the store has been passed into the CalendarPanel
// it will call the remote read method without any date parameters, which is most
// likely not what you'll want.
// store.load({ ... });
var errorCheckbox = Ext.get('forceError');
var setRemoteErrorMode = function(){
if(errorCheckbox.dom.checked){
// force an error response to test handling of CUD (not R) actions. this param is
// only implemented in the back end code for this sample -- it's not default behavior.
store.setBaseParam('fail', true);
cp.setTitle('Remote Calendar <span id="errTitle">(Currently in remote error mode)</span>');
}
else{
delete store.baseParams['fail'];
cp.setTitle('Remote Calendar');
}
};
setRemoteErrorMode();
errorCheckbox.on('click', setRemoteErrorMode);
});
|
PypiClean
|
/SAPL_Base-0.3.5-py3-none-any.whl/sapl_base/authorization_subscriptions.py
|
import json
class AuthorizationSubscription:
"""
AuthorizationSubscriptions are sent to the PolicyDecisionPoint and based on their data a Decision is returned.
"""
def __init__(self, subject, action, resource, environment=None,
subscription_id: int = None):
"""
Create an AuthorizationSubscription Object with the given arguments
:param subject: Subject, which describes for whom a Decision shall be made. This can be the logged in User for example
:param action: For which action shall a Decision be made? This can be a Requesttype like GET or POST for example
:param resource: The resources for which a Decision shall be made. This can be for example the parameter of a GET request
:param environment: Optional argument, which describes the environment. This can be for example the current time or location
:param subscription_id: ID for the object. If no ID is provided an ID is created
"""
if not (isinstance(subscription_id, int) or subscription_id is None):
raise TypeError(
f"subscription_id must be an int, was {subscription_id} type of {subscription_id.__class__}")
self.subject = subject if subject else {}
self.action = action if action else {}
self.resource = resource if resource else {}
if environment is not None:
self.environment = environment
if subscription_id is not None:
self.subscription_id = subscription_id
else:
self.subscription_id = id(self)
def __repr__(self):
"""
representative of the object.
"""
dictionary = self.__dict__.copy()
representative = ",".join(element + "=" + repr(dictionary.get(element)) for element in dictionary)
return f"{type(self).__name__}({representative})"
def __str__(self):
"""
Sting representation returns this object in json format as a string
"""
dictionary = self.__dict__.copy()
dictionary.pop("subscription_id")
return json.dumps(dictionary, indent=2, skipkeys=True, default=lambda o: str(o))
class MultiSubscription:
"""
Multiple AuthorizationSubscriptions can be gathered in a MultiSubscription, which can be sent to a
PolicyDecisionPoint.The PDP will create individuell Decisions for each AuthorizationSubscription, but only one
request is needed for all Decisions.
"""
def __init__(
self, subject=None, action=None, resource=None,
environment=None,
authorization_subscriptions=None,
):
if subject is not None:
self.subject = subject
if action is not None:
self.action = action
if resource is not None:
self.resource = resource
if environment is not None:
self.environment = environment
self.authorization_subscriptions = authorization_subscriptions
def __repr__(self):
"""
representative of the object.
"""
representative = ",".join(element + "=" + repr(self.__dict__.get(element)) for element in self.__dict__)
return f"{type(self).__name__}({representative})"
def __str__(self):
"""
Sting representation returns this object in json format as a string
"""
return json.dumps(self.__dict__, indent=2, skipkeys=True, default=lambda o: str(o))
|
PypiClean
|
/ingestion-api-1.3.4.tar.gz/ingestion-api-1.3.4/ingestion/configuration.py
|
from __future__ import absolute_import
import copy
import logging
import multiprocessing
import sys
import urllib3
import six
from six.moves import http_client as httplib
class TypeWithDefault(type):
def __init__(cls, name, bases, dct):
super(TypeWithDefault, cls).__init__(name, bases, dct)
cls._default = None
def __call__(cls):
if cls._default is None:
cls._default = type.__call__(cls)
return copy.copy(cls._default)
def set_default(cls, default):
cls._default = copy.copy(default)
class Configuration(six.with_metaclass(TypeWithDefault, object)):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self):
"""Constructor"""
# Default Base url
self.host = "https://ingestion.mint.isi.edu/v1.2.0"
# Temp file folder for downloading files
self.temp_folder_path = None
# Authentication Settings
# dict to store API key(s)
self.api_key = {}
# dict to store API prefix (e.g. Bearer)
self.api_key_prefix = {}
# Username for HTTP basic authentication
self.username = ""
# Password for HTTP basic authentication
self.password = ""
# Logging Settings
self.logger = {}
self.logger["package_logger"] = logging.getLogger("ingestion")
self.logger["urllib3_logger"] = logging.getLogger("urllib3")
# Log format
self.logger_format = '%(asctime)s %(levelname)s %(message)s'
# Log stream handler
self.logger_stream_handler = None
# Log file handler
self.logger_file_handler = None
# Debug file location
self.logger_file = None
# Debug switch
self.debug = False
# SSL/TLS verification
# Set this to false to skip verifying SSL certificate when calling API
# from https server.
self.verify_ssl = True
# Set this to customize the certificate file to verify the peer.
self.ssl_ca_cert = None
# client certificate file
self.cert_file = None
# client key file
self.key_file = None
# Set this to True/False to enable/disable SSL hostname verification.
self.assert_hostname = None
# urllib3 connection pool's maximum number of connections saved
# per pool. urllib3 uses 1 connection as default value, but this is
# not the best value when you are making a lot of possibly parallel
# requests to the same host, which is often the case here.
# cpu_count * 5 is used as default value to increase performance.
self.connection_pool_maxsize = multiprocessing.cpu_count() * 5
# Proxy URL
self.proxy = None
# Proxy headers
self.proxy_headers = None
# Safe chars for path_param
self.safe_chars_for_path_param = ''
# Adding retries to override urllib3 default value 3
self.retries = None
@property
def logger_file(self):
"""The logger file.
If the logger_file is None, then add stream handler and remove file
handler. Otherwise, add file handler and remove stream handler.
:param value: The logger_file path.
:type: str
"""
return self.__logger_file
@logger_file.setter
def logger_file(self, value):
"""The logger file.
If the logger_file is None, then add stream handler and remove file
handler. Otherwise, add file handler and remove stream handler.
:param value: The logger_file path.
:type: str
"""
self.__logger_file = value
if self.__logger_file:
# If set logging file,
# then add file handler and remove stream handler.
self.logger_file_handler = logging.FileHandler(self.__logger_file)
self.logger_file_handler.setFormatter(self.logger_formatter)
for _, logger in six.iteritems(self.logger):
logger.addHandler(self.logger_file_handler)
@property
def debug(self):
"""Debug status
:param value: The debug status, True or False.
:type: bool
"""
return self.__debug
@debug.setter
def debug(self, value):
"""Debug status
:param value: The debug status, True or False.
:type: bool
"""
self.__debug = value
if self.__debug:
# if debug status is True, turn on debug logging
for _, logger in six.iteritems(self.logger):
logger.setLevel(logging.DEBUG)
# turn on httplib debug
httplib.HTTPConnection.debuglevel = 1
else:
# if debug status is False, turn off debug logging,
# setting log level to default `logging.WARNING`
for _, logger in six.iteritems(self.logger):
logger.setLevel(logging.WARNING)
# turn off httplib debug
httplib.HTTPConnection.debuglevel = 0
@property
def logger_format(self):
"""The logger format.
The logger_formatter will be updated when sets logger_format.
:param value: The format string.
:type: str
"""
return self.__logger_format
@logger_format.setter
def logger_format(self, value):
"""The logger format.
The logger_formatter will be updated when sets logger_format.
:param value: The format string.
:type: str
"""
self.__logger_format = value
self.logger_formatter = logging.Formatter(self.__logger_format)
def get_api_key_with_prefix(self, identifier):
"""Gets API key (with prefix if set).
:param identifier: The identifier of apiKey.
:return: The token for api key authentication.
"""
if (self.api_key.get(identifier) and
self.api_key_prefix.get(identifier)):
return self.api_key_prefix[identifier] + ' ' + self.api_key[identifier] # noqa: E501
elif self.api_key.get(identifier):
return self.api_key[identifier]
def get_basic_auth_token(self):
"""Gets HTTP basic authentication header (string).
:return: The token for basic HTTP authentication.
"""
return urllib3.util.make_headers(
basic_auth=self.username + ':' + self.password
).get('authorization')
def auth_settings(self):
"""Gets Auth Settings dict for api client.
:return: The Auth Settings information dict.
"""
return {
}
def to_debug_report(self):
"""Gets the essential information for debugging.
:return: The report for debugging.
"""
return "Python SDK Debug Report:\n"\
"OS: {env}\n"\
"Python Version: {pyversion}\n"\
"Version of the API: 1.2.0\n"\
"SDK Package Version: 1.3.4".\
format(env=sys.platform, pyversion=sys.version)
def get_host_settings(self):
"""Gets an array of host settings
:return: An array of host settings
"""
return [
{
'url': "https://ingestion.mint.isi.edu/v1.2.0",
'description': "No description provided",
},
{
'url': "http://localhost:8080/v1.2.0",
'description': "No description provided",
}
]
def get_host_from_settings(self, index, variables={}):
"""Gets host URL based on the index and variables
:param index: array index of the host settings
:param variables: hash of variable and the corresponding value
:return: URL based on host settings
"""
servers = self.get_host_settings()
# check array index out of bound
if index < 0 or index >= len(servers):
raise ValueError(
"Invalid index {} when selecting the host settings. Must be less than {}" # noqa: E501
.format(index, len(servers)))
server = servers[index]
url = server['url']
# go through variable and assign a value
for variable_name in server['variables']:
if variable_name in variables:
if variables[variable_name] in server['variables'][
variable_name]['enum_values']:
url = url.replace("{" + variable_name + "}",
variables[variable_name])
else:
raise ValueError(
"The variable `{}` in the host URL has invalid value {}. Must be {}." # noqa: E501
.format(
variable_name, variables[variable_name],
server['variables'][variable_name]['enum_values']))
else:
# use default value
url = url.replace(
"{" + variable_name + "}",
server['variables'][variable_name]['default_value'])
return url
|
PypiClean
|
/Products.SilvaPoll-3.0.tar.gz/Products.SilvaPoll-3.0/Products/SilvaPoll/ServicePollsMySQL.py
|
from AccessControl import ClassSecurityInfo
from App.class_init import InitializeClass
from Products.SilvaPoll.ServicePolls import ServicePolls
from Products.SilvaPoll.sqldb import SQLDB
class ServicePollsMySQL(ServicePolls):
"""Service that manages poll data
"""
security = ClassSecurityInfo()
meta_type = 'Silva Poll Service SQL'
_sqlite = False
def __init__(self, id, sqlite=False):
super(ServicePollsMySQL, self).__init__(id)
self._sqlite = sqlite
def _get_database(self):
return SQLDB('service_polls_db', 'UTF-8')
def _init_database(self):
db = self._get_database()
try:
db.getSQLData(self, u'SELECT * FROM question')
except:
self._create_tables(db)
def _create_tables(self, db):
if self._sqlite:
db.getSQLData(self, (
u"""CREATE TABLE question ("""
"""id INTEGER PRIMARY KEY AUTOINCREMENT, """
"""question TEXT NOT NULL)"""))
db.getSQLData(self, (
u"""CREATE TABLE answer ("""
"""id INTEGER PRIMARY KEY AUTOINCREMENT, """
"""qid INTEGER NOT NULL, """
"""answer TEXT NOT NULL, """
"""votes INTEGER DEFAULT 0 NOT NULL)"""))
else:
db.getSQLData(self, (
u"""CREATE TABLE question ("""
"""id BIGINT NOT NULL AUTO_INCREMENT, """
"""question TEXT NOT NULL, """
"""PRIMARY KEY (id))"""))
db.getSQLData(self, (
u"""CREATE TABLE answer ("""
"""id BIGINT NOT NULL AUTO_INCREMENT, """
"""qid BIGINT NOT NULL, """
"""answer TEXT NOT NULL, """
"""votes BIGINT DEFAULT 0 NOT NULL, """
"""PRIMARY KEY(id), """
"""INDEX(qid))"""))
def create_question(self, question, answers):
db = self._get_database()
db.getSQLData(self,
u"INSERT INTO question (question) VALUES ('%(question)s')",
{'question': question})
idres = db.getSQLData(self, u'SELECT LAST_INSERT_ID() as id')
id = idres[0]['id']
for i, answer in enumerate(answers):
query = (u"INSERT INTO answer (qid, answer, votes) VALUES "
"(%(qid)s, '%(answer)s', '%(votes)s')")
db.getSQLData(self, query, {'qid': id, 'answer': answer,
'votes': '0'})
return id
def get_question(self, qid):
db = self._get_database()
res = db.getSQLData(self,
u'SELECT * FROM question WHERE id=%(id)s', {'id': qid})
return res[0]['question']
def set_question(self, qid, question):
db = self._get_database()
db.getSQLData(self,
u"UPDATE question SET question='%(question)s' WHERE id=%(id)s",
{'question': question, 'id': qid})
def get_answers(self, qid):
db = self._get_database()
res = db.getSQLData(self,
u'SELECT answer FROM answer WHERE qid=%(id)s ORDER BY id',
{'id': qid})
ret = [r['answer'] for r in res]
return ret
def set_answers(self, qid, answers):
db = self._get_database()
curranswers = self.get_answers(qid)
if curranswers and len(curranswers) == len(answers):
# this is kinda nasty: first get the ids of the answers, then (in
# order!) update the rows
res = db.getSQLData(self,
u"SELECT id FROM answer WHERE qid=%(id)s ORDER BY id", {'id': qid})
for i, id in enumerate([r['id'] for r in res]):
db.getSQLData(self,
u"UPDATE answer SET answer='%(answer)s' where id=%(id)s",
{'id': id, 'answer': answers[i]})
else:
# drop any existing rows
db.getSQLData(self, u'DELETE FROM answer WHERE qid=%(qid)s',
{'qid': qid})
for answer in answers:
db.getSQLData(self,
(u"INSERT INTO answer (qid, answer) VALUES (%(qid)s, "
"'%(answer)s')"), {'qid': qid, 'answer': answer})
def get_votes(self, qid):
db = self._get_database()
res = db.getSQLData(self,
u'SELECT votes FROM answer WHERE qid=%(id)s', {'id': qid})
return [int(r['votes']) for r in res]
def vote(self, qid, index):
# kinda nasty too, similar problem: we first get all answer rows to
# find out what answer has index <index>, then do the update
db = self._get_database()
res = db.getSQLData(self,
u"SELECT id, votes FROM answer WHERE qid=%(id)s", {'id': qid})
idvotes = [(r['id'], int(r['votes'])) for r in res]
idvotesindex = idvotes[index]
db.getSQLData(self,
u"UPDATE answer SET votes=%(votes)s WHERE id=%(id)s",
{'id': idvotesindex[0], 'votes': idvotesindex[1] + 1})
InitializeClass(ServicePollsMySQL)
|
PypiClean
|
/infomedia-python-1.2.tar.gz/infomedia-python-1.2/src/infomedia/hash2cfg/defines.py
|
##
## This file is part of infomedia framework library
##
## Copyright (c) 2011-2012 Infomedia Foundation
##
## Author: Emmanuele Somma (emmanuele_DOT_somma_AT_infomedia_DOT_it)
##
## Any parts of this program derived from this project,
## or contributed by third-party developers are copyrighted by their
## respective authors.
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 3 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
##
##
import re
from os import getenv,environ
from datetime import datetime
from pprint import pprint
from os.path import (basename, expanduser, join,
dirname, exists, expandvars,
isdir)
from infomedia.collections import vdict,udict,logdict
from infomedia.options import Options
import logging
logger=logging.getLogger(__name__)
# defines, functions, processor and macro
_date = datetime.now()
# DEFINES
DEFAULT_DEFINES = {
'THISMONTH': str(_date.month),
'THISYEAR': str(_date.year),
'TYEAR': str(_date.year)[2:],
'PREVYEAR': str(_date.year-1),
'PREVVYEAR': str(_date.year-2),
'PREVVVYEAR': str(_date.year-3),
'NEXTYEAR': str(_date.year+1),
'SRIDATA': getenv('SRIDATA'),
'YUPD': str(0),
'UPDATE': str(0),
'DONT_UPDATE':str(1),
'NUM': str(0),
'DATE': _date.strftime('%Y%m%d'),
'THISDATE': _date.strftime('%d/%m/%Y'),
'LONG_DATE': _date.strftime('%d %B %Y'),
'DS_DATE': _date.strftime('%Y-%m-%d')
}
## FUNCTIONS IN TABLE DEFINITION
##
mnth = [ '_(Jan)', '_(Feb)', '_(Mar)',
'_(Apr)', '_(May)', '_(Jun)',
'_(Jul)', '_(Aug)', '_(Sep)',
'_(Oct)', '_(Nov)', '_(Dec)' ]
macro_last_year = False
def macro_month(*args):
"""month from 0->Jan to 11->Dec"""
ev = int(args[0])-1
return (mnth[ev%12],1,"c")
def macro_fyear(*args):
"""FYEAR(YEAR,MONTH)
returns
\multicolumn{n}{c}{$THISYEAR}
or
empty if
"""
global macro_last_year
month = int(args[1])-1
year = int(args[0]) - ( 1 if month < 0 else 0 )
remain= int(args[2])
_date = datetime.now()
Y = _date.year
M = _date.month
logger.debug("{FYEAR} m/d - remain - M/Y = %d/%d - %d - %d/%d",
month,year,remain,M,Y)
if year==Y:
macro_last_year = False
if month == 0:
# emit a multicolumn for remaining months
if remain == 1:
return (str(year),1,"c")
else:
return (str(year),remain,"c")
else:
if macro_last_year == False:
macro_last_year = True
return (str(year),-month,"c")
return None
def macro_get(name,*args):
# logger.debug('{MACRO GET} %s > %s',name,args)
n = 0
if args and args[0]:
m = re.match('^(-?[0-9]+)$',args[0])
if m:
n = int(m.group(1))
m = re.match('^[A-Z][A-Z0-9_]*$',name)
if m:
if dataset.has_key(name):
TS = dataset[name]
return (unicode("%.1f"%TS.data[n]),1,"c")
def macro_getdate(name,*args):
# logger.debug('{MACRO GET} %s > %s',name,args)
n = 0
if args and args[0]:
m = re.match('^(-?[0-9]+)$',args[0])
if m:
n = int(m.group(1))
m = re.match('^[A-Z][A-Z0-9_]*$',name)
if m:
if dataset.has_key(name):
TS = dataset[name]
return (unicode(TS.dates[n]),1,"c")
macro_fncs = { 'MONTH': macro_month ,
'FYEAR': macro_fyear ,
'GET': macro_get ,
'GETDATE': macro_getdate ,
}
name = None
spec = None
dataset = None
options = None
def macro_rerepl(matchobj):
g = [ _m for _m in matchobj.groups()]
f= g.pop(0)
res = macro_fncs[f](*g)
if res:
(msg,n,c) = res
return "#%s#%s#%s#" % (msg,n,c)
def expandfuncs(string,_name=None,_spec=None,_dataset=None,_options=None,unfold=False):
global name, spec, dataset, options
if isinstance(string,(tuple,list)):
return string
# expandlistfunc(string,_name,_spec,_dataset,_options,unfold)
string=unicode(string)
if _name: name = _name
if _spec: spec = _spec
if _dataset: dataset = _dataset
if _options: options = _options
# logger.debug('{EXPANDFUNCS} W:%s N:%s S:%s D:%s O:%s',string,type(name),type(spec),type(dataset),type(options))
_ttt = unicode(string)
if len(_ttt)==0:
return string
if '%' not in string:
return _ttt
# logger.debug('{EXPANDFUNCS} 2:%s',_ttt)
m = re.search('%([A-Z]+)\(([^),]+)(,[^,)])*\)',_ttt,re.IGNORECASE)
if m:
# logger.debug('{EXPANDFUNCS} M:%s',m.groups())
_tto=_ttt
while '%' in _ttt:
_ttt=re.sub('%([A-Z]+)\(([^,)]+)(?:,([^,)]+))?(?:,([^,)]+))?\)',
macro_rerepl,
_ttt,
re.IGNORECASE)
if _ttt is None or _ttt == _tto:
break
_tto=_ttt
if _ttt:
if unfold:
_ttt=re.sub('#([^#]+)#[^#]+#[^#]+#',
'\\1',
_ttt,
re.IGNORECASE)
logger.debug("EXPANDFUN- %s --> %s",string,_ttt)
return _ttt
def expandlistfunc(string,_name=None,_spec=None,_dataset=None,_options=None,unfold=False):
return [ expandfunc(x,_name,_spec,_dataset,_options,unfold) for x in string ]
class DefineSet(udict):
# _we_are_one = {}
def __init__(self,indict=None):
udict.__init__(self)
self._varprog = None
self.setUp()
if indict:
self.update(indict)
def report(self):
s=""
for k,v in self.items():
s+="%s=%s\n"%(k,v)
return s
def update(self,indict):
h = udict(indict)
vdict.update(self,h)
if hasattr(indict,'_vaprog'):
self._varprog = indict._vaprog
else:
import re
self._varprog = re.compile(r'\$(\w+|\{[^}]*\})')
def setUp(self):
self.update(environ)
self.update(DEFAULT_DEFINES)
self._varprog = None
def update_from_strings(self,definitions):
if definitions:
self.update(udict([ v.split('=') for v in definitions]))
def expand(self,path):
"""Expand parameters of form $var and ${var}. Unknown variables
are left unchanged.
@param path: path string to expand
@type path: unicode
@returns: the path string with parameters expanded
"""
# logdict(logger.debug,'{EXPANDDEFINES} for path %s'%path,self)
if len(self)==0:
self.setUp()
if isinstance(path,udict):
return udict([ (k,self.expand(unicode(v))) for k,v in path.items() ])
if '$' not in path:
return path
if not self._varprog:
import re
self._varprog = re.compile(r'\$(\w+|\{[^}]*\})')
i = 0
while True:
m = self._varprog.search(path, i)
if not m:
break
i, j = m.span(0)
name = m.group(1)
if name.startswith('{') and name.endswith('}'):
name = name[1:-1]
if name in self:
tail = path[j:]
path = path[:i] + unicode(self[name])
i = len(path)
path += tail
else:
i = j
return path
class MacroSet(udict):
pass
define_set = DefineSet()
def expansion(kk):
### Questo applica di nuovo l'espansione
pkk = None
if '$' in kk:
kk=define_set.expand(kk)
def rerepl(matchobj):
g=matchobj.group(1)
ev = eval(g)
return unicode(ev)
while '((' in kk:
kk=re.sub('\(\(([-+*/0-9. ]+)\)\)',
rerepl,
kk)
if kk == pkk:
break
pkk = kk
return kk
__all__ = """
expandfuncs
expandlistfunc
define_set
expansion
DEFAULT_DEFINES
""".split()
|
PypiClean
|
/django-jutil-3.12.5.tar.gz/django-jutil-3.12.5/jutil/files.py
|
import os
from typing import List
from django.utils.translation import gettext as _
from jutil.format import is_media_full_path, strip_media_root
def list_files(dir_name: str, suffix: str = "", ignore_case: bool = True, use_media_root: bool = False, recurse: bool = False) -> List[str]:
"""Lists all files under specified directory.
Optionally filter files by suffix and recurse to subdirectories.
Args:
dir_name: Directory path
suffix: Case-sensitive suffix (optional)
ignore_case: Case-insensitive suffix. Default is True.
use_media_root: Instead of full path return files relative to media root.
recurse: Recurse subdirectories (optional)
Returns:
List of file names found
"""
if not os.path.isdir(dir_name):
raise ValueError(_("{} is not a directory").format(dir_name))
dir_full_path = os.path.abspath(dir_name)
if use_media_root and not is_media_full_path(dir_full_path):
raise ValueError(_("{} is not under MEDIA_ROOT"))
if suffix:
if not suffix.startswith("."):
suffix = "." + suffix
if ignore_case:
suffix = suffix.lower()
out: List[str] = []
for ent in os.scandir(dir_full_path):
assert isinstance(ent, os.DirEntry)
if ent.is_file():
name = ent.name
if suffix and ignore_case:
name = name.lower()
if not suffix or name.endswith(suffix):
file_path = strip_media_root(ent.path) if use_media_root else os.path.abspath(ent.path)
out.append(file_path)
elif recurse and ent.is_dir() and ent.name != "." and ent.name != "..":
out.extend(list_files(ent.path, suffix=suffix, ignore_case=ignore_case, use_media_root=use_media_root, recurse=recurse))
return out
def find_file(filename: str, dir_name: str = ".", use_media_root: bool = False, recurse: bool = False) -> List[str]:
"""Finds file under specified directory.
Optionally filter files by suffix and recurse to subdirectories.
Args:
filename: File name to find. You can also specify relative paths e.g. "en/LC_MESSAGES/django.po"
dir_name: Directory path. Default '.'
use_media_root: Instead of full path return files relative to media root.
recurse: Recurse subdirectories (optional)
Returns:
List of file names found
"""
if not os.path.isdir(dir_name):
raise ValueError(_("{} is not a directory").format(dir_name))
dir_full_path = os.path.abspath(dir_name)
if use_media_root and not is_media_full_path(dir_full_path):
raise ValueError(_("{} is not under MEDIA_ROOT"))
out: List[str] = []
if "/" not in filename:
filename = "/" + filename
for ent in os.scandir(dir_full_path):
assert isinstance(ent, os.DirEntry)
if ent.is_file():
full_path = str(os.path.abspath(ent.path))
if full_path.endswith(filename):
file_path = strip_media_root(full_path) if use_media_root else full_path
out.append(file_path)
elif recurse and ent.is_dir() and ent.name != "." and ent.name != "..":
out.extend(find_file(filename, dir_name=ent.path, use_media_root=use_media_root, recurse=recurse))
return out
|
PypiClean
|
/ztfy.myams-0.1.33.tar.gz/ztfy.myams-0.1.33/src/ztfy/myams/resources/js/ext/bootstrap-treeview.min.js
|
!function(e,t,o,s){"use strict";var n={};n.settings={injectStyle:!0,levels:2,expandIcon:"glyphicon glyphicon-plus",collapseIcon:"glyphicon glyphicon-minus",emptyIcon:"glyphicon",nodeIcon:"",selectedIcon:"",checkedIcon:"glyphicon glyphicon-check",uncheckedIcon:"glyphicon glyphicon-unchecked",color:void 0,backColor:void 0,borderColor:void 0,onhoverColor:"#F5F5F5",selectedColor:"#FFFFFF",selectedBackColor:"#428bca",unselectableBackColor:void 0,searchResultColor:"#D9534F",searchResultBackColor:void 0,enableLinks:!1,highlightSelected:!0,highlightSearchResults:!0,showBorder:!0,showIcon:!0,showCheckbox:!1,showTags:!1,toggleUnselectable:!0,multiSelect:!1,onNodeChecked:void 0,onNodeCollapsed:void 0,onNodeDisabled:void 0,onNodeEnabled:void 0,onNodeExpanded:void 0,onNodeSelected:void 0,onNodeUnchecked:void 0,onNodeUnselected:void 0,onSearchComplete:void 0,onSearchCleared:void 0},n.options={silent:!1,ignoreChildren:!1},n.searchOptions={ignoreCase:!0,exactMatch:!1,revealResults:!0};var i=function(t,o){return this.$element=e(t),this.elementId=t.id,this.styleId=this.elementId+"-style",this.init(o),{options:this.options,init:e.proxy(this.init,this),remove:e.proxy(this.remove,this),getNode:e.proxy(this.getNode,this),getParent:e.proxy(this.getParent,this),getSiblings:e.proxy(this.getSiblings,this),getSelected:e.proxy(this.getSelected,this),getUnselected:e.proxy(this.getUnselected,this),getExpanded:e.proxy(this.getExpanded,this),getCollapsed:e.proxy(this.getCollapsed,this),getChecked:e.proxy(this.getChecked,this),getUnchecked:e.proxy(this.getUnchecked,this),getDisabled:e.proxy(this.getDisabled,this),getEnabled:e.proxy(this.getEnabled,this),selectNode:e.proxy(this.selectNode,this),unselectNode:e.proxy(this.unselectNode,this),toggleNodeSelected:e.proxy(this.toggleNodeSelected,this),collapseAll:e.proxy(this.collapseAll,this),collapseNode:e.proxy(this.collapseNode,this),expandAll:e.proxy(this.expandAll,this),expandNode:e.proxy(this.expandNode,this),toggleNodeExpanded:e.proxy(this.toggleNodeExpanded,this),revealNode:e.proxy(this.revealNode,this),checkAll:e.proxy(this.checkAll,this),checkNode:e.proxy(this.checkNode,this),uncheckAll:e.proxy(this.uncheckAll,this),uncheckNode:e.proxy(this.uncheckNode,this),toggleNodeChecked:e.proxy(this.toggleNodeChecked,this),disableAll:e.proxy(this.disableAll,this),disableNode:e.proxy(this.disableNode,this),enableAll:e.proxy(this.enableAll,this),enableNode:e.proxy(this.enableNode,this),toggleNodeDisabled:e.proxy(this.toggleNodeDisabled,this),search:e.proxy(this.search,this),clearSearch:e.proxy(this.clearSearch,this)}};i.prototype.init=function(t){this.tree=[],this.nodes=[],t.data&&("string"==typeof t.data&&(t.data=e.parseJSON(t.data)),this.tree=e.extend(!0,[],t.data),delete t.data),this.options=e.extend({},n.settings,t),this.destroy(),this.subscribeEvents(),this.setInitialStates({nodes:this.tree},0),this.render()},i.prototype.remove=function(){this.destroy(),e.removeData(this,"treeview"),e("#"+this.styleId).remove()},i.prototype.destroy=function(){this.initialized&&(this.$wrapper.remove(),this.$wrapper=null,this.unsubscribeEvents(),this.initialized=!1)},i.prototype.unsubscribeEvents=function(){this.$element.off("click"),this.$element.off("nodeChecked"),this.$element.off("nodeCollapsed"),this.$element.off("nodeDisabled"),this.$element.off("nodeEnabled"),this.$element.off("nodeExpanded"),this.$element.off("nodeSelected"),this.$element.off("nodeUnchecked"),this.$element.off("nodeUnselected"),this.$element.off("searchComplete"),this.$element.off("searchCleared")},i.prototype.subscribeEvents=function(){this.unsubscribeEvents(),this.$element.on("click",e.proxy(this.clickHandler,this)),"function"==typeof this.options.onNodeChecked&&this.$element.on("nodeChecked",this.options.onNodeChecked),"function"==typeof this.options.onNodeCollapsed&&this.$element.on("nodeCollapsed",this.options.onNodeCollapsed),"function"==typeof this.options.onNodeDisabled&&this.$element.on("nodeDisabled",this.options.onNodeDisabled),"function"==typeof this.options.onNodeEnabled&&this.$element.on("nodeEnabled",this.options.onNodeEnabled),"function"==typeof this.options.onNodeExpanded&&this.$element.on("nodeExpanded",this.options.onNodeExpanded),"function"==typeof this.options.onNodeSelected&&this.$element.on("nodeSelected",this.options.onNodeSelected),"function"==typeof this.options.onNodeUnchecked&&this.$element.on("nodeUnchecked",this.options.onNodeUnchecked),"function"==typeof this.options.onNodeUnselected&&this.$element.on("nodeUnselected",this.options.onNodeUnselected),"function"==typeof this.options.onSearchComplete&&this.$element.on("searchComplete",this.options.onSearchComplete),"function"==typeof this.options.onSearchCleared&&this.$element.on("searchCleared",this.options.onSearchCleared)},i.prototype.setInitialStates=function(t,o){if(t.nodes){o+=1;var s=t,n=this;e.each(t.nodes,function(e,t){t.nodeId=n.nodes.length,t.parentId=s.nodeId,t.hasOwnProperty("selectable")||(t.selectable=!0),t.state=t.state||{},t.state.hasOwnProperty("checked")||(t.state.checked=!1),t.state.hasOwnProperty("disabled")||(t.state.disabled=!1),t.state.hasOwnProperty("expanded")||(!t.state.disabled&&o<n.options.levels&&t.nodes&&t.nodes.length>0?t.state.expanded=!0:t.state.expanded=!1),t.state.hasOwnProperty("selected")||(t.state.selected=!1),n.nodes.push(t),t.nodes&&n.setInitialStates(t,o)})}},i.prototype.clickHandler=function(t){this.options.enableLinks||t.preventDefault();var o=e(t.target),s=this.findNode(o);if(s&&!s.state.disabled){var i=o.attr("class")?o.attr("class").split(" "):[];-1!==i.indexOf("expand-icon")?(this.toggleExpandedState(s,n.options),this.render()):-1!==i.indexOf("check-icon")?(this.toggleCheckedState(s,n.options),this.render()):(s.selectable?this.toggleSelectedState(s,n.options):this.options.toggleUnselectable&&this.toggleExpandedState(s,n.options),this.render())}},i.prototype.findNode=function(e){var t=e.closest("li.list-group-item").attr("data-nodeid"),o=this.nodes[t];return o||console.log("Error: node does not exist"),o},i.prototype.toggleExpandedState=function(e,t){e&&this.setExpandedState(e,!e.state.expanded,t)},i.prototype.setExpandedState=function(t,o,s){o!==t.state.expanded&&(o&&t.nodes?(t.state.expanded=!0,s.silent||this.$element.trigger("nodeExpanded",e.extend(!0,{},t))):o||(t.state.expanded=!1,s.silent||this.$element.trigger("nodeCollapsed",e.extend(!0,{},t)),t.nodes&&!s.ignoreChildren&&e.each(t.nodes,e.proxy(function(e,t){this.setExpandedState(t,!1,s)},this))))},i.prototype.toggleSelectedState=function(e,t){e&&this.setSelectedState(e,!e.state.selected,t)},i.prototype.setSelectedState=function(t,o,s){o!==t.state.selected&&(o?(this.options.multiSelect||e.each(this.findNodes("true","g","state.selected"),e.proxy(function(e,t){this.setSelectedState(t,!1,s)},this)),t.state.selected=!0,s.silent||this.$element.trigger("nodeSelected",e.extend(!0,{},t))):(t.state.selected=!1,s.silent||this.$element.trigger("nodeUnselected",e.extend(!0,{},t))))},i.prototype.toggleCheckedState=function(e,t){e&&this.setCheckedState(e,!e.state.checked,t)},i.prototype.setCheckedState=function(t,o,s){o!==t.state.checked&&(o?(t.state.checked=!0,s.silent||this.$element.trigger("nodeChecked",e.extend(!0,{},t))):(t.state.checked=!1,s.silent||this.$element.trigger("nodeUnchecked",e.extend(!0,{},t))))},i.prototype.setDisabledState=function(t,o,s){o!==t.state.disabled&&(o?(t.state.disabled=!0,this.setExpandedState(t,!1,s),this.setSelectedState(t,!1,s),this.setCheckedState(t,!1,s),s.silent||this.$element.trigger("nodeDisabled",e.extend(!0,{},t))):(t.state.disabled=!1,s.silent||this.$element.trigger("nodeEnabled",e.extend(!0,{},t))))},i.prototype.render=function(){this.initialized||(this.$element.addClass("treeview"),this.$wrapper=e(this.template.list),this.injectStyle(),this.initialized=!0),this.$element.empty().append(this.$wrapper.empty()),this.buildTree(this.tree,0)},i.prototype.buildTree=function(t,o){if(t){o+=1;var s=this;e.each(t,function(t,n){for(var i=e(s.template.item).addClass("node-"+s.elementId).addClass(n.state.checked?"node-checked":"").addClass(n.state.disabled?"node-disabled":"").addClass(n.state.selected?"node-selected":"").addClass(n.searchResult?"search-result":"").attr("data-nodeid",n.nodeId).attr("style",s.buildStyleOverride(n)),d=0;d<o-1;d++)i.append(s.template.indent);r=[];if(n.nodes?(r.push("expand-icon"),n.state.expanded?r.push(s.options.collapseIcon):r.push(s.options.expandIcon)):r.push(s.options.emptyIcon),i.append(e(s.template.icon).addClass(r.join(" "))),s.options.showIcon&&((r=["node-icon"]).push(n.icon||s.options.nodeIcon),n.state.selected&&(r.pop(),r.push(n.selectedIcon||s.options.selectedIcon||n.icon||s.options.nodeIcon)),i.append(e(s.template.icon).addClass(r.join(" ")))),s.options.showCheckbox){var r=["check-icon"];n.state.checked?r.push(s.options.checkedIcon):r.push(s.options.uncheckedIcon),i.append(e(s.template.icon).addClass(r.join(" ")))}if(s.options.enableLinks?i.append(e(s.template.link).attr("href",n.href).append(n.text)):i.append(n.text),s.options.showTags&&n.tags&&e.each(n.tags,function(t,o){i.append(e(s.template.badge).append(o))}),s.$wrapper.append(i),n.nodes&&n.state.expanded&&!n.state.disabled)return s.buildTree(n.nodes,o)})}},i.prototype.buildStyleOverride=function(e){if(e.state.disabled)return"";var t=e.color,o=e.backColor;return e.selectable||(this.options.unselectableColor&&(t=this.options.unselectableColor),this.options.unselectableBackColor&&(o=this.options.unselectableBackColor)),this.options.highlightSelected&&e.state.selected&&(this.options.selectedColor&&(t=this.options.selectedColor),this.options.selectedBackColor&&(o=this.options.selectedBackColor)),this.options.highlightSearchResults&&e.searchResult&&!e.state.disabled&&(this.options.searchResultColor&&(t=this.options.searchResultColor),this.options.searchResultBackColor&&(o=this.options.searchResultBackColor)),"color:"+t+";background-color:"+o+";"},i.prototype.injectStyle=function(){this.options.injectStyle&&!o.getElementById(this.styleId)&&e('<style type="text/css" id="'+this.styleId+'"> '+this.buildStyle()+" </style>").appendTo("head")},i.prototype.buildStyle=function(){var e=".node-"+this.elementId+"{";return this.options.color&&(e+="color:"+this.options.color+";"),this.options.backColor&&(e+="background-color:"+this.options.backColor+";"),this.options.showBorder?this.options.borderColor&&(e+="border:1px solid "+this.options.borderColor+";"):e+="border:none;",e+="}",this.options.onhoverColor&&(e+=".node-"+this.elementId+":not(.node-disabled):hover{background-color:"+this.options.onhoverColor+";}"),this.css+e},i.prototype.template={list:'<ul class="list-group"></ul>',item:'<li class="list-group-item"></li>',indent:'<span class="indent"></span>',icon:'<span class="icon"></span>',link:'<a href="#" style="color:inherit;"></a>',badge:'<span class="badge"></span>'},i.prototype.css=".treeview .list-group-item{cursor:pointer}.treeview span.indent{margin-left:10px;margin-right:10px}.treeview span.icon{width:12px;margin-right:5px}.treeview .node-disabled{color:silver;cursor:not-allowed}",i.prototype.getNode=function(e){return this.nodes[e]},i.prototype.getParent=function(e){var t=this.identifyNode(e);return this.nodes[t.parentId]},i.prototype.getSiblings=function(e){var t=this.identifyNode(e),o=this.getParent(t);return(o?o.nodes:this.tree).filter(function(e){return e.nodeId!==t.nodeId})},i.prototype.getSelected=function(){return this.findNodes("true","g","state.selected")},i.prototype.getUnselected=function(){return this.findNodes("false","g","state.selected")},i.prototype.getExpanded=function(){return this.findNodes("true","g","state.expanded")},i.prototype.getCollapsed=function(){return this.findNodes("false","g","state.expanded")},i.prototype.getChecked=function(){return this.findNodes("true","g","state.checked")},i.prototype.getUnchecked=function(){return this.findNodes("false","g","state.checked")},i.prototype.getDisabled=function(){return this.findNodes("true","g","state.disabled")},i.prototype.getEnabled=function(){return this.findNodes("false","g","state.disabled")},i.prototype.selectNode=function(t,o){this.forEachIdentifier(t,o,e.proxy(function(e,t){this.setSelectedState(e,!0,t)},this)),this.render()},i.prototype.unselectNode=function(t,o){this.forEachIdentifier(t,o,e.proxy(function(e,t){this.setSelectedState(e,!1,t)},this)),this.render()},i.prototype.toggleNodeSelected=function(t,o){this.forEachIdentifier(t,o,e.proxy(function(e,t){this.toggleSelectedState(e,t)},this)),this.render()},i.prototype.collapseAll=function(t){var o=this.findNodes("true","g","state.expanded");this.forEachIdentifier(o,t,e.proxy(function(e,t){this.setExpandedState(e,!1,t)},this)),this.render()},i.prototype.collapseNode=function(t,o){this.forEachIdentifier(t,o,e.proxy(function(e,t){this.setExpandedState(e,!1,t)},this)),this.render()},i.prototype.expandAll=function(t){if((t=e.extend({},n.options,t))&&t.levels)this.expandLevels(this.tree,t.levels,t);else{var o=this.findNodes("false","g","state.expanded");this.forEachIdentifier(o,t,e.proxy(function(e,t){this.setExpandedState(e,!0,t)},this))}this.render()},i.prototype.expandNode=function(t,o){this.forEachIdentifier(t,o,e.proxy(function(e,t){this.setExpandedState(e,!0,t),e.nodes&&t&&t.levels&&this.expandLevels(e.nodes,t.levels-1,t)},this)),this.render()},i.prototype.expandLevels=function(t,o,s){s=e.extend({},n.options,s),e.each(t,e.proxy(function(e,t){this.setExpandedState(t,o>0,s),t.nodes&&this.expandLevels(t.nodes,o-1,s)},this))},i.prototype.revealNode=function(t,o){this.forEachIdentifier(t,o,e.proxy(function(e,t){for(var o=this.getParent(e);o;)this.setExpandedState(o,!0,t),o=this.getParent(o)},this)),this.render()},i.prototype.toggleNodeExpanded=function(t,o){this.forEachIdentifier(t,o,e.proxy(function(e,t){this.toggleExpandedState(e,t)},this)),this.render()},i.prototype.checkAll=function(t){var o=this.findNodes("false","g","state.checked");this.forEachIdentifier(o,t,e.proxy(function(e,t){this.setCheckedState(e,!0,t)},this)),this.render()},i.prototype.checkNode=function(t,o){this.forEachIdentifier(t,o,e.proxy(function(e,t){this.setCheckedState(e,!0,t)},this)),this.render()},i.prototype.uncheckAll=function(t){var o=this.findNodes("true","g","state.checked");this.forEachIdentifier(o,t,e.proxy(function(e,t){this.setCheckedState(e,!1,t)},this)),this.render()},i.prototype.uncheckNode=function(t,o){this.forEachIdentifier(t,o,e.proxy(function(e,t){this.setCheckedState(e,!1,t)},this)),this.render()},i.prototype.toggleNodeChecked=function(t,o){this.forEachIdentifier(t,o,e.proxy(function(e,t){this.toggleCheckedState(e,t)},this)),this.render()},i.prototype.disableAll=function(t){var o=this.findNodes("false","g","state.disabled");this.forEachIdentifier(o,t,e.proxy(function(e,t){this.setDisabledState(e,!0,t)},this)),this.render()},i.prototype.disableNode=function(t,o){this.forEachIdentifier(t,o,e.proxy(function(e,t){this.setDisabledState(e,!0,t)},this)),this.render()},i.prototype.enableAll=function(t){var o=this.findNodes("true","g","state.disabled");this.forEachIdentifier(o,t,e.proxy(function(e,t){this.setDisabledState(e,!1,t)},this)),this.render()},i.prototype.enableNode=function(t,o){this.forEachIdentifier(t,o,e.proxy(function(e,t){this.setDisabledState(e,!1,t)},this)),this.render()},i.prototype.toggleNodeDisabled=function(t,o){this.forEachIdentifier(t,o,e.proxy(function(e,t){this.setDisabledState(e,!e.state.disabled,t)},this)),this.render()},i.prototype.forEachIdentifier=function(t,o,s){o=e.extend({},n.options,o),t instanceof Array||(t=[t]),e.each(t,e.proxy(function(e,t){s(this.identifyNode(t),o)},this))},i.prototype.identifyNode=function(e){return"number"==typeof e?this.nodes[e]:e},i.prototype.search=function(t,o){o=e.extend({},n.searchOptions,o),this.clearSearch({render:!1});var s=[];if(t&&t.length>0){o.exactMatch&&(t="^"+t+"$");var i="g";o.ignoreCase&&(i+="i"),s=this.findNodes(t,i),e.each(s,function(e,t){t.searchResult=!0})}return o.revealResults?this.revealNode(s):this.render(),this.$element.trigger("searchComplete",e.extend(!0,{},s)),s},i.prototype.clearSearch=function(t){t=e.extend({},{render:!0},t);var o=e.each(this.findNodes("true","g","searchResult"),function(e,t){t.searchResult=!1});t.render&&this.render(),this.$element.trigger("searchCleared",e.extend(!0,{},o))},i.prototype.findNodes=function(t,o,s){o=o||"g",s=s||"text";var n=this;return e.grep(this.nodes,function(e){var i=n.getNodeValue(e,s);if("string"==typeof i)return i.match(new RegExp(t,o))})},i.prototype.getNodeValue=function(e,t){var o=t.indexOf(".");if(o>0){var s=e[t.substring(0,o)],n=t.substring(o+1,t.length);return this.getNodeValue(s,n)}return e.hasOwnProperty(t)?e[t].toString():void 0};var d=function(e){t.console&&t.console.error(e)};e.fn.treeview=function(t,o){var s;return this.each(function(){var n=e.data(this,"treeview");"string"==typeof t?n?e.isFunction(n[t])&&"_"!==t.charAt(0)?(o instanceof Array||(o=[o]),s=n[t].apply(n,o)):d("No such method : "+t):d("Not initialized, can not call method : "+t):"boolean"==typeof t?s=n:e.data(this,"treeview",new i(this,e.extend(!0,{},t)))}),s||this}}(jQuery,window,document);
|
PypiClean
|
/pulumi_azure_native-2.5.1a1693590910.tar.gz/pulumi_azure_native-2.5.1a1693590910/pulumi_azure_native/apimanagement/v20220901preview/list_workspace_policy_fragment_references.py
|
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'ListWorkspacePolicyFragmentReferencesResult',
'AwaitableListWorkspacePolicyFragmentReferencesResult',
'list_workspace_policy_fragment_references',
'list_workspace_policy_fragment_references_output',
]
@pulumi.output_type
class ListWorkspacePolicyFragmentReferencesResult:
"""
A collection of resources.
"""
def __init__(__self__, count=None, next_link=None, value=None):
if count and not isinstance(count, float):
raise TypeError("Expected argument 'count' to be a float")
pulumi.set(__self__, "count", count)
if next_link and not isinstance(next_link, str):
raise TypeError("Expected argument 'next_link' to be a str")
pulumi.set(__self__, "next_link", next_link)
if value and not isinstance(value, list):
raise TypeError("Expected argument 'value' to be a list")
pulumi.set(__self__, "value", value)
@property
@pulumi.getter
def count(self) -> Optional[float]:
"""
Total record count number.
"""
return pulumi.get(self, "count")
@property
@pulumi.getter(name="nextLink")
def next_link(self) -> Optional[str]:
"""
Next page link if any.
"""
return pulumi.get(self, "next_link")
@property
@pulumi.getter
def value(self) -> Optional[Sequence['outputs.ResourceCollectionResponseValue']]:
"""
A collection of resources.
"""
return pulumi.get(self, "value")
class AwaitableListWorkspacePolicyFragmentReferencesResult(ListWorkspacePolicyFragmentReferencesResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return ListWorkspacePolicyFragmentReferencesResult(
count=self.count,
next_link=self.next_link,
value=self.value)
def list_workspace_policy_fragment_references(id: Optional[str] = None,
resource_group_name: Optional[str] = None,
service_name: Optional[str] = None,
skip: Optional[int] = None,
top: Optional[int] = None,
workspace_id: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableListWorkspacePolicyFragmentReferencesResult:
"""
Lists policy resources that reference the policy fragment.
:param str id: A resource identifier.
:param str resource_group_name: The name of the resource group. The name is case insensitive.
:param str service_name: The name of the API Management service.
:param int skip: Number of records to skip.
:param int top: Number of records to return.
:param str workspace_id: Workspace identifier. Must be unique in the current API Management service instance.
"""
__args__ = dict()
__args__['id'] = id
__args__['resourceGroupName'] = resource_group_name
__args__['serviceName'] = service_name
__args__['skip'] = skip
__args__['top'] = top
__args__['workspaceId'] = workspace_id
opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
__ret__ = pulumi.runtime.invoke('azure-native:apimanagement/v20220901preview:listWorkspacePolicyFragmentReferences', __args__, opts=opts, typ=ListWorkspacePolicyFragmentReferencesResult).value
return AwaitableListWorkspacePolicyFragmentReferencesResult(
count=pulumi.get(__ret__, 'count'),
next_link=pulumi.get(__ret__, 'next_link'),
value=pulumi.get(__ret__, 'value'))
@_utilities.lift_output_func(list_workspace_policy_fragment_references)
def list_workspace_policy_fragment_references_output(id: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
service_name: Optional[pulumi.Input[str]] = None,
skip: Optional[pulumi.Input[Optional[int]]] = None,
top: Optional[pulumi.Input[Optional[int]]] = None,
workspace_id: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[ListWorkspacePolicyFragmentReferencesResult]:
"""
Lists policy resources that reference the policy fragment.
:param str id: A resource identifier.
:param str resource_group_name: The name of the resource group. The name is case insensitive.
:param str service_name: The name of the API Management service.
:param int skip: Number of records to skip.
:param int top: Number of records to return.
:param str workspace_id: Workspace identifier. Must be unique in the current API Management service instance.
"""
...
|
PypiClean
|
/openerp-core-7.0.406.tar.gz/openerp-core-7.0.406/openerp/tools/which.py
|
__docformat__ = 'restructuredtext en'
__all__ = 'which which_files pathsep defpath defpathext F_OK R_OK W_OK X_OK'.split()
import sys
from os import access, defpath, pathsep, environ, F_OK, R_OK, W_OK, X_OK
from os.path import exists, dirname, split, join
windows = sys.platform.startswith('win')
defpath = environ.get('PATH', defpath).split(pathsep)
if windows:
defpath.insert(0, '.') # can insert without checking, when duplicates are removed
# given the quite usual mess in PATH on Windows, let's rather remove duplicates
seen = set()
defpath = [dir for dir in defpath if dir.lower() not in seen and not seen.add(dir.lower())]
del seen
defpathext = [''] + environ.get('PATHEXT',
'.COM;.EXE;.BAT;.CMD;.VBS;.VBE;.JS;.JSE;.WSF;.WSH;.MSC').lower().split(pathsep)
else:
defpathext = ['']
def which_files(file, mode=F_OK | X_OK, path=None, pathext=None):
""" Locate a file in a path supplied as a part of the file name,
or the user's path, or a supplied path.
The function yields full paths (not necessarily absolute paths),
in which the given file name matches an existing file in a directory on the path.
>>> def test_which(expected, *args, **argd):
... result = list(which_files(*args, **argd))
... assert result == expected, 'which_files: %s != %s' % (result, expected)
...
... try:
... result = [ which(*args, **argd) ]
... except IOError:
... result = []
... assert result[:1] == expected[:1], 'which: %s != %s' % (result[:1], expected[:1])
>>> if windows: cmd = environ['COMSPEC']
>>> if windows: test_which([cmd], 'cmd')
>>> if windows: test_which([cmd], 'cmd.exe')
>>> if windows: test_which([cmd], 'cmd', path=dirname(cmd))
>>> if windows: test_which([cmd], 'cmd', pathext='.exe')
>>> if windows: test_which([cmd], cmd)
>>> if windows: test_which([cmd], cmd, path='<nonexistent>')
>>> if windows: test_which([cmd], cmd, pathext='<nonexistent>')
>>> if windows: test_which([cmd], cmd[:-4])
>>> if windows: test_which([cmd], cmd[:-4], path='<nonexistent>')
>>> if windows: test_which([], 'cmd', path='<nonexistent>')
>>> if windows: test_which([], 'cmd', pathext='<nonexistent>')
>>> if windows: test_which([], '<nonexistent>/cmd')
>>> if windows: test_which([], cmd[:-4], pathext='<nonexistent>')
>>> if not windows: sh = '/bin/sh'
>>> if not windows: test_which([sh], 'sh')
>>> if not windows: test_which([sh], 'sh', path=dirname(sh))
>>> if not windows: test_which([sh], 'sh', pathext='<nonexistent>')
>>> if not windows: test_which([sh], sh)
>>> if not windows: test_which([sh], sh, path='<nonexistent>')
>>> if not windows: test_which([sh], sh, pathext='<nonexistent>')
>>> if not windows: test_which([], 'sh', mode=W_OK) # not running as root, are you?
>>> if not windows: test_which([], 'sh', path='<nonexistent>')
>>> if not windows: test_which([], '<nonexistent>/sh')
"""
filepath, file = split(file)
if filepath:
path = (filepath,)
elif path is None:
path = defpath
elif isinstance(path, str):
path = path.split(pathsep)
if pathext is None:
pathext = defpathext
elif isinstance(pathext, str):
pathext = pathext.split(pathsep)
if not '' in pathext:
pathext.insert(0, '') # always check command without extension, even for custom pathext
for dir in path:
basepath = join(dir, file)
for ext in pathext:
fullpath = basepath + ext
if exists(fullpath) and access(fullpath, mode):
yield fullpath
def which(file, mode=F_OK | X_OK, path=None, pathext=None):
""" Locate a file in a path supplied as a part of the file name,
or the user's path, or a supplied path.
The function returns full path (not necessarily absolute path),
in which the given file name matches an existing file in a directory on the path,
or raises IOError(errno.ENOENT).
>>> # for doctest see which_files()
"""
try:
return iter(which_files(file, mode, path, pathext)).next()
except StopIteration:
try:
from errno import ENOENT
except ImportError:
ENOENT = 2
raise IOError(ENOENT, '%s not found' % (mode & X_OK and 'command' or 'file'), file)
if __name__ == '__main__':
import doctest
doctest.testmod()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
PypiClean
|
/napari_mm3-0.0.14-py3-none-any.whl/napari_mm3/_deriving_widgets.py
|
from datetime import datetime
from magicgui.widgets import (
Container,
FileEdit,
LineEdit,
PushButton,
RangeEdit,
ComboBox,
)
from pathlib import Path
from .utils import TIFF_FILE_FORMAT_NO_PEAK, TIFF_FILE_FORMAT_PEAK
import h5py
import pickle
import yaml
import json
import tifffile as tiff
import re
import time
import sys
import traceback
# print a warning
def warning(*objs):
print(time.strftime("%H:%M:%S WARNING:", time.localtime()), *objs, file=sys.stderr)
def information(*objs):
print(time.strftime("%H:%M:%S", time.localtime()), *objs, file=sys.stdout)
def load_tiff(tiff_location: Path):
with tiff.TiffFile(tiff_location) as tif:
return tif.asarray()
def load_hdf5(hdf5_location: Path, dataset_name: str):
with h5py.File(hdf5_location, "r") as h5f:
return h5f[dataset_name]
def gen_tiff_filename(prefix, fov_id: int, postfix: str, peak_id: int = None):
if peak_id:
return TIFF_FILE_FORMAT_PEAK % (prefix, fov_id, peak_id, postfix)
return TIFF_FILE_FORMAT_NO_PEAK % (prefix, fov_id, postfix)
def load_stack_params(params, fov_id, peak_id, postfix="c1"):
"""
Deprecated.
Loads an image stack.
Supports reading TIFF stacks or HDF5 files.
Parameters
----------
fov_id : int
The FOV id
peak_id : int
The peak (channel) id. Dummy None value incase color='empty'
postfix : str
The image stack type to return. Can be:
c1 : phase stack
cN : where n is an integer for arbitrary color channel
sub_cN : subtracted images
seg_cN : segmented images
empty : get the empty channel for this fov, slightly different
Returns
-------
image_stack : np.ndarray
The image stack through time. Shape is (t, y, x)
"""
# things are slightly different for empty channels
if "empty" in postfix:
if params["output"] == "TIFF":
img_name = gen_tiff_filename(
prefix=params["experiment_name"], fov_id=fov_id, postfix=postfix
)
return load_tiff(params["empty_dir"] / img_name)
if params["output"] == "HDF5":
return load_hdf5(params["hdf5_dir"] / f"xy{fov_id:03d}.hdf5", postfix)
# load normal images for either TIFF or HDF5
if params["output"] == "TIFF":
if postfix[0] == "c":
img_dir = params["chnl_dir"]
elif "sub" in postfix:
img_dir = params["sub_dir"]
elif "foci" in postfix:
img_dir = params["foci_seg_dir"]
elif "seg" in postfix:
postfix = "seg_otsu"
if "seg_img" in params.keys():
postfix = params["seg_img"]
if "track" in params.keys():
postfix = params["track"]["seg_img"]
img_dir = params["seg_dir"]
img_filename = gen_tiff_filename(
prefix=params["experiment_name"],
fov_id=fov_id,
peak_id=peak_id,
postfix=postfix,
)
return load_tiff(img_dir / img_filename)
if params["output"] == "HDF5":
dataset_name = f"channel_{peak_id:04d}/p{peak_id:04d}_{postfix}"
filename = f"xy{fov_id:03d}.hdf5"
return load_hdf5(params["hdf5_dir"] / filename, dataset_name)
def load_specs(analysis_dir: Path):
"""Load specs file which indicates which channels should be analyzed, used as empties, or ignored."""
try:
with (analysis_dir / "specs.yaml").open("r") as specs_file:
specs = yaml.safe_load(specs_file)
except:
try:
with (analysis_dir / "specs.pkl").open("rb") as specs_file:
specs = pickle.load(specs_file)
except ValueError:
warning("Could not load specs file.")
return specs
# load the time table and add it to the global params
def load_time_table(ana_dir: Path):
"""Add the time table dictionary to the params global dictionary.
This is so it can be used during Cell creation.
"""
# try first for yaml, then for pkl
try:
with (ana_dir / "time_table.yaml").open("rb") as time_table_file:
return yaml.safe_load(time_table_file)
except:
with (ana_dir / "time_table.pkl").open("rb") as time_table_file:
return pickle.load(time_table_file)
def get_valid_planes(TIFF_folder):
found_files = TIFF_folder.glob("*.tif")
# pull out first tiff to extract dims
filepath = [f for f in found_files][0]
dim = tiff.imread(filepath).ndim
if dim == 3:
# first axis has imaging planes
num_channels = dim
elif dim == 2:
# only one plane (phase or fluorescence)
num_channels = 1
else:
raise ValueError(f"Expected 2 or 3 dimensions but found {dim}.")
return [f"c{c+1}" for c in range(num_channels)]
def get_valid_fovs(TIFF_folder):
found_files = TIFF_folder.glob("*.tif")
filenames = [f.name for f in found_files]
get_fov_regex = re.compile(r"xy(\d+)", re.IGNORECASE)
fov_strings = set(get_fov_regex.findall(filename)[0] for filename in filenames)
fovs = map(int, sorted(fov_strings))
return list(fovs)
def get_valid_times(TIFF_folder):
found_files = TIFF_folder.glob("*.tif")
filenames = [f.name for f in found_files]
get_time_regex = re.compile(r"t(\d+)", re.IGNORECASE)
time_strings = set(get_time_regex.findall(filename)[0] for filename in filenames)
times = list(map(int, sorted(time_strings)))
return (min(times), max(times))
def _serialize_widget(widget):
if isinstance(widget, RangeEdit) or isinstance(widget, TimeRangeSelector):
print("Range edit spotted!")
start_value = widget.start.value
final_value = widget.stop.value
return (start_value, final_value)
if isinstance(widget, PushButton):
return None
if isinstance(widget, FileEdit):
print(str(widget.value))
return str(widget.value)
return widget.value
def _apply_seralized_widget(widget, value):
if isinstance(widget, RangeEdit) or isinstance(widget, TimeRangeSelector):
print("Range edit spotted!")
widget.start.value = value[0]
widget.stop.value = value[1]
return
if isinstance(widget, PushButton):
return
widget.value = value
def range_string_to_indices(range_string):
try:
range_string = range_string.replace(" ", "")
split = range_string.split(",")
indices = []
for items in split:
# If it's a range
if "-" in items:
limits = list(map(int, items.split("-")))
if len(limits) == 2:
# Make it an inclusive range, as users would expect
limits[1] += 1
indices += list(range(limits[0], limits[1]))
# If it's a single item.
else:
indices += [int(items)]
print("Index range string valid!")
return indices
except:
print(
"Index range string invalid. Returning empty range until a new string is specified."
)
return []
class MM3Container(Container):
"""
Preset class for MM3 widgets.
In order to use, extend the class and override the following methods:
* create_widgets: This is a constructor. If the MM3Container finds valid TIFFs, these widgets will be added to the UI
* run: This is the function that will be executed when the user clicks the 'run' button.
This class supplies the followng fields from the user directly:
* experiment_name (self-explanatory)
* analysis_folder (the location to which outputs will be written)
* TIFF_folder (the folder in which it will look for input TIFFs)
It will also acquire the following metadata for you:
* valid_fovs (a range of valid fovs)
* valid_times (a range of valid times)
* valid_planes (a set of valid microscopy (eg, phase, fluorescence, etc))
Finally, it will also automatically write any 'runs' to history.json, and give you the ability to restore the most recent run's settings.
"""
def __init__(self, napari_viewer, validate_folders: bool = True):
super().__init__()
self.viewer = napari_viewer
self.validate_folders = validate_folders
self.analysis_folder_widget = FileEdit(
mode="d",
label="analysis folder",
tooltip="Required. Location for outputting analysis. If in doubt, leave as default.",
value=Path(".") / "analysis",
)
self.TIFF_folder_widget = FileEdit(
mode="d",
label="TIFF folder",
tooltip="Required. Location for the input images. If in doubt, leave as default.",
value=Path(".") / "TIFF",
)
self.experiment_name_widget = LineEdit(
label="output prefix",
tooltip="Optional. A prefix that will be prepended to output files. If in doubt, leave blank.",
)
self.load_recent_widget = PushButton(
label="load last run settings",
tooltip="load settings from the most recent run. we look for past runs in ./.history",
)
self.load_data_widget = PushButton(
label="set new directories",
tooltip="Load data from specified directories.",
)
self.run_widget = PushButton(
label="run",
)
self.experiment_name_widget.changed.connect(self._set_experiment_name)
self.TIFF_folder_widget.changed.connect(self._set_TIFF_folder)
self.analysis_folder_widget.changed.connect(self._set_analysis_folder)
self.load_recent_widget.clicked.connect(self._load_most_recent_settings)
self.load_data_widget.clicked.connect(self._set_valid_fovs)
self.load_data_widget.clicked.connect(self._set_valid_planes)
self.load_data_widget.clicked.connect(self._set_valid_times)
self.load_data_widget.clicked.connect(self._delete_extra_widgets)
self.load_data_widget.clicked.connect(self._load_from_data_conditional)
self.run_widget.clicked.connect(self._save_settings)
self.run_widget.clicked.connect(self._run_conditional)
self._set_experiment_name()
self._set_TIFF_folder()
self._set_analysis_folder()
self._set_valid_fovs()
self._set_valid_planes()
self._set_valid_times()
self.append(self.experiment_name_widget)
self.append(self.TIFF_folder_widget)
self.append(self.analysis_folder_widget)
self.append(self.load_recent_widget)
self.append(self.load_data_widget)
self._load_from_data_conditional()
def create_widgets(self):
"""Method to override. Place all widget initialization here."""
pass
def run(self):
"""Method to override. Any execution methods go here."""
pass
def _load_from_data_conditional(self):
if self.validate_folders and not self._validate_folders():
print(f"A folder validation was requested but not successful.\n")
print("Limited traceback:")
traceback.print_stack(limit=1)
return
if self.found_planes and self.found_fovs and self.found_times:
self.create_widgets()
self.append(self.run_widget)
return
print(f"Failed to find a key piece of info:")
print(f"planes found: {self.found_planes}")
print(f"fovs found: {self.found_fovs}")
print(f"times found: {self.found_times}")
print("Limited traceback:")
traceback.print_stack(limit=1)
def _run_conditional(self):
if self.found_planes and self.found_fovs and self.found_times:
self.run()
def _is_preset_widget(self, widget):
labels = {
self.experiment_name_widget.label,
self.TIFF_folder_widget.label,
self.analysis_folder_widget.label,
self.load_data_widget.label,
self.load_recent_widget.label,
}
return widget.label in labels
def _delete_extra_widgets(self):
"""Delete any widgets that come after the 'reload directories' button.
This allows for easy UI resets in deriving widgets (see, e.g. _track.py)"""
while not self._is_preset_widget(self[-1]):
self.pop()
def _set_analysis_folder(self):
self.analysis_folder = self.analysis_folder_widget.value
def _set_experiment_name(self):
self.experiment_name = self.experiment_name_widget.value
def _set_TIFF_folder(self):
self.TIFF_folder = self.TIFF_folder_widget.value
def _set_valid_fovs(self):
try:
self.valid_fovs = get_valid_fovs(self.TIFF_folder)
self.found_fovs = True
except:
self.found_fovs = False
def _set_valid_times(self):
try:
self.valid_times = get_valid_times(self.TIFF_folder)
self.found_times = True
except:
self.found_times = False
def _set_valid_planes(self):
try:
self.valid_planes = get_valid_planes(self.TIFF_folder)
self.found_planes = True
except FileNotFoundError:
self.found_planes = False
def _validate_folders(self):
return self.TIFF_folder.exists() and self.analysis_folder.exists()
def _get_most_recent_run(self):
"""
Gets the parameters from the most recent run of the current
widget.
"""
try:
with open("./history.json", "r") as h:
history = json.load(h)
except:
return {}
# get the most recent run of the relevant widget.
old_params = {}
for historic_widget_name, _, params in reversed(history):
if historic_widget_name == self.parent.name:
old_params = params
break
return old_params
def _load_most_recent_settings(self):
"""
Load most most recent entry in the history file that has
name == 'widget_name'.
Apply the saved parameters to the currently extant widgets.
"""
old_params = self._get_most_recent_run()
if old_params:
# assign old_params to current widgets.
for widget in self:
if self._is_preset_widget(widget):
continue
_apply_seralized_widget(widget, old_params.get(widget.label, ""))
def _save_settings(self):
"""
Save the current settings for all non-preset widgets.
name == 'widget_name'.
Apply the saved parameters to the currently extant widgets.
"""
widget_name = self.parent.name
history = []
if Path("./history.json").exists():
with open("./history.json", "r") as h:
history = json.load(h)
# Generate a dictionary of the current parameters.
current_params = {}
for widget in self:
if self._is_preset_widget(widget):
continue
if isinstance(widget, PushButton):
continue
current_params[widget.label] = _serialize_widget(widget)
# If the most recent run has the same parameters as our current run, do nothing.
old_params = self._get_most_recent_run()
if old_params and old_params == current_params:
return
timestamp = datetime.now()
history.append((widget_name, str(timestamp), current_params))
with open("./history.json", "w") as h:
json.dump(history, h, indent=2)
class TimeRangeSelector(RangeEdit):
def __init__(self, permitted_times):
label_str = f"time range (frames {permitted_times[0]}-{permitted_times[1]})"
super().__init__(
label=label_str,
tooltip="The time range to analyze. Note that 'step' is currently not supported.",
start=permitted_times[0],
stop=permitted_times[1],
min=permitted_times[0],
max=permitted_times[1],
)
class InteractiveSpinBox(Container):
"""
Our custom version of magicgui's 'SpinBox' widget.
* Supports floats (auto-rounds to 3 decimal points).
* 'Atomic' updates: If an expensive (single-thread) method is called on value change, this will work
as expected (unlike the default spinbox).
Try to only use this in contexts where you would like to perform single-threaded operations
upon changing a spinbox.
"""
def __init__(
self, min=0, max=99999, value=1, step=1, tooltip="", use_float=False, label=""
):
super().__init__(
layout="horizontal",
labels=False,
tooltip=tooltip,
)
self.margins = (0, 0, 0, 0)
self.min = min
self.max = max
self.step = step
self.value = value
self.use_float = use_float
self.name = label
self.text_widget = LineEdit(
value=self.value,
)
self.increment_widget = PushButton(label="+")
self.decrement_widget = PushButton(label="-")
self.text_widget.changed.connect(self._set_value)
self.increment_widget.changed.connect(self._increment)
self.decrement_widget.changed.connect(self._decrement)
self.append(self.text_widget)
self.append(self.increment_widget)
self.append(self.decrement_widget)
def connect(self, func):
self.text_widget.changed.connect(func)
def _set_value(self):
try:
if self.use_float:
self.value = float(self.text_widget.value)
else:
self.value = int(self.text_widget.value)
except:
# Casting failure is not a big deal. No point throwing an exception.
print("Failed to turn text into a number.")
return
# Enforce bounds on self.value
self.value = max(self.min, self.value)
self.value = min(self.max, self.value)
def _increment(self):
# Update internal value, then update displayed value.
# Desyncing the 'display' and 'internal' values allows us to display
# rounded floating points.
self.value = self.value + self.step
self.value = min(self.max, self.value)
if self.use_float:
self.text_widget.value = f"{self.value:.3f}"
else:
self.text_widget.value = self.value
def _decrement(self):
# Update internal value, then update displayed value.
self.value = self.value - self.step
self.value = max(self.min, self.value)
if self.use_float:
self.text_widget.value = f"{self.value:.3f}"
else:
self.text_widget.value = self.value
class FOVChooserSingle(InteractiveSpinBox):
def __init__(self, valid_fovs):
self.min_FOV = min(valid_fovs)
self.max_FOV = max(valid_fovs)
super().__init__(
label=f"FOV ({self.min_FOV}-{self.max_FOV})",
min=self.min_FOV,
max=self.max_FOV,
value=self.min_FOV,
step=1,
tooltip="Pick an FOV",
use_float=False,
)
class PlanePicker(ComboBox):
def __init__(
self,
permitted_planes,
label="microscopy plane",
tooltip="The plane you would like to use.",
):
super().__init__(label=label, choices=permitted_planes, tooltip=tooltip)
class FOVChooser(LineEdit):
"""
Widget for choosing multiple FOVs.
Use connect_callback(...) instead of super().changed.connect(...).
Additionally, the input function for connect_callback accepts a single
parameter (the range of FOVs)
"""
def __init__(self, permitted_FOVs, custom_label = None):
self.min_FOV = min(permitted_FOVs)
self.max_FOV = max(permitted_FOVs)
if custom_label:
label_str = f"{custom_label} ({self.min_FOV}-{self.max_FOV})"
else:
label_str = f"FOVs ({self.min_FOV}-{self.max_FOV})"
value_str = f"{self.min_FOV}-{self.max_FOV}"
super().__init__(
label=label_str,
value=value_str,
tooltip="A list of FOVs to analyze. Ranges and comma separated values allowed (e.g. '1-30', '2-4,15,18'.)",
)
def connect_callback(self, func):
"""Replaces self.changed.connect(...).
Interprets any text in the box as a list of FOVs.
Thus 'func' should operate on a list of FOVs, filtered by those that actually exist in the TIFs.
"""
def func_with_range():
user_fovs = range_string_to_indices(self.value)
if user_fovs:
func(user_fovs)
self.changed.connect(func_with_range)
|
PypiClean
|
/applepy-ui-0.0.5.tar.gz/applepy-ui-0.0.5/applepy/scenes/window.py
|
from typing import Callable, Optional, Union
from uuid import uuid4
from .. import Scene, Size, Point
from ..views.menu import MainMenu
from ..base.binding import AbstractBinding, bindable
from ..base.mixins import Modifiable
from ..base.utils import attachable, try_call
from ..base.view import View
from ..backend import _MACOS, _IOS
from ..base.errors import (
AddingMultipleChildrenToNonStackableViewError,
NotSupportedError
)
from ..base.transform_mixins import (
BackgroundColor,
AlphaValue,
HasShadow,
TitledControl,
Visible
)
if _MACOS:
from ..backend.app_kit import (
NSApp,
NSObject,
NSWindow,
NSWindowStyleMask,
NSBackingStoreType,
NSRect, NSPoint, NSSize,
objc_method
)
class Window(Scene,
Modifiable,
BackgroundColor,
AlphaValue,
HasShadow,
TitledControl,
Visible):
""" Display a MacOS Window. """
@bindable(Size)
def size(self) -> Size:
"""
Window content dimensions.
Data Binding: This property is a read-only bind. Setting this value will have no effect.
Returns:
Size: the dimensions of the window's content area.
"""
return self._size
@size.setter
def size(self, val: Size) -> None:
self._size = val
@bindable(Point)
def position(self) -> Point:
"""
Window position (origin).
Data Binding: This property is a read-only bind. Setting this value will have no effect.
Returns:
Point: the position (origin) of the window.
"""
return self._position
@position.setter
def position(self, val: Point) -> None:
self._position = val
@bindable(bool)
def full_screen(self) -> bool:
"""
Gets whether window is in full-screen mode.
Data Binding: This property is a read-only bind. Setting this value will have no effect.
Returns:
bool: `True` if window is in full-screen mode, `False` otherwise.
"""
return self._full_screen
@full_screen.setter
def full_screen(self, val: bool) -> None:
self._full_screen = val
@attachable(MainMenu)
def menu(self) -> MainMenu:
"""
A main menu to be displayed if this is the main window.
Do not call it directly, instead, add a MainMenu element to the window's stack.
Returns:
MainMenu: The application's main menu when this is the main window.
"""
return self._menu
@menu.setter
def menu(self, val: MainMenu) -> None:
self._menu = val
NSApp.mainMenu = val._main_menu
# @attachable(Toolbar)
# def toolbar(self) -> Toolbar:
# return self._toolbar
# @toolbar.setter
# def toolbar(self, val: Toolbar) -> None:
# self._toolbar = val
def __init__(self,
*,
title: Union[AbstractBinding, str],
size: Size,
position: Point = Point(0, 0),
borderless: bool = False,
titled: bool = True,
closable: bool = True,
resizable: bool = True,
miniaturizable: bool = True,
full_screen: bool = False,
full_size_content_view: bool = False,
utility_window: bool = False,
doc_modal_window: bool = False,
non_activating_panel: bool = False,
hud_window: bool = False,
min_size: Optional[Size] = None,
max_size: Optional[Size] = None,
on_close: Optional[Callable] = None,
on_resized: Optional[Callable] = None,
on_moved: Optional[Callable] = None,
on_full_screen_changed: Optional[Callable] = None,
on_minimized: Optional[Callable] = None) -> None:
"""
Add a new `Window` scene, which generates a native MacOS window.
Windows, as scenes, can only be a top level item in the App's body method, they cannot be a stacked or child of a view.
The first `Window` instance added to the application will be set as the main window for the application, but for it to be effective,
return the window object at the end of the body method.
Use it in a `with` statement in order to add children to it. Example:
>>> with Window() as w:
Label(text='hello world')
return w
If you need more than one widget in the same window, use layout widgets such as the `StackView`:
>>> with Window() as w:
with VerticalStack():
Label(text='I am at the top')
Label(text='I am at the bottom')
return w
A window can also receive modifiers. For instance, to bind the `visible` property to an external bindable, use:
>>> with Window() as w:
return w.is_visible(Binding(MyApp.show_main_window, self))
Args:
title (Union[AbstractBinding, str]): The window's title. This parameter accepts binding. Binding can also be set later with the `set_title` modifier.
size (Size): The window's initial content size.
position (Point, optional): The window's initial position (origin). Defaults to Point(0, 0).
borderless (bool, optional): Whether the window should be borderless. Defaults to False.
titled (bool, optional): Whether the window should display the title bar. Defaults to True.
closable (bool, optional): Whether the window should be closable. Defaults to True.
resizable (bool, optional): Whether the window should be resizable. Defaults to True.
miniaturizable (bool, optional): Whether the window should be minimizable. Defaults to True.
full_screen (bool, optional): Whether the window should be open in full-screen mode. Defaults to False.
full_size_content_view (bool, optional): Whether the window should have a full sized content view. Defaults to False.
utility_window (bool, optional): Whether the window should be displayed as a utility window. Defaults to False.
doc_modal_window (bool, optional): Whether the window is a document-modal panel. Defaults to False.
non_activating_panel (bool, optional): Whether the window is a `Panel` that does not activate the owning app. Defaults to False.
hud_window (bool, optional): Whether the window should be a HUD panel. Defaults to False.
min_size (Optional[Size], optional): The window's minimum size. Defaults to None.
max_size (Optional[Size], optional): The window's maximum size. Defaults to None.
on_close (Optional[Callable], optional): Action to be executed when the window closes. Defaults to None.
on_resized (Optional[Callable], optional): Action to be executed when the window is resized. Defaults to None.
on_moved (Optional[Callable], optional): Action to be executed when the window is moved. Defaults to None.
on_full_screen_changed (Optional[Callable], optional): Action to be executed when the window enters or exits full-screen mode. Defaults to None.
on_minimized (Optional[Callable], optional): Action to be executed when the window is minimized. Defaults to None.
"""
if _IOS:
raise NotSupportedError()
Scene.__init__(self, (type(None), Window))
Modifiable.__init__(self)
BackgroundColor.__init__(self)
TitledControl.__init__(self, title)
@objc_method
def windowWillClose_(_self, sender):
try_call(on_close)
@objc_method
def windowDidEndLiveResize_(_self, notification):
w_rect = self.window.contentRectForFrameRect_(self.window.frame)
self.size = Size(int(w_rect.size.width), int(w_rect.size.height))
self.position = Point(int(w_rect.origin.x), int(w_rect.origin.y))
try_call(on_resized)
@objc_method
def windowDidMove_(_self, notification):
w_rect = self.window.contentRectForFrameRect_(self.window.frame)
self.position = Point(int(w_rect.origin.x), int(w_rect.origin.y))
try_call(on_moved)
@objc_method
def windowWillEnterFullScreen_(_self, notification):
self.full_screen = True
try_call(on_full_screen_changed)
@objc_method
def windowWillExitFullScreen_(_self, notification):
self.full_screen = False
try_call(on_full_screen_changed)
@objc_method
def windowDidMiniaturize_(self, notification):
try_call(on_minimized)
_WindowDelegate = type(f'_WindowDelegate_{uuid4().hex[:8]}', (NSObject,), {
'windowWillClose_': windowWillClose_,
'windowDidEndLiveResize_': windowDidEndLiveResize_,
'windowDidMove_': windowDidMove_,
'windowWillEnterFullScreen_': windowWillEnterFullScreen_,
'windowWillExitFullScreen_': windowWillExitFullScreen_,
'windowDidMiniaturize_': windowDidMiniaturize_
})
self.window = None
self._controller = _WindowDelegate.alloc().init()
# bindables
self._size = size
self._position = position
self._full_screen = full_screen
# regular properties
self.borderless = borderless
self.titled = titled
self.closable = closable
self.resizable = resizable
self.miniaturizable = miniaturizable
self.full_size_content_view = full_size_content_view
self.utility_window = utility_window
self.doc_modal_window = doc_modal_window
self.non_activating_panel = non_activating_panel
self.hud_window = hud_window
self.min_size = min_size
self.max_size = max_size
# child views
self.content_view: Optional[NSObject] = None
# attachables
self._menu: Optional[View] = None
self._toolbar: Optional[View] = None
# inferred properties
self.is_main = False
def body(self) -> Scene:
"""
Window's body method.
It can be overriden in the View code.
It is used internally for rendering the components, do not call it directly.
Returns:
Window: self
"""
return super().body()
def get_ns_object(self) -> NSWindow:
"""
Window's NSWindow instance.
Do not call it directly, use the ns_object property instead.
Returns:
NSWindow: window's NSWindow instance.
"""
return self.window
def parse(self) -> Scene:
"""
Window's parse method.
It is used internally for rendering the components. Do not call it directly.
Returns:
Window: self
"""
style_mask = 0
if self.borderless:
style_mask |= NSWindowStyleMask.NSWindowStyleMaskBorderless.value
if self.titled:
style_mask |= NSWindowStyleMask.NSWindowStyleMaskTitled.value
if self.closable:
style_mask |= NSWindowStyleMask.NSWindowStyleMaskClosable.value
if self.resizable:
style_mask |= NSWindowStyleMask.NSWindowStyleMaskResizable.value
if self.miniaturizable:
style_mask |= NSWindowStyleMask.NSWindowStyleMaskMiniaturizable.value
if self.full_screen:
style_mask |= NSWindowStyleMask.NSWindowStyleMaskFullScreen.value
if self.full_size_content_view:
style_mask |= NSWindowStyleMask.NSWindowStyleMaskFullSizeContentView.value
if self.doc_modal_window:
style_mask |= NSWindowStyleMask.NSWindowStyleMaskDocModalWindow.value
if self.non_activating_panel:
style_mask |= NSWindowStyleMask.NSWindowStyleMaskNonactivatingPanel.value
if self.hud_window:
style_mask |= NSWindowStyleMask.NSWindowStyleMaskHUDWindow.value
self.window = NSWindow.alloc().initWithContentRect_styleMask_backing_defer_(
NSRect(NSPoint(self.position.x, self.position.y),
NSSize(self.size.width, self.size.height)),
style_mask,
NSBackingStoreType.NSBackingStoreBuffered.value,
False
)
self.window.delegate = self._controller
self.window.orderFrontRegardless()
self.window.title = self.title
if self.min_size:
self.window.minSize = NSSize(self.min_size.width, self.min_size.height)
if self.max_size:
self.window.maxSize = NSSize(self.max_size.width, self.max_size.height)
Scene.parse(self)
Modifiable.parse(self)
return self
def set_content_view(self, content_view: NSObject) -> None:
"""
Sets the window's content view.
A window is not a stacked view, so it can only have one content view.
It is used internally for rendering the components, do not call it directly.
Args:
content_view (NSObject): The content view to be added to the window.
Raises:
AddingMultipleChildrenToNonStackableViewError: Window already has a content view.
"""
if self.content_view:
raise AddingMultipleChildrenToNonStackableViewError()
self.content_view = content_view
self.window.contentView = content_view
def center(self) -> None:
"""
Center the window in the screen.
"""
def __modifier():
self.window.center()
self._modifiers.append(__modifier)
return self
|
PypiClean
|
/python-sunlightapi-1.1.1.tar.gz/python-sunlightapi-1.1.1/README.rst
|
==================
python-sunlightapi
==================
.. warning::
This library is deprecated in favor of the more comprehensive `python-sunlight <http://python-sunlight.readthedocs.org>`_.
Python library for interacting with the Sunlight Labs API.
The Sunlight Labs API provides legislator information and district lookups.
(http://services.sunlightlabs.com/api/)
python-sunlightapi is a project of Sunlight Labs (c) 2010.
Written by James Turk <[email protected]>.
All code is under a BSD-style license, see LICENSE for details.
Homepage: http://pypi.python.org/pypi/python-sunlightapi/
Source: http://github.com/sunlightlabs/python-sunlightapi/
The package can be installed via pip, easy_install or by downloading the
source and running ``python setup.py install``.
Requirements
============
python >= 2.4
simplejson >= 1.8 (not required with python 2.6, will use built in json module)
Usage
=====
To initialize the api, all that is required is for it to be imported and for an
API key to be defined.
(If you do not have an API key visit http://services.sunlightlabs.com/api/ to
register for one.)
Import ``sunlight`` from ``sunlightapi``:
>>> from sunlightapi import sunlight, SunlightApiError
And set your API key:
>>> sunlight.apikey = 'your-key-here'
-------------------
legislators methods
-------------------
The legislators namespace is comprised of several functions:
* legislators.get - get a single legislator
* legislators.getList - get zero or more legislators
* legislators.search - fuzzy search for legislators by name
* legislators.allForZip - get all legislators representing a zipcode
* legislators.allForLatLong - get all legislators representing a point
get and getList
---------------
legislators.get and legislators.getList both take any number of parameters and
return all legislators that match the provided criteria. These parameters are
also the ones returned in each legislator object.
The available parameters are:
* title
* firstname
* middlename
* lastname
* name_suffix
* nickname
* party
* state
* district
* in_office
* gender
* birthdate
* phone
* fax
* website
* webform
* email
* congress_office
* bioguide_id
* votesmart_id
* fec_id
* govtrack_id
* crp_id
* eventful_id
* congresspedia_url
* twitter_id
* official_rss
* youtube_url
* senate_class
* birthdate
To get the representative that represents NC-4:
>>> print(sunlight.legislators.get(state='NC', district='4'))
Rep. David Price (D-NC)
legislators.getList works much the same way, but returns a list. It is
possible to do a more complex query, for instance
"all legislators from New York that are Republicans":
>>> for leg in sunlight.legislators.getList(state='NY', party='R'):
... print(leg)
Rep. Pete King (R-NY)
Rep. Christopher Lee (R-NY)
**It is preferred that you do not call getList without parameters as it will
pull down all legislators, if you need to do this feel free to grab the provided
dump of the API data available at http://services.sunlightlabs.com/api/**
search
------
legislators.search allows you to query the database with a less than perfect
representation of a legislator's name.
The search is tolerant of use of nicknames, lastname-firstname juxtaposition,
initials and minor misspellings. The return is a set of results that include
legislator records as well as certainity scores between 0 and 1 (where 1 is
most certain).
Search takes two optional parameters
``threshold``
the minimum score you want to return, the default is 0.8 and you should rarely go lower than 0.7.
``all_legislators``
if True will search legislators in the API that are no longer in office (default is False)
An example usage of search is as follows:
>>> for r in sunlight.legislators.search('Diane Finestine'):
... print(r)
0.92125 Sen. Dianne Feinstein (D-CA)
It is also possible to get multiple results:
>>> for r in sunlight.legislators.search('Frank'):
... print(r)
1.0 Rep. Barney Frank (D-MA)
0.972222222222 Rep. Trent Franks (R-AZ)
0.952380952381 Sen. Al Franken (D-MN)
allForZip
---------
legislators.allForZip retrieves all legislators that represent a given zipcode.
This typically means two senators and one (or more) representatives.
To get all legislators that represent the 27511 zipcode:
>>> for legislator in sunlight.legislators.allForZip(27511):
... print(legislator)
Rep. David Price (D-NC)
Sen. Kay Hagan (D-NC)
Sen. Richard Burr (R-NC)
Rep. Brad Miller (D-NC)
allForLatLong
-------------
legislators.allForLatLong retrieves all legislators representing a given point.
This is a shortcut for calling districts.getDistrictFromLatLong and then
looking up the district representative and state senators.
To get all legislators that represent a location in western PA at 41.92, -80.14:
>>> for legislator in sunlight.legislators.allForLatLong(41.92, -80.14):
... print(legislator)
Sen. Bob Casey (D-PA)
Sen. Arlen Specter (D-PA)
Rep. Kathy Dahlkemper (D-PA)
-----------------
districts methods
-----------------
The districts namespace is comprised of two functions:
* districts.getDistrictsFromZip
* districts.getDistrictFromLatLong
getDistrictsFromZip
-------------------
districts.getDistrictsFromZip fetches all districts that overlap a given
zipcode.
To get all districts that overlap 14623:
>>> for district in sunlight.districts.getDistrictsFromZip(14623):
... print(district)
NY-29
NY-28
getDistrictFromLatLong
----------------------
districts.getDistrictFromLatLong finds the district that a given lat-long
coordinate pair falls within.
To find out what district 61.13 N, 149.54 W falls within:
>>> print(sunlight.districts.getDistrictFromLatLong(61.13, 149.54))
AK-0
This point is in fact in Anchorage, Alaska, so this is correct.
-----------------
committee methods
-----------------
The committee namespace contains:
* committee.getList
* committee.get
* committee.allForMember
getList
-------
committee.getList gets all committees for a given chamber (House, Senate, or Joint).
To see all joint committees for the current congress:
>>> for c in sunlight.committees.getList('Joint'):
... print(c)
Joint Economic Committee
Joint Committee on Printing
Joint Committee on Taxation
Joint Committee on the Library
get
---
committee.get gets full details for a given committee, including membership and subcommittees.
Example of getting details for a committee:
>>> com = sunlight.committees.get('HSAG')
>>> print(com.name)
House Committee on Agriculture
>>> for sc in com.subcommittees:
... print(sc)
Subcommittee on Conservation, Credit, Energy, and Research
Subcommittee on Department Operations, Oversight, Nutrition and Forestry
Subcommittee on General Farm Commodities and Risk Management
Subcommittee on Horticulture and Organic Agriculture
Subcommittee on Livestock, Dairy, and Poultry
Subcommittee on Rural Development, Biotechnology, Specialty Crops, and Foreign Agriculture
>>> for m in com.members:
... print(m)
Rep. Joe Baca (D-CA)
Rep. John Boccieri (D-OH)
Rep. Leonard Boswell (D-IA)
Rep. Bobby Bright (D-AL)
Rep. Dennis Cardoza (D-CA)
Rep. Bill Cassidy (R-LA)
Rep. Travis Childers (D-MS)
Rep. Mike Conaway (R-TX)
Rep. Jim Costa (D-CA)
Rep. Henry Cuellar (D-TX)
Rep. Kathy Dahlkemper (D-PA)
Rep. Brad Ellsworth (D-IN)
Rep. Jeff Fortenberry (R-NE)
Rep. Bob Goodlatte (R-VA)
Rep. Sam Graves (R-MO)
Rep. Debbie Halvorson (D-IL)
Rep. Stephanie Herseth Sandlin (D-SD)
Rep. Tim Holden (D-PA)
Rep. Tim Johnson (R-IL)
Rep. Steven Kagen (D-WI)
Rep. Steve King (R-IA)
Rep. Larry Kissell (D-NC)
Rep. Frank Kratovil (D-MD)
Rep. Bob Latta (R-OH)
Rep. Frank Lucas (R-OK)
Rep. Blaine Luetkemeyer (R-MO)
Rep. Cynthia Lummis (R-WY)
Rep. Betsy Markey (D-CO)
Rep. Jim Marshall (D-GA)
Rep. Eric Massa (D-NY)
Rep. Mike McIntyre (D-NC)
Rep. Walt Minnick (D-ID)
Rep. Jerry Moran (R-KS)
Rep. Randy Neugebauer (R-TX)
Rep. Collin Peterson (D-MN)
Rep. Earl Pomeroy (D-ND)
Rep. Phil Roe (R-TN)
Rep. Mike Rogers (R-AL)
Rep. Mark Schauer (D-MI)
Rep. Jean Schmidt (R-OH)
Rep. Kurt Schrader (D-OR)
Rep. David Scott (D-GA)
Rep. Adrian Smith (R-NE)
Rep. G.T. Thompson (R-PA)
Rep. Tim Walz (D-MN)
allForLegislator
----------------
All for legislator shows all of a legislator's committee and subcommittee memberships.
*note that the subcommittees included are only the subcommittees that the member has a seat on*
Showing all of a legislators committees and subcommittees:
>>> for com in sunlight.committees.allForLegislator('S000148'):
... print(com)
... for sc in com.subcommittees:
... print(' '+str(sc))
Senate Committee on Rules and Administration
Senate Committee on Finance
Subcommittee on International Trade and Global Competitiveness
Subcommittee on Social Security, Pensions and Family Policy
Subcommittee on Taxation, IRS Oversight, and Long-term Growth
Joint Committee on the Library
Joint Economic Committee
Senate Commmittee on the Judiciary
Subcommittee on Administrative Oversight and the Courts
Subcommittee on Antitrust, Competition Policy and Consumer Rights
Subcommittee on Crime and Drugs
Subcommittee on Immigration, Refugees and Border Security
Subcommittee on Terrorism and Homeland Security
Joint Committee on Printing
Senate Committee on Banking, Housing, and Urban Affairs
Subcommittee on Securities, Insurance, and Investment
Subcommittee on Financial Institutions
Subcommittee on Housing, Transportation, and Community Development
|
PypiClean
|
/mypy-boto3-kms-1.28.37.tar.gz/mypy-boto3-kms-1.28.37/mypy_boto3_kms/type_defs.py
|
import sys
from datetime import datetime
from typing import IO, Any, Dict, List, Mapping, Sequence, Union
from botocore.response import StreamingBody
from .literals import (
AlgorithmSpecType,
ConnectionErrorCodeTypeType,
ConnectionStateTypeType,
CustomerMasterKeySpecType,
CustomKeyStoreTypeType,
DataKeyPairSpecType,
DataKeySpecType,
EncryptionAlgorithmSpecType,
ExpirationModelTypeType,
GrantOperationType,
KeyManagerTypeType,
KeySpecType,
KeyStateType,
KeyUsageTypeType,
MacAlgorithmSpecType,
MessageTypeType,
MultiRegionKeyTypeType,
OriginTypeType,
SigningAlgorithmSpecType,
WrappingKeySpecType,
XksProxyConnectivityTypeType,
)
if sys.version_info >= (3, 12):
from typing import Literal
else:
from typing_extensions import Literal
if sys.version_info >= (3, 12):
from typing import NotRequired
else:
from typing_extensions import NotRequired
if sys.version_info >= (3, 12):
from typing import TypedDict
else:
from typing_extensions import TypedDict
__all__ = (
"AliasListEntryTypeDef",
"BlobTypeDef",
"CancelKeyDeletionRequestRequestTypeDef",
"ResponseMetadataTypeDef",
"ConnectCustomKeyStoreRequestRequestTypeDef",
"CreateAliasRequestRequestTypeDef",
"XksProxyAuthenticationCredentialTypeTypeDef",
"GrantConstraintsTypeDef",
"TagTypeDef",
"XksProxyConfigurationTypeTypeDef",
"DeleteAliasRequestRequestTypeDef",
"DeleteCustomKeyStoreRequestRequestTypeDef",
"DeleteImportedKeyMaterialRequestRequestTypeDef",
"PaginatorConfigTypeDef",
"DescribeCustomKeyStoresRequestRequestTypeDef",
"DescribeKeyRequestRequestTypeDef",
"DisableKeyRequestRequestTypeDef",
"DisableKeyRotationRequestRequestTypeDef",
"DisconnectCustomKeyStoreRequestRequestTypeDef",
"EnableKeyRequestRequestTypeDef",
"EnableKeyRotationRequestRequestTypeDef",
"GenerateDataKeyPairWithoutPlaintextRequestRequestTypeDef",
"GenerateDataKeyWithoutPlaintextRequestRequestTypeDef",
"GetKeyPolicyRequestRequestTypeDef",
"GetKeyRotationStatusRequestRequestTypeDef",
"GetParametersForImportRequestRequestTypeDef",
"GetPublicKeyRequestRequestTypeDef",
"GrantConstraintsPaginatorTypeDef",
"TimestampTypeDef",
"KeyListEntryTypeDef",
"XksKeyConfigurationTypeTypeDef",
"ListAliasesRequestRequestTypeDef",
"ListGrantsRequestRequestTypeDef",
"ListKeyPoliciesRequestRequestTypeDef",
"ListKeysRequestRequestTypeDef",
"ListResourceTagsRequestRequestTypeDef",
"ListRetirableGrantsRequestRequestTypeDef",
"MultiRegionKeyTypeDef",
"PutKeyPolicyRequestRequestTypeDef",
"RetireGrantRequestRequestTypeDef",
"RevokeGrantRequestRequestTypeDef",
"ScheduleKeyDeletionRequestRequestTypeDef",
"UntagResourceRequestRequestTypeDef",
"UpdateAliasRequestRequestTypeDef",
"UpdateKeyDescriptionRequestRequestTypeDef",
"UpdatePrimaryRegionRequestRequestTypeDef",
"EncryptRequestRequestTypeDef",
"GenerateMacRequestRequestTypeDef",
"ReEncryptRequestRequestTypeDef",
"RecipientInfoTypeDef",
"SignRequestRequestTypeDef",
"VerifyMacRequestRequestTypeDef",
"VerifyRequestRequestTypeDef",
"CancelKeyDeletionResponseTypeDef",
"CreateCustomKeyStoreResponseTypeDef",
"CreateGrantResponseTypeDef",
"DecryptResponseTypeDef",
"EmptyResponseMetadataTypeDef",
"EncryptResponseTypeDef",
"GenerateDataKeyPairResponseTypeDef",
"GenerateDataKeyPairWithoutPlaintextResponseTypeDef",
"GenerateDataKeyResponseTypeDef",
"GenerateDataKeyWithoutPlaintextResponseTypeDef",
"GenerateMacResponseTypeDef",
"GenerateRandomResponseTypeDef",
"GetKeyPolicyResponseTypeDef",
"GetKeyRotationStatusResponseTypeDef",
"GetParametersForImportResponseTypeDef",
"GetPublicKeyResponseTypeDef",
"ListAliasesResponseTypeDef",
"ListKeyPoliciesResponseTypeDef",
"ReEncryptResponseTypeDef",
"ScheduleKeyDeletionResponseTypeDef",
"SignResponseTypeDef",
"VerifyMacResponseTypeDef",
"VerifyResponseTypeDef",
"CreateCustomKeyStoreRequestRequestTypeDef",
"UpdateCustomKeyStoreRequestRequestTypeDef",
"CreateGrantRequestRequestTypeDef",
"GrantListEntryTypeDef",
"CreateKeyRequestRequestTypeDef",
"ListResourceTagsResponseTypeDef",
"ReplicateKeyRequestRequestTypeDef",
"TagResourceRequestRequestTypeDef",
"CustomKeyStoresListEntryTypeDef",
"DescribeCustomKeyStoresRequestDescribeCustomKeyStoresPaginateTypeDef",
"ListAliasesRequestListAliasesPaginateTypeDef",
"ListGrantsRequestListGrantsPaginateTypeDef",
"ListKeyPoliciesRequestListKeyPoliciesPaginateTypeDef",
"ListKeysRequestListKeysPaginateTypeDef",
"ListResourceTagsRequestListResourceTagsPaginateTypeDef",
"ListRetirableGrantsRequestListRetirableGrantsPaginateTypeDef",
"GrantListEntryPaginatorTypeDef",
"ImportKeyMaterialRequestRequestTypeDef",
"ListKeysResponseTypeDef",
"MultiRegionConfigurationTypeDef",
"DecryptRequestRequestTypeDef",
"GenerateDataKeyPairRequestRequestTypeDef",
"GenerateDataKeyRequestRequestTypeDef",
"GenerateRandomRequestRequestTypeDef",
"ListGrantsResponseTypeDef",
"DescribeCustomKeyStoresResponseTypeDef",
"ListGrantsResponsePaginatorTypeDef",
"KeyMetadataTypeDef",
"CreateKeyResponseTypeDef",
"DescribeKeyResponseTypeDef",
"ReplicateKeyResponseTypeDef",
)
AliasListEntryTypeDef = TypedDict(
"AliasListEntryTypeDef",
{
"AliasName": NotRequired[str],
"AliasArn": NotRequired[str],
"TargetKeyId": NotRequired[str],
"CreationDate": NotRequired[datetime],
"LastUpdatedDate": NotRequired[datetime],
},
)
BlobTypeDef = Union[str, bytes, IO[Any], StreamingBody]
CancelKeyDeletionRequestRequestTypeDef = TypedDict(
"CancelKeyDeletionRequestRequestTypeDef",
{
"KeyId": str,
},
)
ResponseMetadataTypeDef = TypedDict(
"ResponseMetadataTypeDef",
{
"RequestId": str,
"HostId": str,
"HTTPStatusCode": int,
"HTTPHeaders": Dict[str, str],
"RetryAttempts": int,
},
)
ConnectCustomKeyStoreRequestRequestTypeDef = TypedDict(
"ConnectCustomKeyStoreRequestRequestTypeDef",
{
"CustomKeyStoreId": str,
},
)
CreateAliasRequestRequestTypeDef = TypedDict(
"CreateAliasRequestRequestTypeDef",
{
"AliasName": str,
"TargetKeyId": str,
},
)
XksProxyAuthenticationCredentialTypeTypeDef = TypedDict(
"XksProxyAuthenticationCredentialTypeTypeDef",
{
"AccessKeyId": str,
"RawSecretAccessKey": str,
},
)
GrantConstraintsTypeDef = TypedDict(
"GrantConstraintsTypeDef",
{
"EncryptionContextSubset": NotRequired[Mapping[str, str]],
"EncryptionContextEquals": NotRequired[Mapping[str, str]],
},
)
TagTypeDef = TypedDict(
"TagTypeDef",
{
"TagKey": str,
"TagValue": str,
},
)
XksProxyConfigurationTypeTypeDef = TypedDict(
"XksProxyConfigurationTypeTypeDef",
{
"Connectivity": NotRequired[XksProxyConnectivityTypeType],
"AccessKeyId": NotRequired[str],
"UriEndpoint": NotRequired[str],
"UriPath": NotRequired[str],
"VpcEndpointServiceName": NotRequired[str],
},
)
DeleteAliasRequestRequestTypeDef = TypedDict(
"DeleteAliasRequestRequestTypeDef",
{
"AliasName": str,
},
)
DeleteCustomKeyStoreRequestRequestTypeDef = TypedDict(
"DeleteCustomKeyStoreRequestRequestTypeDef",
{
"CustomKeyStoreId": str,
},
)
DeleteImportedKeyMaterialRequestRequestTypeDef = TypedDict(
"DeleteImportedKeyMaterialRequestRequestTypeDef",
{
"KeyId": str,
},
)
PaginatorConfigTypeDef = TypedDict(
"PaginatorConfigTypeDef",
{
"MaxItems": NotRequired[int],
"PageSize": NotRequired[int],
"StartingToken": NotRequired[str],
},
)
DescribeCustomKeyStoresRequestRequestTypeDef = TypedDict(
"DescribeCustomKeyStoresRequestRequestTypeDef",
{
"CustomKeyStoreId": NotRequired[str],
"CustomKeyStoreName": NotRequired[str],
"Limit": NotRequired[int],
"Marker": NotRequired[str],
},
)
DescribeKeyRequestRequestTypeDef = TypedDict(
"DescribeKeyRequestRequestTypeDef",
{
"KeyId": str,
"GrantTokens": NotRequired[Sequence[str]],
},
)
DisableKeyRequestRequestTypeDef = TypedDict(
"DisableKeyRequestRequestTypeDef",
{
"KeyId": str,
},
)
DisableKeyRotationRequestRequestTypeDef = TypedDict(
"DisableKeyRotationRequestRequestTypeDef",
{
"KeyId": str,
},
)
DisconnectCustomKeyStoreRequestRequestTypeDef = TypedDict(
"DisconnectCustomKeyStoreRequestRequestTypeDef",
{
"CustomKeyStoreId": str,
},
)
EnableKeyRequestRequestTypeDef = TypedDict(
"EnableKeyRequestRequestTypeDef",
{
"KeyId": str,
},
)
EnableKeyRotationRequestRequestTypeDef = TypedDict(
"EnableKeyRotationRequestRequestTypeDef",
{
"KeyId": str,
},
)
GenerateDataKeyPairWithoutPlaintextRequestRequestTypeDef = TypedDict(
"GenerateDataKeyPairWithoutPlaintextRequestRequestTypeDef",
{
"KeyId": str,
"KeyPairSpec": DataKeyPairSpecType,
"EncryptionContext": NotRequired[Mapping[str, str]],
"GrantTokens": NotRequired[Sequence[str]],
"DryRun": NotRequired[bool],
},
)
GenerateDataKeyWithoutPlaintextRequestRequestTypeDef = TypedDict(
"GenerateDataKeyWithoutPlaintextRequestRequestTypeDef",
{
"KeyId": str,
"EncryptionContext": NotRequired[Mapping[str, str]],
"KeySpec": NotRequired[DataKeySpecType],
"NumberOfBytes": NotRequired[int],
"GrantTokens": NotRequired[Sequence[str]],
"DryRun": NotRequired[bool],
},
)
GetKeyPolicyRequestRequestTypeDef = TypedDict(
"GetKeyPolicyRequestRequestTypeDef",
{
"KeyId": str,
"PolicyName": str,
},
)
GetKeyRotationStatusRequestRequestTypeDef = TypedDict(
"GetKeyRotationStatusRequestRequestTypeDef",
{
"KeyId": str,
},
)
GetParametersForImportRequestRequestTypeDef = TypedDict(
"GetParametersForImportRequestRequestTypeDef",
{
"KeyId": str,
"WrappingAlgorithm": AlgorithmSpecType,
"WrappingKeySpec": WrappingKeySpecType,
},
)
GetPublicKeyRequestRequestTypeDef = TypedDict(
"GetPublicKeyRequestRequestTypeDef",
{
"KeyId": str,
"GrantTokens": NotRequired[Sequence[str]],
},
)
GrantConstraintsPaginatorTypeDef = TypedDict(
"GrantConstraintsPaginatorTypeDef",
{
"EncryptionContextSubset": NotRequired[Dict[str, str]],
"EncryptionContextEquals": NotRequired[Dict[str, str]],
},
)
TimestampTypeDef = Union[datetime, str]
KeyListEntryTypeDef = TypedDict(
"KeyListEntryTypeDef",
{
"KeyId": NotRequired[str],
"KeyArn": NotRequired[str],
},
)
XksKeyConfigurationTypeTypeDef = TypedDict(
"XksKeyConfigurationTypeTypeDef",
{
"Id": NotRequired[str],
},
)
ListAliasesRequestRequestTypeDef = TypedDict(
"ListAliasesRequestRequestTypeDef",
{
"KeyId": NotRequired[str],
"Limit": NotRequired[int],
"Marker": NotRequired[str],
},
)
ListGrantsRequestRequestTypeDef = TypedDict(
"ListGrantsRequestRequestTypeDef",
{
"KeyId": str,
"Limit": NotRequired[int],
"Marker": NotRequired[str],
"GrantId": NotRequired[str],
"GranteePrincipal": NotRequired[str],
},
)
ListKeyPoliciesRequestRequestTypeDef = TypedDict(
"ListKeyPoliciesRequestRequestTypeDef",
{
"KeyId": str,
"Limit": NotRequired[int],
"Marker": NotRequired[str],
},
)
ListKeysRequestRequestTypeDef = TypedDict(
"ListKeysRequestRequestTypeDef",
{
"Limit": NotRequired[int],
"Marker": NotRequired[str],
},
)
ListResourceTagsRequestRequestTypeDef = TypedDict(
"ListResourceTagsRequestRequestTypeDef",
{
"KeyId": str,
"Limit": NotRequired[int],
"Marker": NotRequired[str],
},
)
ListRetirableGrantsRequestRequestTypeDef = TypedDict(
"ListRetirableGrantsRequestRequestTypeDef",
{
"RetiringPrincipal": str,
"Limit": NotRequired[int],
"Marker": NotRequired[str],
},
)
MultiRegionKeyTypeDef = TypedDict(
"MultiRegionKeyTypeDef",
{
"Arn": NotRequired[str],
"Region": NotRequired[str],
},
)
PutKeyPolicyRequestRequestTypeDef = TypedDict(
"PutKeyPolicyRequestRequestTypeDef",
{
"KeyId": str,
"PolicyName": str,
"Policy": str,
"BypassPolicyLockoutSafetyCheck": NotRequired[bool],
},
)
RetireGrantRequestRequestTypeDef = TypedDict(
"RetireGrantRequestRequestTypeDef",
{
"GrantToken": NotRequired[str],
"KeyId": NotRequired[str],
"GrantId": NotRequired[str],
"DryRun": NotRequired[bool],
},
)
RevokeGrantRequestRequestTypeDef = TypedDict(
"RevokeGrantRequestRequestTypeDef",
{
"KeyId": str,
"GrantId": str,
"DryRun": NotRequired[bool],
},
)
ScheduleKeyDeletionRequestRequestTypeDef = TypedDict(
"ScheduleKeyDeletionRequestRequestTypeDef",
{
"KeyId": str,
"PendingWindowInDays": NotRequired[int],
},
)
UntagResourceRequestRequestTypeDef = TypedDict(
"UntagResourceRequestRequestTypeDef",
{
"KeyId": str,
"TagKeys": Sequence[str],
},
)
UpdateAliasRequestRequestTypeDef = TypedDict(
"UpdateAliasRequestRequestTypeDef",
{
"AliasName": str,
"TargetKeyId": str,
},
)
UpdateKeyDescriptionRequestRequestTypeDef = TypedDict(
"UpdateKeyDescriptionRequestRequestTypeDef",
{
"KeyId": str,
"Description": str,
},
)
UpdatePrimaryRegionRequestRequestTypeDef = TypedDict(
"UpdatePrimaryRegionRequestRequestTypeDef",
{
"KeyId": str,
"PrimaryRegion": str,
},
)
EncryptRequestRequestTypeDef = TypedDict(
"EncryptRequestRequestTypeDef",
{
"KeyId": str,
"Plaintext": BlobTypeDef,
"EncryptionContext": NotRequired[Mapping[str, str]],
"GrantTokens": NotRequired[Sequence[str]],
"EncryptionAlgorithm": NotRequired[EncryptionAlgorithmSpecType],
"DryRun": NotRequired[bool],
},
)
GenerateMacRequestRequestTypeDef = TypedDict(
"GenerateMacRequestRequestTypeDef",
{
"Message": BlobTypeDef,
"KeyId": str,
"MacAlgorithm": MacAlgorithmSpecType,
"GrantTokens": NotRequired[Sequence[str]],
"DryRun": NotRequired[bool],
},
)
ReEncryptRequestRequestTypeDef = TypedDict(
"ReEncryptRequestRequestTypeDef",
{
"CiphertextBlob": BlobTypeDef,
"DestinationKeyId": str,
"SourceEncryptionContext": NotRequired[Mapping[str, str]],
"SourceKeyId": NotRequired[str],
"DestinationEncryptionContext": NotRequired[Mapping[str, str]],
"SourceEncryptionAlgorithm": NotRequired[EncryptionAlgorithmSpecType],
"DestinationEncryptionAlgorithm": NotRequired[EncryptionAlgorithmSpecType],
"GrantTokens": NotRequired[Sequence[str]],
"DryRun": NotRequired[bool],
},
)
RecipientInfoTypeDef = TypedDict(
"RecipientInfoTypeDef",
{
"KeyEncryptionAlgorithm": NotRequired[Literal["RSAES_OAEP_SHA_256"]],
"AttestationDocument": NotRequired[BlobTypeDef],
},
)
SignRequestRequestTypeDef = TypedDict(
"SignRequestRequestTypeDef",
{
"KeyId": str,
"Message": BlobTypeDef,
"SigningAlgorithm": SigningAlgorithmSpecType,
"MessageType": NotRequired[MessageTypeType],
"GrantTokens": NotRequired[Sequence[str]],
"DryRun": NotRequired[bool],
},
)
VerifyMacRequestRequestTypeDef = TypedDict(
"VerifyMacRequestRequestTypeDef",
{
"Message": BlobTypeDef,
"KeyId": str,
"MacAlgorithm": MacAlgorithmSpecType,
"Mac": BlobTypeDef,
"GrantTokens": NotRequired[Sequence[str]],
"DryRun": NotRequired[bool],
},
)
VerifyRequestRequestTypeDef = TypedDict(
"VerifyRequestRequestTypeDef",
{
"KeyId": str,
"Message": BlobTypeDef,
"Signature": BlobTypeDef,
"SigningAlgorithm": SigningAlgorithmSpecType,
"MessageType": NotRequired[MessageTypeType],
"GrantTokens": NotRequired[Sequence[str]],
"DryRun": NotRequired[bool],
},
)
CancelKeyDeletionResponseTypeDef = TypedDict(
"CancelKeyDeletionResponseTypeDef",
{
"KeyId": str,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
CreateCustomKeyStoreResponseTypeDef = TypedDict(
"CreateCustomKeyStoreResponseTypeDef",
{
"CustomKeyStoreId": str,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
CreateGrantResponseTypeDef = TypedDict(
"CreateGrantResponseTypeDef",
{
"GrantToken": str,
"GrantId": str,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
DecryptResponseTypeDef = TypedDict(
"DecryptResponseTypeDef",
{
"KeyId": str,
"Plaintext": bytes,
"EncryptionAlgorithm": EncryptionAlgorithmSpecType,
"CiphertextForRecipient": bytes,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
EmptyResponseMetadataTypeDef = TypedDict(
"EmptyResponseMetadataTypeDef",
{
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
EncryptResponseTypeDef = TypedDict(
"EncryptResponseTypeDef",
{
"CiphertextBlob": bytes,
"KeyId": str,
"EncryptionAlgorithm": EncryptionAlgorithmSpecType,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
GenerateDataKeyPairResponseTypeDef = TypedDict(
"GenerateDataKeyPairResponseTypeDef",
{
"PrivateKeyCiphertextBlob": bytes,
"PrivateKeyPlaintext": bytes,
"PublicKey": bytes,
"KeyId": str,
"KeyPairSpec": DataKeyPairSpecType,
"CiphertextForRecipient": bytes,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
GenerateDataKeyPairWithoutPlaintextResponseTypeDef = TypedDict(
"GenerateDataKeyPairWithoutPlaintextResponseTypeDef",
{
"PrivateKeyCiphertextBlob": bytes,
"PublicKey": bytes,
"KeyId": str,
"KeyPairSpec": DataKeyPairSpecType,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
GenerateDataKeyResponseTypeDef = TypedDict(
"GenerateDataKeyResponseTypeDef",
{
"CiphertextBlob": bytes,
"Plaintext": bytes,
"KeyId": str,
"CiphertextForRecipient": bytes,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
GenerateDataKeyWithoutPlaintextResponseTypeDef = TypedDict(
"GenerateDataKeyWithoutPlaintextResponseTypeDef",
{
"CiphertextBlob": bytes,
"KeyId": str,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
GenerateMacResponseTypeDef = TypedDict(
"GenerateMacResponseTypeDef",
{
"Mac": bytes,
"MacAlgorithm": MacAlgorithmSpecType,
"KeyId": str,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
GenerateRandomResponseTypeDef = TypedDict(
"GenerateRandomResponseTypeDef",
{
"Plaintext": bytes,
"CiphertextForRecipient": bytes,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
GetKeyPolicyResponseTypeDef = TypedDict(
"GetKeyPolicyResponseTypeDef",
{
"Policy": str,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
GetKeyRotationStatusResponseTypeDef = TypedDict(
"GetKeyRotationStatusResponseTypeDef",
{
"KeyRotationEnabled": bool,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
GetParametersForImportResponseTypeDef = TypedDict(
"GetParametersForImportResponseTypeDef",
{
"KeyId": str,
"ImportToken": bytes,
"PublicKey": bytes,
"ParametersValidTo": datetime,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
GetPublicKeyResponseTypeDef = TypedDict(
"GetPublicKeyResponseTypeDef",
{
"KeyId": str,
"PublicKey": bytes,
"CustomerMasterKeySpec": CustomerMasterKeySpecType,
"KeySpec": KeySpecType,
"KeyUsage": KeyUsageTypeType,
"EncryptionAlgorithms": List[EncryptionAlgorithmSpecType],
"SigningAlgorithms": List[SigningAlgorithmSpecType],
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
ListAliasesResponseTypeDef = TypedDict(
"ListAliasesResponseTypeDef",
{
"Aliases": List[AliasListEntryTypeDef],
"NextMarker": str,
"Truncated": bool,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
ListKeyPoliciesResponseTypeDef = TypedDict(
"ListKeyPoliciesResponseTypeDef",
{
"PolicyNames": List[str],
"NextMarker": str,
"Truncated": bool,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
ReEncryptResponseTypeDef = TypedDict(
"ReEncryptResponseTypeDef",
{
"CiphertextBlob": bytes,
"SourceKeyId": str,
"KeyId": str,
"SourceEncryptionAlgorithm": EncryptionAlgorithmSpecType,
"DestinationEncryptionAlgorithm": EncryptionAlgorithmSpecType,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
ScheduleKeyDeletionResponseTypeDef = TypedDict(
"ScheduleKeyDeletionResponseTypeDef",
{
"KeyId": str,
"DeletionDate": datetime,
"KeyState": KeyStateType,
"PendingWindowInDays": int,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
SignResponseTypeDef = TypedDict(
"SignResponseTypeDef",
{
"KeyId": str,
"Signature": bytes,
"SigningAlgorithm": SigningAlgorithmSpecType,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
VerifyMacResponseTypeDef = TypedDict(
"VerifyMacResponseTypeDef",
{
"KeyId": str,
"MacValid": bool,
"MacAlgorithm": MacAlgorithmSpecType,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
VerifyResponseTypeDef = TypedDict(
"VerifyResponseTypeDef",
{
"KeyId": str,
"SignatureValid": bool,
"SigningAlgorithm": SigningAlgorithmSpecType,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
CreateCustomKeyStoreRequestRequestTypeDef = TypedDict(
"CreateCustomKeyStoreRequestRequestTypeDef",
{
"CustomKeyStoreName": str,
"CloudHsmClusterId": NotRequired[str],
"TrustAnchorCertificate": NotRequired[str],
"KeyStorePassword": NotRequired[str],
"CustomKeyStoreType": NotRequired[CustomKeyStoreTypeType],
"XksProxyUriEndpoint": NotRequired[str],
"XksProxyUriPath": NotRequired[str],
"XksProxyVpcEndpointServiceName": NotRequired[str],
"XksProxyAuthenticationCredential": NotRequired[
XksProxyAuthenticationCredentialTypeTypeDef
],
"XksProxyConnectivity": NotRequired[XksProxyConnectivityTypeType],
},
)
UpdateCustomKeyStoreRequestRequestTypeDef = TypedDict(
"UpdateCustomKeyStoreRequestRequestTypeDef",
{
"CustomKeyStoreId": str,
"NewCustomKeyStoreName": NotRequired[str],
"KeyStorePassword": NotRequired[str],
"CloudHsmClusterId": NotRequired[str],
"XksProxyUriEndpoint": NotRequired[str],
"XksProxyUriPath": NotRequired[str],
"XksProxyVpcEndpointServiceName": NotRequired[str],
"XksProxyAuthenticationCredential": NotRequired[
XksProxyAuthenticationCredentialTypeTypeDef
],
"XksProxyConnectivity": NotRequired[XksProxyConnectivityTypeType],
},
)
CreateGrantRequestRequestTypeDef = TypedDict(
"CreateGrantRequestRequestTypeDef",
{
"KeyId": str,
"GranteePrincipal": str,
"Operations": Sequence[GrantOperationType],
"RetiringPrincipal": NotRequired[str],
"Constraints": NotRequired[GrantConstraintsTypeDef],
"GrantTokens": NotRequired[Sequence[str]],
"Name": NotRequired[str],
"DryRun": NotRequired[bool],
},
)
GrantListEntryTypeDef = TypedDict(
"GrantListEntryTypeDef",
{
"KeyId": NotRequired[str],
"GrantId": NotRequired[str],
"Name": NotRequired[str],
"CreationDate": NotRequired[datetime],
"GranteePrincipal": NotRequired[str],
"RetiringPrincipal": NotRequired[str],
"IssuingAccount": NotRequired[str],
"Operations": NotRequired[List[GrantOperationType]],
"Constraints": NotRequired[GrantConstraintsTypeDef],
},
)
CreateKeyRequestRequestTypeDef = TypedDict(
"CreateKeyRequestRequestTypeDef",
{
"Policy": NotRequired[str],
"Description": NotRequired[str],
"KeyUsage": NotRequired[KeyUsageTypeType],
"CustomerMasterKeySpec": NotRequired[CustomerMasterKeySpecType],
"KeySpec": NotRequired[KeySpecType],
"Origin": NotRequired[OriginTypeType],
"CustomKeyStoreId": NotRequired[str],
"BypassPolicyLockoutSafetyCheck": NotRequired[bool],
"Tags": NotRequired[Sequence[TagTypeDef]],
"MultiRegion": NotRequired[bool],
"XksKeyId": NotRequired[str],
},
)
ListResourceTagsResponseTypeDef = TypedDict(
"ListResourceTagsResponseTypeDef",
{
"Tags": List[TagTypeDef],
"NextMarker": str,
"Truncated": bool,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
ReplicateKeyRequestRequestTypeDef = TypedDict(
"ReplicateKeyRequestRequestTypeDef",
{
"KeyId": str,
"ReplicaRegion": str,
"Policy": NotRequired[str],
"BypassPolicyLockoutSafetyCheck": NotRequired[bool],
"Description": NotRequired[str],
"Tags": NotRequired[Sequence[TagTypeDef]],
},
)
TagResourceRequestRequestTypeDef = TypedDict(
"TagResourceRequestRequestTypeDef",
{
"KeyId": str,
"Tags": Sequence[TagTypeDef],
},
)
CustomKeyStoresListEntryTypeDef = TypedDict(
"CustomKeyStoresListEntryTypeDef",
{
"CustomKeyStoreId": NotRequired[str],
"CustomKeyStoreName": NotRequired[str],
"CloudHsmClusterId": NotRequired[str],
"TrustAnchorCertificate": NotRequired[str],
"ConnectionState": NotRequired[ConnectionStateTypeType],
"ConnectionErrorCode": NotRequired[ConnectionErrorCodeTypeType],
"CreationDate": NotRequired[datetime],
"CustomKeyStoreType": NotRequired[CustomKeyStoreTypeType],
"XksProxyConfiguration": NotRequired[XksProxyConfigurationTypeTypeDef],
},
)
DescribeCustomKeyStoresRequestDescribeCustomKeyStoresPaginateTypeDef = TypedDict(
"DescribeCustomKeyStoresRequestDescribeCustomKeyStoresPaginateTypeDef",
{
"CustomKeyStoreId": NotRequired[str],
"CustomKeyStoreName": NotRequired[str],
"PaginationConfig": NotRequired[PaginatorConfigTypeDef],
},
)
ListAliasesRequestListAliasesPaginateTypeDef = TypedDict(
"ListAliasesRequestListAliasesPaginateTypeDef",
{
"KeyId": NotRequired[str],
"PaginationConfig": NotRequired[PaginatorConfigTypeDef],
},
)
ListGrantsRequestListGrantsPaginateTypeDef = TypedDict(
"ListGrantsRequestListGrantsPaginateTypeDef",
{
"KeyId": str,
"GrantId": NotRequired[str],
"GranteePrincipal": NotRequired[str],
"PaginationConfig": NotRequired[PaginatorConfigTypeDef],
},
)
ListKeyPoliciesRequestListKeyPoliciesPaginateTypeDef = TypedDict(
"ListKeyPoliciesRequestListKeyPoliciesPaginateTypeDef",
{
"KeyId": str,
"PaginationConfig": NotRequired[PaginatorConfigTypeDef],
},
)
ListKeysRequestListKeysPaginateTypeDef = TypedDict(
"ListKeysRequestListKeysPaginateTypeDef",
{
"PaginationConfig": NotRequired[PaginatorConfigTypeDef],
},
)
ListResourceTagsRequestListResourceTagsPaginateTypeDef = TypedDict(
"ListResourceTagsRequestListResourceTagsPaginateTypeDef",
{
"KeyId": str,
"PaginationConfig": NotRequired[PaginatorConfigTypeDef],
},
)
ListRetirableGrantsRequestListRetirableGrantsPaginateTypeDef = TypedDict(
"ListRetirableGrantsRequestListRetirableGrantsPaginateTypeDef",
{
"RetiringPrincipal": str,
"PaginationConfig": NotRequired[PaginatorConfigTypeDef],
},
)
GrantListEntryPaginatorTypeDef = TypedDict(
"GrantListEntryPaginatorTypeDef",
{
"KeyId": NotRequired[str],
"GrantId": NotRequired[str],
"Name": NotRequired[str],
"CreationDate": NotRequired[datetime],
"GranteePrincipal": NotRequired[str],
"RetiringPrincipal": NotRequired[str],
"IssuingAccount": NotRequired[str],
"Operations": NotRequired[List[GrantOperationType]],
"Constraints": NotRequired[GrantConstraintsPaginatorTypeDef],
},
)
ImportKeyMaterialRequestRequestTypeDef = TypedDict(
"ImportKeyMaterialRequestRequestTypeDef",
{
"KeyId": str,
"ImportToken": BlobTypeDef,
"EncryptedKeyMaterial": BlobTypeDef,
"ValidTo": NotRequired[TimestampTypeDef],
"ExpirationModel": NotRequired[ExpirationModelTypeType],
},
)
ListKeysResponseTypeDef = TypedDict(
"ListKeysResponseTypeDef",
{
"Keys": List[KeyListEntryTypeDef],
"NextMarker": str,
"Truncated": bool,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
MultiRegionConfigurationTypeDef = TypedDict(
"MultiRegionConfigurationTypeDef",
{
"MultiRegionKeyType": NotRequired[MultiRegionKeyTypeType],
"PrimaryKey": NotRequired[MultiRegionKeyTypeDef],
"ReplicaKeys": NotRequired[List[MultiRegionKeyTypeDef]],
},
)
DecryptRequestRequestTypeDef = TypedDict(
"DecryptRequestRequestTypeDef",
{
"CiphertextBlob": BlobTypeDef,
"EncryptionContext": NotRequired[Mapping[str, str]],
"GrantTokens": NotRequired[Sequence[str]],
"KeyId": NotRequired[str],
"EncryptionAlgorithm": NotRequired[EncryptionAlgorithmSpecType],
"Recipient": NotRequired[RecipientInfoTypeDef],
"DryRun": NotRequired[bool],
},
)
GenerateDataKeyPairRequestRequestTypeDef = TypedDict(
"GenerateDataKeyPairRequestRequestTypeDef",
{
"KeyId": str,
"KeyPairSpec": DataKeyPairSpecType,
"EncryptionContext": NotRequired[Mapping[str, str]],
"GrantTokens": NotRequired[Sequence[str]],
"Recipient": NotRequired[RecipientInfoTypeDef],
"DryRun": NotRequired[bool],
},
)
GenerateDataKeyRequestRequestTypeDef = TypedDict(
"GenerateDataKeyRequestRequestTypeDef",
{
"KeyId": str,
"EncryptionContext": NotRequired[Mapping[str, str]],
"NumberOfBytes": NotRequired[int],
"KeySpec": NotRequired[DataKeySpecType],
"GrantTokens": NotRequired[Sequence[str]],
"Recipient": NotRequired[RecipientInfoTypeDef],
"DryRun": NotRequired[bool],
},
)
GenerateRandomRequestRequestTypeDef = TypedDict(
"GenerateRandomRequestRequestTypeDef",
{
"NumberOfBytes": NotRequired[int],
"CustomKeyStoreId": NotRequired[str],
"Recipient": NotRequired[RecipientInfoTypeDef],
},
)
ListGrantsResponseTypeDef = TypedDict(
"ListGrantsResponseTypeDef",
{
"Grants": List[GrantListEntryTypeDef],
"NextMarker": str,
"Truncated": bool,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
DescribeCustomKeyStoresResponseTypeDef = TypedDict(
"DescribeCustomKeyStoresResponseTypeDef",
{
"CustomKeyStores": List[CustomKeyStoresListEntryTypeDef],
"NextMarker": str,
"Truncated": bool,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
ListGrantsResponsePaginatorTypeDef = TypedDict(
"ListGrantsResponsePaginatorTypeDef",
{
"Grants": List[GrantListEntryPaginatorTypeDef],
"NextMarker": str,
"Truncated": bool,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
KeyMetadataTypeDef = TypedDict(
"KeyMetadataTypeDef",
{
"KeyId": str,
"AWSAccountId": NotRequired[str],
"Arn": NotRequired[str],
"CreationDate": NotRequired[datetime],
"Enabled": NotRequired[bool],
"Description": NotRequired[str],
"KeyUsage": NotRequired[KeyUsageTypeType],
"KeyState": NotRequired[KeyStateType],
"DeletionDate": NotRequired[datetime],
"ValidTo": NotRequired[datetime],
"Origin": NotRequired[OriginTypeType],
"CustomKeyStoreId": NotRequired[str],
"CloudHsmClusterId": NotRequired[str],
"ExpirationModel": NotRequired[ExpirationModelTypeType],
"KeyManager": NotRequired[KeyManagerTypeType],
"CustomerMasterKeySpec": NotRequired[CustomerMasterKeySpecType],
"KeySpec": NotRequired[KeySpecType],
"EncryptionAlgorithms": NotRequired[List[EncryptionAlgorithmSpecType]],
"SigningAlgorithms": NotRequired[List[SigningAlgorithmSpecType]],
"MultiRegion": NotRequired[bool],
"MultiRegionConfiguration": NotRequired[MultiRegionConfigurationTypeDef],
"PendingDeletionWindowInDays": NotRequired[int],
"MacAlgorithms": NotRequired[List[MacAlgorithmSpecType]],
"XksKeyConfiguration": NotRequired[XksKeyConfigurationTypeTypeDef],
},
)
CreateKeyResponseTypeDef = TypedDict(
"CreateKeyResponseTypeDef",
{
"KeyMetadata": KeyMetadataTypeDef,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
DescribeKeyResponseTypeDef = TypedDict(
"DescribeKeyResponseTypeDef",
{
"KeyMetadata": KeyMetadataTypeDef,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
ReplicateKeyResponseTypeDef = TypedDict(
"ReplicateKeyResponseTypeDef",
{
"ReplicaKeyMetadata": KeyMetadataTypeDef,
"ReplicaPolicy": str,
"ReplicaTags": List[TagTypeDef],
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
|
PypiClean
|
/cuckoo_db-0.2.2-py3-none-any.whl/cuckoo/cuckoo.py
|
from cuckoo.migration import Migration
from peewee import ProgrammingError
from playhouse.reflection import Introspector
from playhouse.migrate import PostgresqlMigrator
import importlib.util
import os
class Migrator(object):
def __init__(self, db, path):
self.db = db
self._path = path
self._introspector = Introspector.from_database(self.db)
self.models = self._introspector.generate_models()
self.migration_ids = [filename.split('.')[0] for filename in filter(lambda f: f.endswith('.py') and '__init__' not in f, sorted(os.listdir(self._path)))]
def _load_migration(self, m_id):
module_name = f'{os.path.relpath(self._path).replace("/", ".")}.{m_id}'
spec = importlib.util.spec_from_file_location(module_name, os.path.join(self._path, f'{m_id}.py'))
m_mod = importlib.util.module_from_spec(spec)
spec.loader.exec_module(m_mod)
return m_mod
def _apply_single_migration(self, pg_migrator, direction, migration_id, fn):
with self.db.atomic():
try:
print(f'Processing migration [{migration_id}].')
fn(self.db, pg_migrator)
if direction == 'up':
Migration(migration_id=migration_id).save()
elif direction == 'down':
Migration.delete().where(Migration.migration_id == migration_id).execute()
except Exception as e:
print(f'An error occured while processing migration [{migration_id}]: \n\t- {e}')
raise RuntimeError
def try_prepare_migrations(self):
self.db.create_tables([Migration], safe=True)
def apply(self, direction):
applied_migration_ids = set([m.migration_id for m in Migration.select().execute()])
pg_migrator = PostgresqlMigrator(self.db)
if direction == 'up':
ids = filter(lambda m: m not in applied_migration_ids, self.migration_ids)
elif direction == 'down':
ids = filter(lambda m: m in applied_migration_ids, self.migration_ids[::-1])
applied_atleast_one = False
for m_id in ids:
applied_atleast_one = True
fn = getattr(self._load_migration(m_id), direction)
try:
self._apply_single_migration(pg_migrator, direction, m_id, fn)
except RuntimeError:
break
if not applied_atleast_one:
print('Nothing to do.')
def run(self, direction):
print(f'Migrating [{direction}].')
self.try_prepare_migrations()
# Apply migrations
self.apply(direction)
print('Migration complete.')
|
PypiClean
|
/rockset_v2-2.0.2.tar.gz/rockset_v2-2.0.2/rockset_v2/model/s3_collection_creation_request.py
|
import re # noqa: F401
import sys # noqa: F401
from rockset_v2.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel
)
from rockset_v2.exceptions import ApiAttributeError
def lazy_import():
from rockset_v2.model.event_time_info import EventTimeInfo
from rockset_v2.model.field_mapping_query import FieldMappingQuery
from rockset_v2.model.field_partition import FieldPartition
from rockset_v2.model.s3_source_wrapper import S3SourceWrapper
globals()['EventTimeInfo'] = EventTimeInfo
globals()['FieldMappingQuery'] = FieldMappingQuery
globals()['FieldPartition'] = FieldPartition
globals()['S3SourceWrapper'] = S3SourceWrapper
class S3CollectionCreationRequest(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
('storage_compression_type',): {
'LZ4': "LZ4",
'ZSTD': "ZSTD",
},
}
validations = {
('retention_secs',): {
'inclusive_minimum': 1,
},
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'name': (str,), # noqa: E501
'clustering_key': ([FieldPartition], none_type), # noqa: E501
'description': (str, none_type), # noqa: E501
'event_time_info': (EventTimeInfo, none_type), # noqa: E501
'field_mapping_query': (FieldMappingQuery, none_type), # noqa: E501
'retention_secs': (int, none_type), # noqa: E501
'sources': ([S3SourceWrapper], none_type), # noqa: E501
'storage_compression_type': (str, none_type), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'name': 'name', # noqa: E501
'clustering_key': 'clustering_key', # noqa: E501
'description': 'description', # noqa: E501
'event_time_info': 'event_time_info', # noqa: E501
'field_mapping_query': 'field_mapping_query', # noqa: E501
'retention_secs': 'retention_secs', # noqa: E501
'sources': 'sources', # noqa: E501
'storage_compression_type': 'storage_compression_type', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, name, *args, **kwargs): # noqa: E501
"""S3CollectionCreationRequest - a model defined in OpenAPI
Args:
name (str): Unique identifier for collection, can contain alphanumeric or dash characters.
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
clustering_key ([FieldPartition]): Deprecated. List of clustering fields. Use CLUSTER BY clause in `field_mapping_query` instead.. [optional] # noqa: E501
description (str): Text describing the collection.. [optional] # noqa: E501
event_time_info (EventTimeInfo): [optional] # noqa: E501
field_mapping_query (FieldMappingQuery): [optional] # noqa: E501
retention_secs (int): Number of seconds after which data is purged, based on event time.. [optional] # noqa: E501
sources ([S3SourceWrapper]): List of sources from which to ingest data. [optional] # noqa: E501
storage_compression_type (str): RocksDB storage compression type.. [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.name = name
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *, name, **kwargs): # noqa: E501
"""S3CollectionCreationRequest - a model defined in OpenAPI
Keyword Args:
name (str): Unique identifier for collection, can contain alphanumeric or dash characters.
clustering_key ([FieldPartition]): Deprecated. List of clustering fields. Use CLUSTER BY clause in `field_mapping_query` instead.. [optional] # noqa: E501
description (str): Text describing the collection.. [optional] # noqa: E501
event_time_info (EventTimeInfo): [optional] # noqa: E501
field_mapping_query (FieldMappingQuery): [optional] # noqa: E501
retention_secs (int): Number of seconds after which data is purged, based on event time.. [optional] # noqa: E501
sources ([S3SourceWrapper]): List of sources from which to ingest data. [optional] # noqa: E501
storage_compression_type (str): RocksDB storage compression type.. [optional] # noqa: E501
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
args = []
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.name = name
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
# todo: remove these comments - this stops the user from setting read only vars but we need this now to address a bug
# if var_name in self.read_only_vars:
# raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
# f"class with read only attributes.")
|
PypiClean
|
/imperva-sdk-0.2.0.tar.gz/imperva-sdk-0.2.0/imperva_sdk/ADCUploader.py
|
import re
import requests
import os
import json
import threading
from time import sleep
from imperva_sdk.core import *
class ADCUploader(MxObject):
"""A simple example class"""
def __init__(self, connection):
self.cookies = {'JSESSIONID': str(connection._MxConnection__Headers['Cookie'])[11:],
'SSOSESSIONID': str(connection._MxConnection__Headers['Cookie'])[11:]}
self.ip = connection.Host
# for tracking the upload we need the script id.
def get_script_id(self):
headers = {
'Host': '%s:8083' % self.ip,
'Connection': 'close',
'Upgrade-Insecure-Requests': '1',
'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
'Referer': 'https://%s:8083/SecureSphere/app/' % self.ip,
'Accept-Encoding': 'gzip, deflate',
'Accept-Language': 'en-US,en;q=0.9',
'content-type': 'application/json',
}
# getting script id for later tracking
api_url = 'https://%s:8083/SecureSphere/ui/main.html' % (self.ip)
r = requests.get(api_url, cookies=self.cookies, verify=False)
regex_result = re.search(r"JAWR.dwr_scriptSessionId='([a-zA-Z0-9_.-]*)'", r.text)
# check if not found?
# No need, we believe in ourselves :)
return regex_result.group(1)
# uploading by multipart post request
def upload_adc_content(self, path):
api_url = 'https://%s:8083/SecureSphere/ui/adc_content.html' % self.ip
# secure sphere want to receive it like this.
prod = open(path, 'rb')
headers = {
'Host': '%s:8083' % self.ip,
'Connection': 'close',
'Upgrade-Insecure-Requests': '1',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
'Referer': 'https://%s:8083/SecureSphere/ui/main.html' % self.ip,
'Accept-Encoding': 'gzip, deflate',
'Accept-Language': 'en-US,en;q=0.9',
}
r = requests.post(api_url, files={'ADC1': prod},
cookies=self.cookies, headers=headers, verify=False)
return r.status_code == 200
# check the status of the upload, need to parse the weird json.
def check_upload_status(self, sessionid):
api_url = 'https://%s:8083/SecureSphere/dwr/call/plaincall/AsyncOperationsContainer.getOperationState.dwr' % self.ip
headers = {
'Host': '%s:8083' % self.ip,
'Connection': 'close',
'Upgrade-Insecure-Requests': '1',
'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
'Referer': 'https://%s:8083/SecureSphere/ui/main.html' % self.ip,
'Accept-Encoding': 'gzip, deflate',
'Accept-Language': 'en-US,en;q=0.9',
'Content-Type': 'text/plain'
}
data = 'callCount=1\n' \
'page=/SecureSphere/ui/main.html\n' \
'httpSessionId=\n' \
'scriptSessionId={0}\n' \
'c0-scriptName=AsyncOperationsContainer\n' \
'c0-methodName=getOperationState\n' \
'c0-id=0\n' \
'c0-param0=string:%2Fadc_content.html\n' \
'c0-param1=boolean:false\n' \
'batchId=5'.format(sessionid)
response = requests.post(api_url, headers=headers, data=data, cookies=self.cookies, verify=False)
regex_result = re.search(r"dwr\.engine\.\_remoteHandleCallback.*({.*})", response.text)
if regex_result is not None:
# split to from of: ['attributes:s0', 'childrenProgressInfo:s1'...
result_touple = re.findall(r"([a-zA-Z0-9_.-]*):([a-zA-Z0-9\" _.-]*)", regex_result.group(1))
response = {}
for tup in result_touple:
response[tup[0]] = tup[1]
return response
else:
return {}
def wait_upload_finish(self, scriptid):
status = self.check_upload_status(scriptid)
while status == {} or status['inProgress'] == 'true':
sleep(1)
status = self.check_upload_status(scriptid)
#weird secure sphere, returning true even it is false.
err_msg = ['"System is busy. Please try again later."',
'"Failed unpacking ADC content. Please verify you uploaded valid ADC content file"',
'"Failed to upload ADC content , duplicate item with full id"']
if any([status['stage'] in err for err in err_msg]):
status['success'] = 'false'
return status
def upload_adc_and_wait(self, path):
script_id = self.get_script_id()
self.upload_adc_content(path)
return self.wait_upload_finish(script_id)
def upload_adc_and_wait_multithreaded(self, path):
script_id = self.get_script_id()
upload_thread = threading.Thread(target=self.upload_adc_content, args=(path,))
status_thread = threading.Thread(target=self.wait_upload_finish, args=(script_id,))
upload_thread.start();
status_thread.start();
status_thread.join();
|
PypiClean
|
/brain_py-2.1.20-py3-none-any.whl/brainpy/math/operators/pre2post.py
|
from functools import partial
from typing import Union, Tuple
import jax.numpy as jnp
from jax import vmap, jit
from jax.lax import cond
from brainpy.errors import MathError
from brainpy.math.jaxarray import JaxArray
from brainpy.math.numpy_ops import as_device_array
from brainpy.types import Array
from .pre2syn import pre2syn
from .syn2post import syn2post_mean
from .utils import _check_brainpylib
try:
import brainpylib
except ModuleNotFoundError:
brainpylib = None
__all__ = [
# pre-to-post
'pre2post_sum',
'pre2post_prod',
'pre2post_max',
'pre2post_min',
'pre2post_mean',
# pre-to-post event operator
'pre2post_event_sum',
'pre2post_event_prod',
]
def _raise_pre_ids_is_none(pre_ids):
if pre_ids is None:
raise MathError(f'pre2post synaptic computation needs "pre_ids" '
f'when providing heterogeneous "pre_values" '
f'(brainpy.math.ndim(pre_values) != 0).')
def pre2post_event_sum(events: Array,
pre2post: Tuple[Array, Array],
post_num: int,
values: Union[float, Array] = 1.):
"""The pre-to-post synaptic computation with event-driven summation.
When ``values`` is a scalar, this function is equivalent to
.. highlight:: python
.. code-block:: python
post_val = np.zeros(post_num)
post_ids, idnptr = pre2post
for i in range(pre_num):
if events[i]:
for j in range(idnptr[i], idnptr[i+1]):
post_val[post_ids[i]] += values
When ``values`` is a vector (with the length of ``len(post_ids)``),
this function is equivalent to
.. highlight:: python
.. code-block:: python
post_val = np.zeros(post_num)
post_ids, idnptr = pre2post
for i in range(pre_num):
if events[i]:
for j in range(idnptr[i], idnptr[i+1]):
post_val[post_ids[i]] += values[j]
Parameters
----------
events: Array
The events, must be bool.
pre2post: tuple of Array, tuple of Array
A tuple contains the connection information of pre-to-post.
post_num: int
The number of post-synaptic group.
values: float, Array
The value to make summation.
Returns
-------
out: JaxArray, jax.numpy.ndarray
A tensor with the shape of ``post_num``.
"""
_check_brainpylib(pre2post_event_sum.__name__)
indices, idnptr = pre2post
events = as_device_array(events)
indices = as_device_array(indices)
idnptr = as_device_array(idnptr)
values = as_device_array(values)
return brainpylib.event_sum(events, (indices, idnptr), post_num, values)
def pre2post_event_sum2(events: Array,
pre2post: Tuple[Array, Array],
post_num: int,
values: Union[float, Array] = 1.):
"""The pre-to-post synaptic computation with event-driven summation.
When ``values`` is a scalar, this function is equivalent to
.. highlight:: python
.. code-block:: python
post_val = np.zeros(post_num)
post_ids, idnptr = pre2post
for i in range(pre_num):
if events[i]:
for j in range(idnptr[i], idnptr[i+1]):
post_val[post_ids[i]] += values
When ``values`` is a vector (with the length of ``len(post_ids)``),
this function is equivalent to
.. highlight:: python
.. code-block:: python
post_val = np.zeros(post_num)
post_ids, idnptr = pre2post
for i in range(pre_num):
if events[i]:
for j in range(idnptr[i], idnptr[i+1]):
post_val[post_ids[i]] += values[j]
Parameters
----------
events: Array
The events, must be bool.
pre2post: tuple of Array, tuple of Array
A tuple contains the connection information of pre-to-post.
post_num: int
The number of post-synaptic group.
values: float, Array
The value to make summation.
Returns
-------
out: JaxArray, jax.numpy.ndarray
A tensor with the shape of ``post_num``.
"""
_check_brainpylib(pre2post_event_sum.__name__)
indices, idnptr = pre2post
events = as_device_array(events)
indices = as_device_array(indices)
idnptr = as_device_array(idnptr)
values = as_device_array(values)
return brainpylib.event_sum2(events, (indices, idnptr), post_num, values)
def pre2post_event_prod(events, pre2post, post_num, values=1.):
"""The pre-to-post synaptic computation with event-driven production.
When ``values`` is a scalar, this function is equivalent to
.. highlight:: python
.. code-block:: python
post_val = np.ones(post_num)
post_ids, idnptr = pre2post
for i in range(pre_num):
if events[i]:
for j in range(idnptr[i], idnptr[i+1]):
post_val[post_ids[i]] *= values
When ``values`` is a vector (with the length of ``len(post_ids)``),
this function is equivalent to
.. highlight:: python
.. code-block:: python
post_val = np.ones(post_num)
post_ids, idnptr = pre2post
for i in range(pre_num):
if events[i]:
for j in range(idnptr[i], idnptr[i+1]):
post_val[post_ids[i]] *= values[j]
Parameters
----------
events: JaxArray, jax.numpy.ndarray, Variable
The events, must be bool.
pre2post: tuple of JaxArray, tuple of jax.numpy.ndarray
A tuple contains the connection information of pre-to-post.
post_num: int
The number of post-synaptic group.
values: float, JaxArray, jax.numpy.ndarray
The value to make summation.
Returns
-------
out: JaxArray, jax.numpy.ndarray
A tensor with the shape of ``post_num``.
"""
_check_brainpylib(pre2post_event_prod.__name__)
indices, idnptr = pre2post
events = as_device_array(events)
indices = as_device_array(indices)
idnptr = as_device_array(idnptr)
values = as_device_array(values)
return brainpylib.event_prod(events, (indices, idnptr), post_num, values)
def pre2post_sum(pre_values, post_num, post_ids, pre_ids=None):
"""The pre-to-post synaptic summation.
This function is equivalent to:
.. highlight:: python
.. code-block:: python
post_val = np.zeros(post_num)
for i, j in zip(pre_ids, post_ids):
post_val[j] += pre_values[pre_ids[i]]
Parameters
----------
pre_values: float, jax.numpy.ndarray, JaxArray, Variable
The pre-synaptic values.
post_ids: jax.numpy.ndarray, JaxArray
The connected post-synaptic neuron ids.
post_num: int
Output dimension. The number of post-synaptic neurons.
pre_ids: optional, jax.numpy.ndarray, JaxArray
The connected pre-synaptic neuron ids.
Returns
-------
post_val: jax.numpy.ndarray, JaxArray
The value with the size of post-synaptic neurons.
"""
out = jnp.zeros(post_num)
pre_values = as_device_array(pre_values)
post_ids = as_device_array(post_ids)
if jnp.ndim(pre_values) != 0:
_raise_pre_ids_is_none(pre_ids)
pre_ids = as_device_array(pre_ids)
pre_values = pre_values[pre_ids]
return out.at[post_ids].add(pre_values)
def pre2post_prod(pre_values, post_num, post_ids, pre_ids=None):
"""The pre-to-post synaptic production.
This function is equivalent to:
.. highlight:: python
.. code-block:: python
post_val = np.zeros(post_num)
for i, j in zip(pre_ids, post_ids):
post_val[j] *= pre_values[pre_ids[i]]
Parameters
----------
pre_values: float, jax.numpy.ndarray, JaxArray, Variable
The pre-synaptic values.
pre_ids: jax.numpy.ndarray, JaxArray
The connected pre-synaptic neuron ids.
post_ids: jax.numpy.ndarray, JaxArray
The connected post-synaptic neuron ids.
post_num: int
Output dimension. The number of post-synaptic neurons.
Returns
-------
post_val: jax.numpy.ndarray, JaxArray
The value with the size of post-synaptic neurons.
"""
out = jnp.zeros(post_num)
pre_values = as_device_array(pre_values)
post_ids = as_device_array(post_ids)
if jnp.ndim(pre_values) != 0:
_raise_pre_ids_is_none(pre_ids)
pre_ids = as_device_array(pre_ids)
pre_values = pre_values[pre_ids]
return out.at[post_ids].multiply(pre_values)
def pre2post_min(pre_values, post_num, post_ids, pre_ids=None):
"""The pre-to-post synaptic minimization.
This function is equivalent to:
.. highlight:: python
.. code-block:: python
post_val = np.zeros(post_num)
for i, j in zip(pre_ids, post_ids):
post_val[j] = np.minimum(post_val[j], pre_values[pre_ids[i]])
Parameters
----------
pre_values: float, jax.numpy.ndarray, JaxArray
The pre-synaptic values.
pre_ids: jax.numpy.ndarray, JaxArray
The connected pre-synaptic neuron ids.
post_ids: jax.numpy.ndarray, JaxArray
The connected post-synaptic neuron ids.
post_num: int
Output dimension. The number of post-synaptic neurons.
Returns
-------
post_val: jax.numpy.ndarray, JaxArray
The value with the size of post-synaptic neurons.
"""
out = jnp.zeros(post_num)
pre_values = as_device_array(pre_values)
post_ids = as_device_array(post_ids)
if jnp.ndim(pre_values) != 0:
_raise_pre_ids_is_none(pre_ids)
pre_ids = as_device_array(pre_ids)
pre_values = pre_values[pre_ids]
return out.at[post_ids].min(pre_values)
def pre2post_max(pre_values, post_num, post_ids, pre_ids=None):
"""The pre-to-post synaptic maximization.
This function is equivalent to:
.. highlight:: python
.. code-block:: python
post_val = np.zeros(post_num)
for i, j in zip(pre_ids, post_ids):
post_val[j] = np.maximum(post_val[j], pre_values[pre_ids[i]])
Parameters
----------
pre_values: float, jax.numpy.ndarray, JaxArray, Variable
The pre-synaptic values.
pre_ids: jax.numpy.ndarray, JaxArray
The connected pre-synaptic neuron ids.
post_ids: jax.numpy.ndarray, JaxArray
The connected post-synaptic neuron ids.
post_num: int
Output dimension. The number of post-synaptic neurons.
Returns
-------
post_val: jax.numpy.ndarray, JaxArray
The value with the size of post-synaptic neurons.
"""
out = jnp.zeros(post_num)
pre_values = as_device_array(pre_values)
post_ids = as_device_array(post_ids)
if jnp.ndim(pre_values) != 0:
_raise_pre_ids_is_none(pre_ids)
pre_ids = as_device_array(pre_ids)
pre_values = pre_values[pre_ids]
return out.at[post_ids].max(pre_values)
def pre2post_mean(pre_values, post_num, post_ids, pre_ids=None):
"""The pre-to-post synaptic mean computation.
Parameters
----------
pre_values: float, jax.numpy.ndarray, JaxArray, Variable
The pre-synaptic values.
pre_ids: jax.numpy.ndarray, JaxArray
The connected pre-synaptic neuron ids.
post_ids: jax.numpy.ndarray, JaxArray
The connected post-synaptic neuron ids.
post_num: int
Output dimension. The number of post-synaptic neurons.
Returns
-------
post_val: jax.numpy.ndarray, JaxArray
The value with the size of post-synaptic neurons.
"""
out = jnp.zeros(post_num)
pre_values = as_device_array(pre_values)
post_ids = as_device_array(post_ids)
if jnp.ndim(pre_values) == 0:
# return out.at[post_ids].set(pre_values)
return out.at[jnp.unique(post_ids)].set(pre_values)
else:
_raise_pre_ids_is_none(pre_ids)
pre_ids = as_device_array(pre_ids)
pre_values = pre2syn(pre_values, pre_ids)
return syn2post_mean(pre_values, post_ids, post_num)
def pre2post_matmul(event, conn):
event = event.value if isinstance(event, JaxArray) else event
Cl = conn[0].value if isinstance(conn[0], JaxArray) else conn[0]
Cr = conn[1].value if isinstance(conn[1], JaxArray) else conn[1]
if jnp.ndim(event) != 1:
raise ValueError(f'"event" must be a one-dimensional vector. But we got {jnp.shape(event)}')
if jnp.ndim(Cl) != 2:
raise ValueError(f'"conn" must be a two-dimensional matrix. But we got {jnp.shape(Cl)}')
if jnp.ndim(Cr) != 2:
raise ValueError(f'"conn" must be a two-dimensional matrix. But we got {jnp.shape(Cr)}')
f0 = vmap(lambda i, j: event[i] * (Cl[i] * Cr[:, j]).sum(), in_axes=(0, None))
ii = jnp.arange(Cl.shape[0])
f1 = vmap(lambda j: f0(ii, j).sum(), in_axes=(None, 0))
return f1(jnp.arange(Cr.shape[1]))
def pre2post_matmul2(event, conn):
event = event.value if isinstance(event, JaxArray) else event
Cl = conn[0].value if isinstance(conn[0], JaxArray) else conn[0]
Cr = conn[1].value if isinstance(conn[1], JaxArray) else conn[1]
if jnp.ndim(event) != 1:
raise ValueError(f'"event" must be a one-dimensional vector. But we got {jnp.shape(event)}')
if jnp.ndim(Cl) != 2:
raise ValueError(f'"conn" must be a two-dimensional matrix. But we got {jnp.shape(Cl)}')
if jnp.ndim(Cr) != 2:
raise ValueError(f'"conn" must be a two-dimensional matrix. But we got {jnp.shape(Cr)}')
f1 = vmap(lambda j: (event * (Cl * Cr[:, j]).sum(1)).sum())
return f1(jnp.arange(Cr.shape[1]))
def pre2post_matmul_mask(event, conn, mask):
event = event.value if isinstance(event, JaxArray) else event
Cl = conn[0].value if isinstance(conn[0], JaxArray) else conn[0]
Cr = conn[1].value if isinstance(conn[1], JaxArray) else conn[1]
Ml = mask[0].value if isinstance(mask[0], JaxArray) else mask[0]
Mr = mask[1].value if isinstance(mask[1], JaxArray) else mask[1]
if jnp.ndim(event) != 1:
raise ValueError(f'"event" must be a one-dimensional vector. But we got {jnp.shape(event)}')
if jnp.ndim(Cl) != 2:
raise ValueError(f'"conn" must be a two-dimensional matrix. But we got {jnp.shape(Cl)}')
if jnp.ndim(Cr) != 2:
raise ValueError(f'"conn" must be a two-dimensional matrix. But we got {jnp.shape(Cr)}')
if jnp.ndim(Mr) != 2:
raise ValueError(f'"mask" must be a two-dimensional matrix. But we got {jnp.shape(Mr)}')
if jnp.ndim(Ml) != 2:
raise ValueError(f'"mask" must be a two-dimensional matrix. But we got {jnp.shape(Ml)}')
f0 = vmap(lambda i, j: event[i] * (Cl[i] * Cr[:, j]).sum() * (Ml[i] * Mr[:, j]).sum(), in_axes=(0, None))
f1 = jit(vmap(lambda ii, j: f0(ii, j).sum(), in_axes=(None, 0)))
return f1(jnp.arange(Cl.shape[0]), jnp.arange(Cr.shape[1]))
def pre2post_matmul_mask2(event, conn, mask):
event = event.value if isinstance(event, JaxArray) else event
Cl = conn[0].value if isinstance(conn[0], JaxArray) else conn[0]
Cr = conn[1].value if isinstance(conn[1], JaxArray) else conn[1]
Ml = mask[0].value if isinstance(mask[0], JaxArray) else mask[0]
Mr = mask[1].value if isinstance(mask[1], JaxArray) else mask[1]
if jnp.ndim(event) != 1:
raise ValueError(f'"event" must be a one-dimensional vector. But we got {jnp.shape(event)}')
if jnp.ndim(Cl) != 2:
raise ValueError(f'"conn" must be a two-dimensional matrix. But we got {jnp.shape(Cl)}')
if jnp.ndim(Cr) != 2:
raise ValueError(f'"conn" must be a two-dimensional matrix. But we got {jnp.shape(Cr)}')
if jnp.ndim(Mr) != 2:
raise ValueError(f'"mask" must be a two-dimensional matrix. But we got {jnp.shape(Mr)}')
if jnp.ndim(Ml) != 2:
raise ValueError(f'"mask" must be a two-dimensional matrix. But we got {jnp.shape(Ml)}')
# f0 = vmap(lambda i, j: event[i] * (Cl[i] * Cr[:, j]).sum() * (Ml[i] * Mr[:, j]).sum(), in_axes=(0, None))
@partial(vmap, in_axes=(0, None))
def f0(i, j):
return cond(event[i],
lambda: cond(Ml[i] @ Mr[:, j],
lambda: (Cl[i] * Cr[:, j]).sum(),
lambda: 0.),
lambda: 0.)
ii = jnp.arange(Cl.shape[0])
jj = jnp.arange(Cr.shape[1])
# def body(_, j):
# r = f0(ii, j).sum()
# return 0, r
# _, out = scan(body, 0, jj)
# return out
f = jit(vmap(lambda j: f0(ii, j).sum()))
return f(jj)
|
PypiClean
|
/dogapi-1.11.1.tar.gz/dogapi-1.11.1/src/dogshell/tag.py
|
import sys
try:
import simplejson as json
except ImportError:
import json
from dogshell.common import report_errors, report_warnings, CommandLineClient
class TagClient(CommandLineClient):
def setup_parser(self, subparsers):
parser = subparsers.add_parser('tag', help='View and modify host tags.')
verb_parsers = parser.add_subparsers(title='Verbs')
add_parser = verb_parsers.add_parser('add', help='Add a host to one or more tags.', description='Hosts can be specified by name or id.')
add_parser.add_argument('host', help='host to add')
add_parser.add_argument('tag', help='tag to add host to (one or more, space separated)', nargs='+')
add_parser.set_defaults(func=self._add)
replace_parser = verb_parsers.add_parser('replace', help='Replace all tags with one or more new tags.', description='Hosts can be specified by name or id.')
replace_parser.add_argument('host', help='host to modify')
replace_parser.add_argument('tag', help='tag to add host to (one or more, space separated)', nargs='+')
replace_parser.set_defaults(func=self._replace)
show_parser = verb_parsers.add_parser('show', help='Show host tags.', description='Hosts can be specified by name or id.')
show_parser.add_argument('host', help='host to show (or "all" to show all tags)')
show_parser.set_defaults(func=self._show)
detach_parser = verb_parsers.add_parser('detach', help='Remove a host from all tags.', description='Hosts can be specified by name or id.')
detach_parser.add_argument('host', help='host to detach')
detach_parser.set_defaults(func=self._detach)
def _add(self, args):
self.dog.timeout = args.timeout
format = args.format
res = self.dog.add_tags(args.host, *args.tag)
report_warnings(res)
report_errors(res)
if format == 'pretty':
print("Tags for '%s':" % res['host'])
for c in res['tags']:
print(' ' + c)
elif format == 'raw':
print(json.dumps(res))
else:
for c in res['tags']:
print(c)
def _replace(self, args):
self.dog.timeout = args.timeout
format = args.format
res = self.dog.change_tags(args.host, *args.tag)
report_warnings(res)
report_errors(res)
if format == 'pretty':
print("Tags for '%s':" % res['host'])
for c in res['tags']:
print(' ' + c)
elif format == 'raw':
print(json.dumps(res))
else:
for c in res['tags']:
print(c)
def _show(self, args):
self.dog.timeout = args.timeout
format = args.format
if args.host == 'all':
res = self.dog.all_tags()
else:
res = self.dog.host_tags(args.host)
report_warnings(res)
report_errors(res)
if args.host == 'all':
if format == 'pretty':
for tag, hosts in list(res['tags'].items()):
for host in hosts:
print(tag)
print(' ' + host)
print()
elif format == 'raw':
print(json.dumps(res))
else:
for tag, hosts in list(res['tags'].items()):
for host in hosts:
print(tag + '\t' + host)
else:
if format == 'pretty':
for tag in res['tags']:
print(tag)
elif format == 'raw':
print(json.dumps(res))
else:
for tag in res['tags']:
print(tag)
def _detach(self, args):
self.dog.timeout = args.timeout
format = args.format
res = self.dog.detach_tags(args.host)
report_warnings(res)
report_errors(res)
if format == 'raw':
print(json.dumps(res))
|
PypiClean
|
/condor-ir-1.2.2.tar.gz/condor-ir-1.2.2/condor/scripts/evaluate.py
|
import collections
import json
import click
import numpy
from condor.dbutil import requires_db, find_one
from condor.models.ranking_matrix import RankingMatrix
PerformanceResult = collections.namedtuple(
'PerformanceResult',
[
'true_positives',
'true_negatives',
'false_positives',
'false_negatives',
'precision',
'recall',
'f1_score',
]
)
@click.command()
@click.argument('target')
@click.option('--limit', '-l', default=None, type=int,
help='limit the number of results to use.')
@click.option('--cosine', '-c', default=None, type=float,
help='limit the query by cosine.')
@click.option('--words', '-w', default=None, type=int,
help='limit the number of words in the query')
@click.option('--output', '-o', type=click.File('w'),
help='export a detailed performance report')
@click.option('--tabular', '-t', is_flag=True,
help='show tabular output')
@requires_db
def evaluate(db, target, limit, cosine, words, tabular, output):
"""
Evaluates a target search engine, the search engine needs to be associated
to some queries in order to be evaluated, this command mainly returns
precision and recall values for the different queries and an average of
these values at the end.
"""
ranking_matrix = find_one(db, RankingMatrix, target)
bibliography = ranking_matrix.term_document_matrix.bibliography
queries = bibliography.queries
universe = set(d.eid for d in bibliography.documents)
# We'll perform all the queries and do a mean of the f1 score
performance_results = {}
for query in queries:
if words is not None and len(query.query_string.split()) != words:
continue
results = ranking_matrix.query(
query.query_string.split(), limit=limit, cosine=cosine)
experiment = set(r.eid for r, _ in results)
truth = set(r.document.eid for r in query.results)
false_negatives = truth.difference(experiment)
true_positives = truth.intersection(experiment)
false_positives = experiment.difference(truth)
true_negatives = universe.difference(truth.union(experiment))
# Validate precision
if len(true_positives) + len(false_positives) > 0:
precision = len(true_positives) / \
(len(true_positives) + len(false_positives))
else:
precision = None
# Validate recall this might never happen
if len(true_positives) + len(false_negatives) > 0:
recall = len(true_positives) / \
(len(true_positives) + len(false_negatives))
else:
recall = None
if precision is not None and recall is not None and precision + recall > 0:
f1_score = 2 * precision * recall / (precision + recall)
else:
f1_score = None
performance_results[query.query_string] = PerformanceResult(
false_negatives=len(false_negatives),
true_positives=len(true_positives),
false_positives=len(false_positives),
true_negatives=len(true_negatives),
precision=precision,
recall=recall,
f1_score=f1_score,
)
averages = {
metric: numpy.mean([
getattr(result, metric)
for result in performance_results.values()
if getattr(result, metric) is not None
])
for metric in ('precision', 'recall', 'f1_score')
}
if output:
json.dump({
'parameters': {
'target': ranking_matrix.eid,
'queries': len(performance_results),
'cosine': cosine,
'limit': limit,
},
'averages': averages,
'results': {
q: res._asdict()
for q, res in performance_results.items()
},
}, output, indent=2)
if tabular:
click.echo('{param} {results}'.format(
param=limit or cosine or 10,
results=' '.join([str(a) for a in averages.values()])
))
else:
click.echo(json.dumps(averages, indent=2))
|
PypiClean
|
/Transcrypt-3.7.16.tar.gz/Transcrypt-3.7.16/transcrypt/demos/parcel_demo/node_modules/postcss-discard-unused/node_modules/postcss/lib/list.js
|
'use strict';
exports.__esModule = true;
/**
* Contains helpers for safely splitting lists of CSS values,
* preserving parentheses and quotes.
*
* @example
* const list = postcss.list;
*
* @namespace list
*/
var list = {
split: function split(string, separators, last) {
var array = [];
var current = '';
var split = false;
var func = 0;
var quote = false;
var escape = false;
for (var i = 0; i < string.length; i++) {
var letter = string[i];
if (quote) {
if (escape) {
escape = false;
} else if (letter === '\\') {
escape = true;
} else if (letter === quote) {
quote = false;
}
} else if (letter === '"' || letter === '\'') {
quote = letter;
} else if (letter === '(') {
func += 1;
} else if (letter === ')') {
if (func > 0) func -= 1;
} else if (func === 0) {
if (separators.indexOf(letter) !== -1) split = true;
}
if (split) {
if (current !== '') array.push(current.trim());
current = '';
split = false;
} else {
current += letter;
}
}
if (last || current !== '') array.push(current.trim());
return array;
},
/**
* Safely splits space-separated values (such as those for `background`,
* `border-radius`, and other shorthand properties).
*
* @param {string} string - space-separated values
*
* @return {string[]} split values
*
* @example
* postcss.list.space('1px calc(10% + 1px)') //=> ['1px', 'calc(10% + 1px)']
*/
space: function space(string) {
var spaces = [' ', '\n', '\t'];
return list.split(string, spaces);
},
/**
* Safely splits comma-separated values (such as those for `transition-*`
* and `background` properties).
*
* @param {string} string - comma-separated values
*
* @return {string[]} split values
*
* @example
* postcss.list.comma('black, linear-gradient(white, black)')
* //=> ['black', 'linear-gradient(white, black)']
*/
comma: function comma(string) {
var comma = ',';
return list.split(string, [comma], true);
}
};
exports.default = list;
module.exports = exports['default'];
//# sourceMappingURL=data:application/json;charset=utf8;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbImxpc3QuZXM2Il0sIm5hbWVzIjpbImxpc3QiLCJzcGxpdCIsInN0cmluZyIsInNlcGFyYXRvcnMiLCJsYXN0IiwiYXJyYXkiLCJjdXJyZW50IiwiZnVuYyIsInF1b3RlIiwiZXNjYXBlIiwiaSIsImxlbmd0aCIsImxldHRlciIsImluZGV4T2YiLCJwdXNoIiwidHJpbSIsInNwYWNlIiwic3BhY2VzIiwiY29tbWEiXSwibWFwcGluZ3MiOiI7OztBQUFBOzs7Ozs7Ozs7QUFTQSxJQUFJQSxPQUFPO0FBRVBDLFNBRk8saUJBRURDLE1BRkMsRUFFT0MsVUFGUCxFQUVtQkMsSUFGbkIsRUFFeUI7QUFDNUIsWUFBSUMsUUFBVSxFQUFkO0FBQ0EsWUFBSUMsVUFBVSxFQUFkO0FBQ0EsWUFBSUwsUUFBVSxLQUFkOztBQUVBLFlBQUlNLE9BQVUsQ0FBZDtBQUNBLFlBQUlDLFFBQVUsS0FBZDtBQUNBLFlBQUlDLFNBQVUsS0FBZDs7QUFFQSxhQUFNLElBQUlDLElBQUksQ0FBZCxFQUFpQkEsSUFBSVIsT0FBT1MsTUFBNUIsRUFBb0NELEdBQXBDLEVBQTBDO0FBQ3RDLGdCQUFJRSxTQUFTVixPQUFPUSxDQUFQLENBQWI7O0FBRUEsZ0JBQUtGLEtBQUwsRUFBYTtBQUNULG9CQUFLQyxNQUFMLEVBQWM7QUFDVkEsNkJBQVMsS0FBVDtBQUNILGlCQUZELE1BRU8sSUFBS0csV0FBVyxJQUFoQixFQUF1QjtBQUMxQkgsNkJBQVMsSUFBVDtBQUNILGlCQUZNLE1BRUEsSUFBS0csV0FBV0osS0FBaEIsRUFBd0I7QUFDM0JBLDRCQUFRLEtBQVI7QUFDSDtBQUNKLGFBUkQsTUFRTyxJQUFLSSxXQUFXLEdBQVgsSUFBa0JBLFdBQVcsSUFBbEMsRUFBeUM7QUFDNUNKLHdCQUFRSSxNQUFSO0FBQ0gsYUFGTSxNQUVBLElBQUtBLFdBQVcsR0FBaEIsRUFBc0I7QUFDekJMLHdCQUFRLENBQVI7QUFDSCxhQUZNLE1BRUEsSUFBS0ssV0FBVyxHQUFoQixFQUFzQjtBQUN6QixvQkFBS0wsT0FBTyxDQUFaLEVBQWdCQSxRQUFRLENBQVI7QUFDbkIsYUFGTSxNQUVBLElBQUtBLFNBQVMsQ0FBZCxFQUFrQjtBQUNyQixvQkFBS0osV0FBV1UsT0FBWCxDQUFtQkQsTUFBbkIsTUFBK0IsQ0FBQyxDQUFyQyxFQUF5Q1gsUUFBUSxJQUFSO0FBQzVDOztBQUVELGdCQUFLQSxLQUFMLEVBQWE7QUFDVCxvQkFBS0ssWUFBWSxFQUFqQixFQUFzQkQsTUFBTVMsSUFBTixDQUFXUixRQUFRUyxJQUFSLEVBQVg7QUFDdEJULDBCQUFVLEVBQVY7QUFDQUwsd0JBQVUsS0FBVjtBQUNILGFBSkQsTUFJTztBQUNISywyQkFBV00sTUFBWDtBQUNIO0FBQ0o7O0FBRUQsWUFBS1IsUUFBUUUsWUFBWSxFQUF6QixFQUE4QkQsTUFBTVMsSUFBTixDQUFXUixRQUFRUyxJQUFSLEVBQVg7QUFDOUIsZUFBT1YsS0FBUDtBQUNILEtBM0NNOzs7QUE2Q1A7Ozs7Ozs7Ozs7O0FBV0FXLFNBeERPLGlCQXdERGQsTUF4REMsRUF3RE87QUFDVixZQUFJZSxTQUFTLENBQUMsR0FBRCxFQUFNLElBQU4sRUFBWSxJQUFaLENBQWI7QUFDQSxlQUFPakIsS0FBS0MsS0FBTCxDQUFXQyxNQUFYLEVBQW1CZSxNQUFuQixDQUFQO0FBQ0gsS0EzRE07OztBQTZEUDs7Ozs7Ozs7Ozs7O0FBWUFDLFNBekVPLGlCQXlFRGhCLE1BekVDLEVBeUVPO0FBQ1YsWUFBSWdCLFFBQVEsR0FBWjtBQUNBLGVBQU9sQixLQUFLQyxLQUFMLENBQVdDLE1BQVgsRUFBbUIsQ0FBQ2dCLEtBQUQsQ0FBbkIsRUFBNEIsSUFBNUIsQ0FBUDtBQUNIO0FBNUVNLENBQVg7O2tCQWdGZWxCLEkiLCJmaWxlIjoibGlzdC5qcyIsInNvdXJjZXNDb250ZW50IjpbIi8qKlxuICogQ29udGFpbnMgaGVscGVycyBmb3Igc2FmZWx5IHNwbGl0dGluZyBsaXN0cyBvZiBDU1MgdmFsdWVzLFxuICogcHJlc2VydmluZyBwYXJlbnRoZXNlcyBhbmQgcXVvdGVzLlxuICpcbiAqIEBleGFtcGxlXG4gKiBjb25zdCBsaXN0ID0gcG9zdGNzcy5saXN0O1xuICpcbiAqIEBuYW1lc3BhY2UgbGlzdFxuICovXG5sZXQgbGlzdCA9IHtcblxuICAgIHNwbGl0KHN0cmluZywgc2VwYXJhdG9ycywgbGFzdCkge1xuICAgICAgICBsZXQgYXJyYXkgICA9IFtdO1xuICAgICAgICBsZXQgY3VycmVudCA9ICcnO1xuICAgICAgICBsZXQgc3BsaXQgICA9IGZhbHNlO1xuXG4gICAgICAgIGxldCBmdW5jICAgID0gMDtcbiAgICAgICAgbGV0IHF1b3RlICAgPSBmYWxzZTtcbiAgICAgICAgbGV0IGVzY2FwZSAgPSBmYWxzZTtcblxuICAgICAgICBmb3IgKCBsZXQgaSA9IDA7IGkgPCBzdHJpbmcubGVuZ3RoOyBpKysgKSB7XG4gICAgICAgICAgICBsZXQgbGV0dGVyID0gc3RyaW5nW2ldO1xuXG4gICAgICAgICAgICBpZiAoIHF1b3RlICkge1xuICAgICAgICAgICAgICAgIGlmICggZXNjYXBlICkge1xuICAgICAgICAgICAgICAgICAgICBlc2NhcGUgPSBmYWxzZTtcbiAgICAgICAgICAgICAgICB9IGVsc2UgaWYgKCBsZXR0ZXIgPT09ICdcXFxcJyApIHtcbiAgICAgICAgICAgICAgICAgICAgZXNjYXBlID0gdHJ1ZTtcbiAgICAgICAgICAgICAgICB9IGVsc2UgaWYgKCBsZXR0ZXIgPT09IHF1b3RlICkge1xuICAgICAgICAgICAgICAgICAgICBxdW90ZSA9IGZhbHNlO1xuICAgICAgICAgICAgICAgIH1cbiAgICAgICAgICAgIH0gZWxzZSBpZiAoIGxldHRlciA9PT0gJ1wiJyB8fCBsZXR0ZXIgPT09ICdcXCcnICkge1xuICAgICAgICAgICAgICAgIHF1b3RlID0gbGV0dGVyO1xuICAgICAgICAgICAgfSBlbHNlIGlmICggbGV0dGVyID09PSAnKCcgKSB7XG4gICAgICAgICAgICAgICAgZnVuYyArPSAxO1xuICAgICAgICAgICAgfSBlbHNlIGlmICggbGV0dGVyID09PSAnKScgKSB7XG4gICAgICAgICAgICAgICAgaWYgKCBmdW5jID4gMCApIGZ1bmMgLT0gMTtcbiAgICAgICAgICAgIH0gZWxzZSBpZiAoIGZ1bmMgPT09IDAgKSB7XG4gICAgICAgICAgICAgICAgaWYgKCBzZXBhcmF0b3JzLmluZGV4T2YobGV0dGVyKSAhPT0gLTEgKSBzcGxpdCA9IHRydWU7XG4gICAgICAgICAgICB9XG5cbiAgICAgICAgICAgIGlmICggc3BsaXQgKSB7XG4gICAgICAgICAgICAgICAgaWYgKCBjdXJyZW50ICE9PSAnJyApIGFycmF5LnB1c2goY3VycmVudC50cmltKCkpO1xuICAgICAgICAgICAgICAgIGN1cnJlbnQgPSAnJztcbiAgICAgICAgICAgICAgICBzcGxpdCAgID0gZmFsc2U7XG4gICAgICAgICAgICB9IGVsc2Uge1xuICAgICAgICAgICAgICAgIGN1cnJlbnQgKz0gbGV0dGVyO1xuICAgICAgICAgICAgfVxuICAgICAgICB9XG5cbiAgICAgICAgaWYgKCBsYXN0IHx8IGN1cnJlbnQgIT09ICcnICkgYXJyYXkucHVzaChjdXJyZW50LnRyaW0oKSk7XG4gICAgICAgIHJldHVybiBhcnJheTtcbiAgICB9LFxuXG4gICAgLyoqXG4gICAgICogU2FmZWx5IHNwbGl0cyBzcGFjZS1zZXBhcmF0ZWQgdmFsdWVzIChzdWNoIGFzIHRob3NlIGZvciBgYmFja2dyb3VuZGAsXG4gICAgICogYGJvcmRlci1yYWRpdXNgLCBhbmQgb3RoZXIgc2hvcnRoYW5kIHByb3BlcnRpZXMpLlxuICAgICAqXG4gICAgICogQHBhcmFtIHtzdHJpbmd9IHN0cmluZyAtIHNwYWNlLXNlcGFyYXRlZCB2YWx1ZXNcbiAgICAgKlxuICAgICAqIEByZXR1cm4ge3N0cmluZ1tdfSBzcGxpdCB2YWx1ZXNcbiAgICAgKlxuICAgICAqIEBleGFtcGxlXG4gICAgICogcG9zdGNzcy5saXN0LnNwYWNlKCcxcHggY2FsYygxMCUgKyAxcHgpJykgLy89PiBbJzFweCcsICdjYWxjKDEwJSArIDFweCknXVxuICAgICAqL1xuICAgIHNwYWNlKHN0cmluZykge1xuICAgICAgICBsZXQgc3BhY2VzID0gWycgJywgJ1xcbicsICdcXHQnXTtcbiAgICAgICAgcmV0dXJuIGxpc3Quc3BsaXQoc3RyaW5nLCBzcGFjZXMpO1xuICAgIH0sXG5cbiAgICAvKipcbiAgICAgKiBTYWZlbHkgc3BsaXRzIGNvbW1hLXNlcGFyYXRlZCB2YWx1ZXMgKHN1Y2ggYXMgdGhvc2UgZm9yIGB0cmFuc2l0aW9uLSpgXG4gICAgICogYW5kIGBiYWNrZ3JvdW5kYCBwcm9wZXJ0aWVzKS5cbiAgICAgKlxuICAgICAqIEBwYXJhbSB7c3RyaW5nfSBzdHJpbmcgLSBjb21tYS1zZXBhcmF0ZWQgdmFsdWVzXG4gICAgICpcbiAgICAgKiBAcmV0dXJuIHtzdHJpbmdbXX0gc3BsaXQgdmFsdWVzXG4gICAgICpcbiAgICAgKiBAZXhhbXBsZVxuICAgICAqIHBvc3Rjc3MubGlzdC5jb21tYSgnYmxhY2ssIGxpbmVhci1ncmFkaWVudCh3aGl0ZSwgYmxhY2spJylcbiAgICAgKiAvLz0+IFsnYmxhY2snLCAnbGluZWFyLWdyYWRpZW50KHdoaXRlLCBibGFjayknXVxuICAgICAqL1xuICAgIGNvbW1hKHN0cmluZykge1xuICAgICAgICBsZXQgY29tbWEgPSAnLCc7XG4gICAgICAgIHJldHVybiBsaXN0LnNwbGl0KHN0cmluZywgW2NvbW1hXSwgdHJ1ZSk7XG4gICAgfVxuXG59O1xuXG5leHBvcnQgZGVmYXVsdCBsaXN0O1xuIl19
|
PypiClean
|
/askbot-tuan-1.5.tar.gz/askbot-tuan-1.5/askbot/setup_templates/static/tiny_mce/plugins/table/langs/kl_dlg.js
|
tinyMCE.addI18n('kl.table_dlg',{"rules_border":"border","rules_box":"box","rules_vsides":"vsides","rules_rhs":"rhs","rules_lhs":"lhs","rules_hsides":"hsides","rules_below":"below","rules_above":"above","rules_void":"void",rules:"Rules","frame_all":"all","frame_cols":"cols","frame_rows":"rows","frame_groups":"groups","frame_none":"none",frame:"Frame",caption:"Table caption","missing_scope":"Are you sure you want to continue without specifying a scope for this table header cell. Without it, it may be difficult for some users with disabilities to understand the content or data displayed of the table.","cell_limit":"You\'ve exceeded the maximum number of cells of {$cells}.","row_limit":"You\'ve exceeded the maximum number of rows of {$rows}.","col_limit":"You\'ve exceeded the maximum number of columns of {$cols}.",colgroup:"Col Group",rowgroup:"Row Group",scope:"Scope",tfoot:"Table Foot",tbody:"Table Body",thead:"Table Head","row_all":"Update all rows in table","row_even":"Update even rows in table","row_odd":"Update odd rows in table","row_row":"Update current row","cell_all":"Update all cells in table","cell_row":"Update all cells in row","cell_cell":"Update current cell",th:"Header",td:"Data",summary:"Summary",bgimage:"Background image",rtl:"Right to left",ltr:"Left to right",mime:"Target MIME type",langcode:"Language code",langdir:"Language direction",style:"Style",id:"Id","merge_cells_title":"Merge table cells",bgcolor:"Background color",bordercolor:"Border color","align_bottom":"Bottom","align_top":"Top",valign:"Vertical alignment","cell_type":"Cell type","cell_title":"Table cell properties","row_title":"Table row properties","align_middle":"Center","align_right":"Right","align_left":"Left","align_default":"Default",align:"Alignment",border:"Border",cellpadding:"Cellpadding",cellspacing:"Cellspacing",rows:"Rows",cols:"Cols",height:"Height",width:"Width",title:"Insert/Modify table",rowtype:"Row in table part","advanced_props":"Advanced properties","general_props":"General properties","advanced_tab":"Advanced","general_tab":"General","cell_col":"Update all cells in column"});
|
PypiClean
|
/counter_caller-2.0.3rc5-py3-none-any.whl/counter_caller/bower_components/howler.js/LICENSE.md
|
Copyright (c) 2013-2014 James Simpson and GoldFire Studios, Inc.
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
PypiClean
|
/tw.dojo-0.9.181.tar.gz/tw.dojo-0.9.181/tw/dojo/static/1.8.1/min/dojox/highlight/languages/xquery.js.uncompressed.js
|
define("dojox/highlight/languages/xquery", ["../_base"], function(dh){
// Very simple XQuery language file. Would be nice
// to eventually handle more of the enclosed expressions
// and direct XML element construction
var XQUERY_COMMENT = {
className: 'comment',
begin: '\\(\\:', end: '\\:\\)'
};
var XQUERY_KEYWORDS = {
// From section A2.2 of the XQuery 1.0 specification
'ancestor': 1, 'ancestor-or-self': 1, 'and' : 1,
'as': 1, 'ascending': 1, 'at': 1, 'attribute': 1,
'base-uri': 1, 'boundary-space': 1, 'by': 1, 'case': 1,
'cast': 1, 'castable': 1, 'child': 1, 'collation': 1,
'comment': 1, 'construction': 1, 'copy-namespaces': 1,
'declare': 1, 'default': 1, 'descendant': 1, 'descendant-or-self': 1,
'descending': 1, 'div': 1, 'document': 1, 'document-node': 1,
'element': 1, 'else': 1, 'empty': 1, 'empty-sequence': 1,
'encoding': 1, 'eq': 1, 'every': 1, 'except': 1, 'external': 1,
'following': 1, 'following-sibling': 1, 'for': 1, 'function': 1,
'ge': 1, 'greatest': 1, 'gt': 1, 'idiv': 1, 'if': 1, 'import': 1,
'in': 1, 'inherit': 1, 'instance': 1, 'intersect': 1, 'is': 1,
'item': 1, 'lax': 1, 'le': 1, 'least': 1, 'let': 1, 'lt': 1,
'mod': 1, 'module': 1, 'namespace': 1, 'ne': 1, 'node': 1,
'no-inherit': 1, 'no-preserve': 1, 'of': 1, 'option': 1, 'or': 1,
'order': 1, 'ordered': 1, 'ordering': 1, 'parent': 1,
'preceding': 1, 'preceding-sibling': 1, 'preserve': 1,
'processing-instruction': 1, 'return': 1, 'satisfies': 1,
'schema': 1, 'schema-attribute': 1, 'schema-element': 1,
'self': 1, 'some': 1, 'stable': 1, 'strict': 1, 'strip': 1,
'text': 1, 'then': 1, 'to': 1, 'treat': 1, 'typeswitch': 1,
'union': 1, 'unordered': 1, 'validate': 1, 'variable': 1,
'version': 1, 'where': 1, 'xquery': 1
};
var dhc = dh.constants;
dh.languages.xquery = {
case_insensitive: true,
defaultMode: {
lexems: [dhc.IDENT_RE],
contains: ['string', 'number', 'comment'],
keywords: {
'keyword': XQUERY_KEYWORDS
}
},
modes: [
XQUERY_COMMENT
],
XQUERY_COMMENT: XQUERY_COMMENT
};
return dh.languages.xquery;
});
|
PypiClean
|
/aifs_nni-1.9.5-py3-none-manylinux1_x86_64.whl/aifs_nni-1.9.5.data/data/nni/node_modules/node-jose/lib/algorithms/rsassa.js
|
"use strict";
var forge = require("../deps/forge.js"),
CONSTANTS = require("./constants"),
helpers = require("./helpers.js"),
rsaUtil = require("./rsa-util.js");
function nodePSSsupport() {
return helpers.nodeCrypto && helpers.nodeCrypto.constants && helpers.nodeCrypto.constants.RSA_PSS_SALTLEN_DIGEST;
}
// ### RSASSA-PKCS1-v1_5
function rsassaV15SignFn(name) {
var md = name.replace("RS", "SHA").toLowerCase(),
hash = name.replace("RS", "SHA-");
var alg = {
name: "RSASSA-PKCS1-V1_5",
hash: {
name: hash
}
};
// ### Fallback Implementation -- uses forge
var fallback = function(key, pdata) {
// create the digest
var digest = forge.md[md].create();
digest.start();
digest.update(pdata);
// sign it
var pki = rsaUtil.convertToForge(key, false);
var sig = pki.sign(digest, "RSASSA-PKCS1-V1_5");
sig = Buffer.from(sig, "binary");
return Promise.resolve({
data: pdata,
mac: sig
});
};
// ### WebCryptoAPI Implementation
var webcrypto = function(key, pdata) {
key = rsaUtil.convertToJWK(key, false);
var promise;
promise = helpers.subtleCrypto.importKey("jwk", key, alg, true, ["sign"]);
promise = promise.then(function(key) {
return helpers.subtleCrypto.sign(alg, key, pdata);
});
promise = promise.then(function(result) {
var sig = Buffer.from(result);
return {
data: pdata,
mac: sig
};
});
return promise;
};
var nodejs;
var nodeHash = "RSA-" + hash.replace("-", "");
if (helpers.nodeCrypto && helpers.nodeCrypto.getHashes().indexOf(nodeHash) > -1) {
nodejs = function(key, pdata) {
var sign = helpers.nodeCrypto.createSign(nodeHash);
sign.update(pdata);
return {
data: pdata,
mac: sign.sign(rsaUtil.convertToPem(key, false))
};
};
}
return helpers.setupFallback(nodejs, webcrypto, fallback);
}
function rsassaV15VerifyFn(name) {
var md = name.replace("RS", "SHA").toLowerCase(),
hash = name.replace("RS", "SHA-");
var alg = {
name: "RSASSA-PKCS1-V1_5",
hash: {
name: hash
}
};
// ### Fallback implementation -- uses forge
var fallback = function(key, pdata, mac) {
// create the digest
var digest = forge.md[md].create();
digest.start();
digest.update(pdata);
digest = digest.digest().bytes();
// verify it
var pki = rsaUtil.convertToForge(key, true);
var sig = mac.toString("binary");
var result = pki.verify(digest, sig, "RSASSA-PKCS1-V1_5");
if (!result) {
return Promise.reject(new Error("verification failed"));
}
return Promise.resolve({
data: pdata,
mac: mac,
valid: true
});
};
// ### WebCryptoAPI Implementation
var webcrypto = function(key, pdata, mac) {
key = rsaUtil.convertToJWK(key, true);
var promise;
promise = helpers.subtleCrypto.importKey("jwk", key, alg, true, ["verify"]);
promise = promise.then(function(key) {
return helpers.subtleCrypto.verify(alg, key, mac, pdata);
});
promise = promise.then(function(result) {
if (!result) {
return Promise.reject(new Error("verification failed"));
}
return {
data: pdata,
mac: mac,
valid: true
};
});
return promise;
};
var nodejs;
if (helpers.nodeCrypto && helpers.nodeCrypto.getHashes().indexOf(md) > -1) {
nodejs = function(key, pdata, mac) {
var verify = helpers.nodeCrypto.createVerify(md);
verify.update(pdata);
verify.end();
var result = verify.verify(rsaUtil.convertToPem(key, true), mac);
if (!result) {
return Promise.reject(new Error("verification failed"));
}
return {
data: pdata,
mac: mac,
valid: true
};
};
}
return helpers.setupFallback(nodejs, webcrypto, fallback);
}
// ### RSA-PSS
function rsassaPssSignFn(name) {
var md = name.replace("PS", "SHA").toLowerCase(),
hash = name.replace("PS", "SHA-");
var alg = {
name: "RSA-PSS",
hash: {
name: hash
},
saltLength: CONSTANTS.HASHLENGTH[hash] / 8
};
// ### Fallback implementation -- uses forge
var fallback = function (key, pdata) {
// create the digest
var digest = forge.md[md].create();
digest.start();
digest.update(pdata);
// setup padding
var pss = forge.pss.create({
md: forge.md[md].create(),
mgf: forge.mgf.mgf1.create(forge.md[md].create()),
saltLength: CONSTANTS.HASHLENGTH[hash] / 8
});
// sign it
var pki = rsaUtil.convertToForge(key, false);
var sig = pki.sign(digest, pss);
sig = Buffer.from(sig, "binary");
return Promise.resolve({
data: pdata,
mac: sig
});
};
// ### WebCryptoAPI Implementation
var webcrypto = function(key, pdata) {
key = rsaUtil.convertToJWK(key, false);
var promise;
promise = helpers.subtleCrypto.importKey("jwk", key, alg, true, ["sign"]);
promise = promise.then(function (key) {
return helpers.subtleCrypto.sign(alg, key, pdata);
});
promise = promise.then(function (result) {
var sig = Buffer.from(result);
return {
data: pdata,
mac: sig
};
});
return promise;
};
var nodejs;
var nodeHash = "RSA-" + hash.replace("-", "");
if (nodePSSsupport()) {
nodejs = function(key, pdata) {
var sign = helpers.nodeCrypto.createSign(nodeHash);
sign.update(pdata);
var sig = sign.sign({
key: rsaUtil.convertToPem(key, false),
padding: helpers.nodeCrypto.constants.RSA_PKCS1_PSS_PADDING,
saltLength: helpers.nodeCrypto.constants.RSA_PSS_SALTLEN_DIGEST
});
return {
data: pdata,
mac: sig
};
};
}
return helpers.setupFallback(nodejs, webcrypto, fallback);
}
function rsassaPssVerifyFn(name) {
var md = name.replace("PS", "SHA").toLowerCase(),
hash = name.replace("PS", "SHA-");
var alg = {
name: "RSA-PSS",
hash: {
name: hash
},
saltLength: CONSTANTS.HASHLENGTH[hash] / 8
};
// ### Fallback implementation -- uses forge
var fallback = function (key, pdata, mac) {
// create the digest
var digest = forge.md[md].create();
digest.start();
digest.update(pdata);
digest = digest.digest().bytes();
// setup padding
var pss = forge.pss.create({
md: forge.md[md].create(),
mgf: forge.mgf.mgf1.create(forge.md[md].create()),
saltLength: CONSTANTS.HASHLENGTH[hash] / 8
});
// verify it
var pki = rsaUtil.convertToForge(key, true);
var sig = mac.toString("binary");
var result = pki.verify(digest, sig, pss);
if (!result) {
return Promise.reject(new Error("verification failed"));
}
return Promise.resolve({
data: pdata,
mac: mac,
valid: true
});
};
// ### WebCryptoAPI Implementation
var webcrypto = function(key, pdata, mac) {
key = rsaUtil.convertToJWK(key, true);
var promise;
promise = helpers.subtleCrypto.importKey("jwk", key, alg, true, ["verify"]);
promise = promise.then(function (key) {
return helpers.subtleCrypto.verify(alg, key, mac, pdata);
});
promise = promise.then(function (result) {
if (!result) {
return Promise.reject(new Error("verification failed"));
}
return {
data: pdata,
mac: mac,
valid: true
};
});
return promise;
};
var nodejs;
if (nodePSSsupport()) {
nodejs = function(key, pdata, mac) {
var verify = helpers.nodeCrypto.createVerify(md);
verify.update(pdata);
verify.end();
var result = verify.verify({
key: rsaUtil.convertToPem(key, true),
padding: helpers.nodeCrypto.constants.RSA_PKCS1_PSS_PADDING,
saltLength: helpers.nodeCrypto.constants.RSA_PSS_SALTLEN_DIGEST
}, mac);
if (!result) {
return Promise.reject(new Error("verification failed"));
}
return {
data: pdata,
mac: mac,
valid: true
};
};
}
return helpers.setupFallback(nodejs, webcrypto, fallback);
}
// ### Public API
// * [name].sign
// * [name].verify
var rsassa = {};
[
"PS256",
"PS384",
"PS512"
].forEach(function(name) {
rsassa[name] = {
sign: rsassaPssSignFn(name),
verify: rsassaPssVerifyFn(name)
};
});
[
"RS256",
"RS384",
"RS512"
].forEach(function(name) {
rsassa[name] = {
sign: rsassaV15SignFn(name),
verify: rsassaV15VerifyFn(name)
};
});
module.exports = rsassa;
|
PypiClean
|
/online_node2vec-0.1.1-py3-none-any.whl/online_node2vec/online/w2v_learners.py
|
from gensim.models import Word2Vec
from .npw2v import NPWord2Vec
import pandas as pd
import numpy as np
from collections import Counter, deque
class Word2VecBase():
def __init__(self):
self.all_words = None
self.model = None
def set_all_words(self, all_words):
self.all_words = all_words
def get_rank(self, src, trg, top_k):
raise RuntimeError("Implement this method for each subclass!")
def export_embeddings(self, file_name, nbunch=None, decay_information=None):
""""Export online word2vec features"""
#print(file_name)
if self.model == None:
with open(file_name, 'w') as f:
f.write("No word2vec model is available! No training instances were recieved.")
else:
embeddings = self.get_embeddings()
if nbunch != None:
embeddings = embeddings[embeddings['index'].isin(nbunch)]
if decay_information != None:
now, c, node_last_update = decay_information
embeddings = embeddings.set_index('index')
decays = []
for node_id in list(embeddings.index):
decay = np.exp(-c*(now-node_last_update[node_id]))
decays.append(decay)
decays_reshaped = np.array(decays).reshape(len(decays),1)
embeddings = decays_reshaped * embeddings
embeddings = embeddings.reset_index()
embeddings.to_csv(file_name, index=False, header=False)
class OnlineWord2Vec(Word2VecBase):
"""
Custom Word2Vec wrapper for online representation learning
Parameters
----------
embedding_dims : int
Dimensions of the representation
lr_rate : float
Learning rate
neg_rate: int
Negative rate
n_threads: int
Maximum number of threads for parallelization
loss: square/logsigmoid
Choose loss type
mirror:
Feed sampled node pairs in both order to the learner
onlymirror:
Feed sampled node pairs only in reverse order to the learner
init: uniform/gensim
Choose method for embedding initialization
exportW1: bool
Select representation matrix
temporal_noise: bool
Enable temporal node activity based negative sampling
interval: int
Synchronization window in seconds in case of temporal noise
use_pairs: bool
Input is fed as node pairs instead of node sequences
window: int
Window parameter in case of node sequence input
uniform_ratio: float
Fraction of uniform random negative samples. Remaining negative samples are chosen from past positive training instances.
"""
def __init__(self, embedding_dims=128, lr_rate=0.01, neg_rate=10, loss="square", mirror=True, onlymirror=False, init="uniform", exportW1=True, interval=86400, temporal_noise=False, window=2, use_pairs=True, uniform_ratio=1.0):
"""Custom online Word2Vec model wrapper"""
self.embedding_dims = embedding_dims
self.lr_rate = lr_rate
self.neg_rate = neg_rate
self.uniform_ratio = uniform_ratio
self.loss = loss
self.mirror = mirror
self.onlymirror = onlymirror
self.init = init
self.exportW1 = exportW1
self.interval = interval
self.temporal_noise = temporal_noise
self.window = window
self.use_pairs = use_pairs
super(OnlineWord2Vec, self).__init__()
def __str__(self):
return "onlinew2v_dim%i_lr%0.4f_neg%i_uratio%.2f_%s_mirror%s_om%s_init%s_expW1%s_i%i_tn%s_win%i_pairs%s" % (self.embedding_dims, self.lr_rate, self.neg_rate, self.uniform_ratio, self.loss, self.mirror, self.onlymirror, self.init, self.exportW1, self.interval, self.temporal_noise, self.window, self.use_pairs)
def init_model(self, time):
self.last_update = time
self.appearences = []
if self.all_words == None:
raise RuntimeError("'all_words' must be set before initialization!")
self.model = NPWord2Vec(self.embedding_dims, learning_rate=self.lr_rate, negative_rate=self.neg_rate, loss=self.loss, window=self.window, mirror=self.mirror, onlymirror=self.onlymirror, init=self.init, uniform_ratio=self.uniform_ratio, exportW1=self.exportW1)
self.model.set_vocabulary(self.all_words)
def partial_fit(self, sentences, time):
"""Note: learning rate is fixed during online training."""
if self.model == None:
self.init_model(time)
#refresh noise
time_diff = time - self.last_update
if time_diff > self.interval:
print("Updating noise with %i records" % len(self.appearences))
self.model.update_noise_dist(self.appearences)
self.last_update += self.interval
if self.temporal_noise:
self.appearences = []
# update model
if self.use_pairs:
# sentences are node pairs
for (a, b) in sentences:
self.appearences += [a,b]
self.model.train_pairs(sentences)
else:
# sentences are node sequences
for sentence in sentences:
self.appearences += sentence
self.model.train_sentence(sentence)
def add_edge(self, src, trg, time):
if self.model == None:
self.init_model(time)
self.model.add(src, trg)
def get_embeddings(self):
W, vocab_code_map = self.model.get_embed()
reverse_map = {v:k for k,v in vocab_code_map.items()}
embeddings = pd.DataFrame(W).reset_index()
embeddings['index'] = embeddings['index'].map(reverse_map)
return embeddings
class GensimWord2Vec(Word2VecBase):
"""
gensim.Word2Vec wrapper for online representation learning
Parameters
----------
embedding_dims : int
Dimensions of the representation
lr_rate : float
Learning rate
sg: 0/1
Use skip-gram model
neg_rate: int
Negative rate
n_threads: int
Maximum number of threads for parallelization
"""
def __init__(self, embedding_dims=128, lr_rate=0.01, sg=1, neg_rate=10, n_threads=4):
self.embedding_dims = embedding_dims
self.lr_rate = lr_rate
self.sg = sg
self.neg_rate = neg_rate
self.n_threads = n_threads
self.num_epochs = 1
self.closest_ids = {}
self.embeddings = None
super(GensimWord2Vec, self).__init__()
def get_closest_ids(self, src, topk):
src_vec = self.embeddings[src]
id_list = np.array([idx for idx in self.embeddings.keys() if idx!=src])
vec_dot = np.array([src_vec.dot(self.embeddings[idx]) for idx in id_list])
#argsort in descending order, topk values
needed_id_places = np.argsort(vec_dot)[::-1][:topk]
#ids
self.closest_ids[src] = list(id_list[needed_id_places])
def __str__(self):
return "gensimw2v_dim%i_lr%0.4f_neg%i_sg%i" % (self.embedding_dims, self.lr_rate, self.neg_rate, self.sg)
def partial_fit(self, sentences, time=None):
"""Note: learning rate is fixed during online training. Time parameter is not used!"""
if self.model == None:
if self.all_words == None:
raise RuntimeError("'all_words' must be set before initialization!")
if self.neg_rate < 0:
self.model = Word2Vec(sentences, min_count=1, vector_size=self.embedding_dims, window=1, alpha=self.lr_rate, min_alpha=self.lr_rate, sg=self.sg, negative=0, hs=1, epochs=self.num_epochs, workers=self.n_threads) #hierarchical softmax
else:
self.model = Word2Vec(sentences, min_count=1, vector_size=self.embedding_dims, window=1, alpha=self.lr_rate, min_alpha=self.lr_rate, sg=self.sg, negative=self.neg_rate, epochs=self.num_epochs, workers=self.n_threads)
# update model
self.model.build_vocab(sentences, update=True)
self.model.train(sentences, epochs=self.num_epochs, total_words=len(self.all_words))
self.embeddings = self.get_embedding_vectors()
self.closest_ids = {}
def get_embedding_vectors(self):
vectors = self.model.wv.vectors
indices = self.model.wv.index_to_key
embeddings = {indices[i]:vectors[i] for i in range(len(indices))}
return embeddings
def get_embeddings(self):
vectors = self.model.wv.vectors
embeddings = pd.DataFrame(vectors).reset_index()
embeddings['index'] = self.model.wv.index_to_key
return embeddings
|
PypiClean
|
/merlin-sok-1.2.0.tar.gz/merlin-sok-1.2.0/third_party/json/doc/mkdocs/docs/home/exceptions.md
|
# Exceptions
## Overview
### Base type
All exceptions inherit from class `json::exception` (which in turn inherits from `std::exception`). It is used as the base class for all exceptions thrown by the `basic_json` class. This class can hence be used as "wildcard" to catch exceptions.
```plantuml
std::exception <|-- json::exception
json::exception <|-- json::parse_error
json::exception <|-- json::invalid_iterator
json::exception <|-- json::type_error
json::exception <|-- json::out_of_range
json::exception <|-- json::other_error
interface std::exception {}
class json::exception {
+ const int id
+ const char* what() const
}
class json::parse_error {
+ const std::size_t byte
}
```
### Switch off exceptions
Exceptions are used widely within the library. They can, however, be switched off with either using the compiler flag `-fno-exceptions` or by defining the symbol `JSON_NOEXCEPTION`. In this case, exceptions are replaced by `abort()` calls. You can further control this behavior by defining `JSON_THROW_USER` (overriding `#!cpp throw`), `JSON_TRY_USER` (overriding `#!cpp try`), and `JSON_CATCH_USER` (overriding `#!cpp catch`).
Note that `JSON_THROW_USER` should leave the current scope (e.g., by throwing or aborting), as continuing after it may yield undefined behavior.
??? example
The code below switches off exceptions and creates a log entry with a detailed error message in case of errors.
```cpp
#include <iostream>
#define JSON_TRY_USER if(true)
#define JSON_CATCH_USER(exception) if(false)
#define JSON_THROW_USER(exception) \
{std::clog << "Error in " << __FILE__ << ":" << __LINE__ \
<< " (function " << __FUNCTION__ << ") - " \
<< (exception).what() << std::endl; \
std::abort();}
#include <nlohmann/json.hpp>
```
## Parse errors
This exception is thrown by the library when a parse error occurs. Parse errors
can occur during the deserialization of JSON text, CBOR, MessagePack, as well
as when using JSON Patch.
Exceptions have ids 1xx.
!!! info "Byte index"
Member `byte` holds the byte index of the last read character in the input
file.
For an input with n bytes, 1 is the index of the first character and n+1
is the index of the terminating null byte or the end of file. This also
holds true when reading a byte vector (CBOR or MessagePack).
??? example
The following code shows how a `parse_error` exception can be caught.
```cpp
--8<-- "examples/parse_error.cpp"
```
Output:
```
--8<-- "examples/parse_error.output"
```
### json.exception.parse_error.101
This error indicates a syntax error while deserializing a JSON text. The error message describes that an unexpected token (character) was encountered, and the member `byte` indicates the error position.
!!! failure "Example message"
Input ended prematurely:
```
[json.exception.parse_error.101] parse error at 2: unexpected end of input; expected string literal
```
No input:
```
[json.exception.parse_error.101] parse error at line 1, column 1: syntax error while parsing value - unexpected end of input; expected '[', '{', or a literal
```
Control character was not escaped:
```
[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0009 (HT) must be escaped to \u0009 or \\; last read: '"<U+0009>'"
```
String was not closed:
```
[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: missing closing quote; last read: '"'
```
Invalid number format:
```
[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing value - invalid number; expected '+', '-', or digit after exponent; last read: '1E'
```
`\u` was not be followed by four hex digits:
```
[json.exception.parse_error.101] parse error at line 1, column 6: syntax error while parsing value - invalid string: '\u' must be followed by 4 hex digits; last read: '"\u01"'
```
Invalid UTF-8 surrogate pair:
```
[json.exception.parse_error.101] parse error at line 1, column 13: syntax error while parsing value - invalid string: surrogate U+DC00..U+DFFF must follow U+D800..U+DBFF; last read: '"\uD7FF\uDC00'"
```
Invalid UTF-8 byte:
```
[json.exception.parse_error.101] parse error at line 3, column 24: syntax error while parsing value - invalid string: ill-formed UTF-8 byte; last read: '"vous \352t'
```
!!! tip
- Make sure the input is correctly read. Try to write the input to standard output to check if, for instance, the input file was successfully openened.
- Paste the input to a JSON validator like <http://jsonlint.com> or a tool like [jq](https://stedolan.github.io/jq/).
### json.exception.parse_error.102
JSON uses the `\uxxxx` format to describe Unicode characters. Code points above above 0xFFFF are split into two `\uxxxx` entries ("surrogate pairs"). This error indicates that the surrogate pair is incomplete or contains an invalid code point.
!!! failure "Example message"
```
parse error at 14: missing or wrong low surrogate
```
### json.exception.parse_error.103
Unicode supports code points up to 0x10FFFF. Code points above 0x10FFFF are invalid.
!!! failure "Example message"
```
parse error: code points above 0x10FFFF are invalid
```
### json.exception.parse_error.104
[RFC 6902](https://tools.ietf.org/html/rfc6902) requires a JSON Patch document to be a JSON document that represents an array of objects.
!!! failure "Example message"
```
[json.exception.parse_error.104] parse error: JSON patch must be an array of objects
```
### json.exception.parse_error.105
An operation of a JSON Patch document must contain exactly one "op" member, whose value indicates the operation to perform. Its value must be one of "add", "remove", "replace", "move", "copy", or "test"; other values are errors.
!!! failure "Example message"
```
[json.exception.parse_error.105] parse error: operation 'add' must have member 'value'
```
```
[json.exception.parse_error.105] parse error: operation 'copy' must have string member 'from'
```
```
[json.exception.parse_error.105] parse error: operation value 'foo' is invalid
```
### json.exception.parse_error.106
An array index in a JSON Pointer ([RFC 6901](https://tools.ietf.org/html/rfc6901)) may be `0` or any number without a leading `0`.
!!! failure "Example message"
```
[json.exception.parse_error.106] parse error: array index '01' must not begin with '0'
```
### json.exception.parse_error.107
A JSON Pointer must be a Unicode string containing a sequence of zero or more reference tokens, each prefixed by a `/` character.
!!! failure "Example message"
```
[json.exception.parse_error.107] parse error at byte 1: JSON pointer must be empty or begin with '/' - was: 'foo'
```
### json.exception.parse_error.108
In a JSON Pointer, only `~0` and `~1` are valid escape sequences.
!!! failure "Example message"
```
[json.exception.parse_error.108] parse error: escape character '~' must be followed with '0' or '1'
```
### json.exception.parse_error.109
A JSON Pointer array index must be a number.
!!! failure "Example message"
```
[json.exception.parse_error.109] parse error: array index 'one' is not a number
```
```
[json.exception.parse_error.109] parse error: array index '+1' is not a number
```
### json.exception.parse_error.110
When parsing CBOR or MessagePack, the byte vector ends before the complete value has been read.
!!! failure "Example message"
```
[json.exception.parse_error.110] parse error at byte 5: syntax error while parsing CBOR string: unexpected end of input
```
```
[json.exception.parse_error.110] parse error at byte 2: syntax error while parsing UBJSON value: expected end of input; last byte: 0x5A
```
### json.exception.parse_error.112
Not all types of CBOR or MessagePack are supported. This exception occurs if an unsupported byte was read.
!!! failure "Example message"
```
[json.exception.parse_error.112] parse error at byte 1: syntax error while parsing CBOR value: invalid byte: 0x1C
```
### json.exception.parse_error.113
While parsing a map key, a value that is not a string has been read.
!!! failure "Example message"
```
[json.exception.parse_error.113] parse error at byte 2: syntax error while parsing CBOR string: expected length specification (0x60-0x7B) or indefinite string type (0x7F); last byte: 0xFF
```
```
[json.exception.parse_error.113] parse error at byte 2: syntax error while parsing MessagePack string: expected length specification (0xA0-0xBF, 0xD9-0xDB); last byte: 0xFF
```
```
[json.exception.parse_error.113] parse error at byte 2: syntax error while parsing UBJSON char: byte after 'C' must be in range 0x00..0x7F; last byte: 0x82
```
### json.exception.parse_error.114
The parsing of the corresponding BSON record type is not implemented (yet).
!!! failure "Example message"
```
[json.exception.parse_error.114] parse error at byte 5: Unsupported BSON record type 0xFF
```
### json.exception.parse_error.115
A UBJSON high-precision number could not be parsed.
!!! failure "Example message"
```
[json.exception.parse_error.115] parse error at byte 5: syntax error while parsing UBJSON high-precision number: invalid number text: 1A
```
## Iterator errors
This exception is thrown if iterators passed to a library function do not match
the expected semantics.
Exceptions have ids 2xx.
??? example
The following code shows how an `invalid_iterator` exception can be caught.
```cpp
--8<-- "examples/invalid_iterator.cpp"
```
Output:
```
--8<-- "examples/invalid_iterator.output"
```
### json.exception.invalid_iterator.201
The iterators passed to constructor `basic_json(InputIT first, InputIT last)` are not compatible, meaning they do not belong to the same container. Therefore, the range (`first`, `last`) is invalid.
!!! failure "Example message"
```
[json.exception.invalid_iterator.201] iterators are not compatible
```
### json.exception.invalid_iterator.202
In an erase or insert function, the passed iterator @a pos does not belong to the JSON value for which the function was called. It hence does not define a valid position for the deletion/insertion.
!!! failure "Example message"
```
[json.exception.invalid_iterator.202] iterator does not fit current value
```
```
[json.exception.invalid_iterator.202] iterators first and last must point to objects
```
### json.exception.invalid_iterator.203
Either iterator passed to function `erase(IteratorType` first, IteratorType last) does not belong to the JSON value from which values shall be erased. It hence does not define a valid range to delete values from.
!!! failure "Example message"
```
[json.exception.invalid_iterator.203] iterators do not fit current value
```
### json.exception.invalid_iterator.204
When an iterator range for a primitive type (number, boolean, or string) is passed to a constructor or an erase function, this range has to be exactly (`begin(),` `end()),` because this is the only way the single stored value is expressed. All other ranges are invalid.
!!! failure "Example message"
```
[json.exception.invalid_iterator.204] iterators out of range
```
### json.exception.invalid_iterator.205
When an iterator for a primitive type (number, boolean, or string) is passed to an erase function, the iterator has to be the `begin()` iterator, because it is the only way to address the stored value. All other iterators are invalid.
!!! failure "Example message"
```
[json.exception.invalid_iterator.205] iterator out of range
```
### json.exception.invalid_iterator.206
The iterators passed to constructor `basic_json(InputIT first, InputIT last)` belong to a JSON null value and hence to not define a valid range.
!!! failure "Example message"
```
[json.exception.invalid_iterator.206] cannot construct with iterators from null
```
### json.exception.invalid_iterator.207
The `key()` member function can only be used on iterators belonging to a JSON object, because other types do not have a concept of a key.
!!! failure "Example message"
```
[json.exception.invalid_iterator.207] cannot use key() for non-object iterators
```
### json.exception.invalid_iterator.208
The `operator[]` to specify a concrete offset cannot be used on iterators belonging to a JSON object, because JSON objects are unordered.
!!! failure "Example message"
```
[json.exception.invalid_iterator.208] cannot use operator[] for object iterators
```
### json.exception.invalid_iterator.209
The offset operators (`+`, `-`, `+=`, `-=`) cannot be used on iterators belonging to a JSON object, because JSON objects are unordered.
!!! failure "Example message"
```
[json.exception.invalid_iterator.209] cannot use offsets with object iterators
```
### json.exception.invalid_iterator.210
The iterator range passed to the insert function are not compatible, meaning they do not belong to the same container. Therefore, the range (`first`, `last`) is invalid.
!!! failure "Example message"
```
[json.exception.invalid_iterator.210] iterators do not fit
```
### json.exception.invalid_iterator.211
The iterator range passed to the insert function must not be a subrange of the container to insert to.
!!! failure "Example message"
```
[json.exception.invalid_iterator.211] passed iterators may not belong to container
```
### json.exception.invalid_iterator.212
When two iterators are compared, they must belong to the same container.
!!! failure "Example message"
```
[json.exception.invalid_iterator.212] cannot compare iterators of different containers
```
### json.exception.invalid_iterator.213
The order of object iterators cannot be compared, because JSON objects are unordered.
!!! failure "Example message"
```
[json.exception.invalid_iterator.213] cannot compare order of object iterators
```
### json.exception.invalid_iterator.214
Cannot get value for iterator: Either the iterator belongs to a null value or it is an iterator to a primitive type (number, boolean, or string), but the iterator is different to `begin()`.
!!! failure "Example message"
```
[json.exception.invalid_iterator.214] cannot get value
```
## Type errors
This exception is thrown in case of a type error; that is, a library function is executed on a JSON value whose type does not match the expected semantics.
Exceptions have ids 3xx.
??? example
The following code shows how a `type_error` exception can be caught.
```cpp
--8<-- "examples/type_error.cpp"
```
Output:
```
--8<-- "examples/type_error.output"
```
### json.exception.type_error.301
To create an object from an initializer list, the initializer list must consist only of a list of pairs whose first element is a string. When this constraint is violated, an array is created instead.
!!! failure "Example message"
```
[json.exception.type_error.301] cannot create object from initializer list
```
### json.exception.type_error.302
During implicit or explicit value conversion, the JSON type must be compatible to the target type. For instance, a JSON string can only be converted into string types, but not into numbers or boolean types.
!!! failure "Example message"
```
[json.exception.type_error.302] type must be object, but is null
```
```
[json.exception.type_error.302] type must be string, but is object
```
### json.exception.type_error.303
To retrieve a reference to a value stored in a `basic_json` object with `get_ref`, the type of the reference must match the value type. For instance, for a JSON array, the `ReferenceType` must be `array_t &`.
!!! failure "Example message"
```
[json.exception.type_error.303] incompatible ReferenceType for get_ref, actual type is object
```
```
[json.exception.type_error.303] incompatible ReferenceType for get_ref, actual type is number"
```
### json.exception.type_error.304
The `at()` member functions can only be executed for certain JSON types.
!!! failure "Example message"
```
[json.exception.type_error.304] cannot use at() with string
```
```
[json.exception.type_error.304] cannot use at() with number
```
### json.exception.type_error.305
The `operator[]` member functions can only be executed for certain JSON types.
!!! failure "Example message"
```
[json.exception.type_error.305] cannot use operator[] with a string argument with array
```
```
[json.exception.type_error.305] cannot use operator[] with a numeric argument with object
```
### json.exception.type_error.306
The `value()` member functions can only be executed for certain JSON types.
!!! failure "Example message"
```
[json.exception.type_error.306] cannot use value() with number
```
### json.exception.type_error.307
The `erase()` member functions can only be executed for certain JSON types.
!!! failure "Example message"
```
[json.exception.type_error.307] cannot use erase() with string
```
### json.exception.type_error.308
The `push_back()` and `operator+=` member functions can only be executed for certain JSON types.
!!! failure "Example message"
```
[json.exception.type_error.308] cannot use push_back() with string
```
### json.exception.type_error.309
The `insert()` member functions can only be executed for certain JSON types.
!!! failure "Example message"
```
[json.exception.type_error.309] cannot use insert() with array
```
```
[json.exception.type_error.309] cannot use insert() with number
```
### json.exception.type_error.310
The `swap()` member functions can only be executed for certain JSON types.
!!! failure "Example message"
```
[json.exception.type_error.310] cannot use swap() with number
```
### json.exception.type_error.311
The `emplace()` and `emplace_back()` member functions can only be executed for certain JSON types.
!!! failure "Example message"
```
[json.exception.type_error.311] cannot use emplace() with number
```
```
[json.exception.type_error.311] cannot use emplace_back() with number
```
### json.exception.type_error.312
The `update()` member functions can only be executed for certain JSON types.
!!! failure "Example message"
```
[json.exception.type_error.312] cannot use update() with array
```
### json.exception.type_error.313
The `unflatten` function converts an object whose keys are JSON Pointers back into an arbitrary nested JSON value. The JSON Pointers must not overlap, because then the resulting value would not be well defined.
!!! failure "Example message"
```
[json.exception.type_error.313] invalid value to unflatten
```
### json.exception.type_error.314
The `unflatten` function only works for an object whose keys are JSON Pointers.
!!! failure "Example message"
Calling `unflatten()` on an array `#!json [1,2,3]`:
```
[json.exception.type_error.314] only objects can be unflattened
```
### json.exception.type_error.315
The `unflatten()` function only works for an object whose keys are JSON Pointers and whose values are primitive.
!!! failure "Example message"
Calling `unflatten()` on an object `#!json {"/1", [1,2,3]}`:
```
[json.exception.type_error.315] values in object must be primitive
```
### json.exception.type_error.316
The `dump()` function only works with UTF-8 encoded strings; that is, if you assign a `std::string` to a JSON value, make sure it is UTF-8 encoded.
!!! failure "Example message"
Calling `dump()` on a JSON value containing an ISO 8859-1 encoded string:
```
[json.exception.type_error.316] invalid UTF-8 byte at index 15: 0x6F
```
!!! tip
- Store the source file with UTF-8 encoding.
- Pass an error handler as last parameter to the `dump()` function to avoid this exception:
- `json::error_handler_t::replace` will replace invalid bytes sequences with `U+FFFD`
- `json::error_handler_t::ignore` will silently ignore invalid byte sequences
### json.exception.type_error.317
The dynamic type of the object cannot be represented in the requested serialization format (e.g. a raw `true` or `null` JSON object cannot be serialized to BSON)
!!! failure "Example message"
Serializing `#!json null` to BSON:
```
[json.exception.type_error.317] to serialize to BSON, top-level type must be object, but is null
```
Serializing `#!json [1,2,3]` to BSON:
```
[json.exception.type_error.317] to serialize to BSON, top-level type must be object, but is array
```
!!! tip
Encapsulate the JSON value in an object. That is, instead of serializing `#!json true`, serialize `#!json {"value": true}`
## Out of range
This exception is thrown in case a library function is called on an input parameter that exceeds the expected range, for instance in case of array indices or nonexisting object keys.
Exceptions have ids 4xx.
??? example
The following code shows how an `out_of_range` exception can be caught.
```cpp
--8<-- "examples/out_of_range.cpp"
```
Output:
```
--8<-- "examples/out_of_range.output"
```
### json.exception.out_of_range.401
The provided array index `i` is larger than `size-1`.
!!! failure "Example message"
```
array index 3 is out of range
```
### json.exception.out_of_range.402
The special array index `-` in a JSON Pointer never describes a valid element of the array, but the index past the end. That is, it can only be used to add elements at this position, but not to read it.
!!! failure "Example message"
```
array index '-' (3) is out of range
```
### json.exception.out_of_range.403
The provided key was not found in the JSON object.
!!! failure "Example message"
```
key 'foo' not found
```
### json.exception.out_of_range.404
A reference token in a JSON Pointer could not be resolved.
!!! failure "Example message"
```
unresolved reference token 'foo'
```
### json.exception.out_of_range.405
The JSON Patch operations 'remove' and 'add' can not be applied to the root element of the JSON value.
!!! failure "Example message"
```
JSON pointer has no parent
```
### json.exception.out_of_range.406
A parsed number could not be stored as without changing it to NaN or INF.
!!! failure "Example message"
```
number overflow parsing '10E1000'
```
### json.exception.out_of_range.407
UBJSON and BSON only support integer numbers up to 9223372036854775807.
!!! failure "Example message"
```
number overflow serializing '9223372036854775808'
```
!!! note
Since version 3.9.0, integer numbers beyond int64 are serialized as high-precision UBJSON numbers, and this exception does not further occur.
### json.exception.out_of_range.408
The size (following `#`) of an UBJSON array or object exceeds the maximal capacity.
!!! failure "Example message"
```
excessive array size: 8658170730974374167
```
### json.exception.out_of_range.409
Key identifiers to be serialized to BSON cannot contain code point U+0000, since the key is stored as zero-terminated c-string.
!!! failure "Example message"
```
BSON key cannot contain code point U+0000 (at byte 2)
```
## Further exceptions
This exception is thrown in case of errors that cannot be classified with the
other exception types.
Exceptions have ids 5xx.
??? example
The following code shows how an `other_error` exception can be caught.
```cpp
--8<-- "examples/other_error.cpp"
```
Output:
```
--8<-- "examples/other_error.output"
```
### json.exception.other_error.501
A JSON Patch operation 'test' failed. The unsuccessful operation is also printed.
!!! failure "Example message"
Executing `#!json {"op":"test", "path":"/baz", "value":"bar"}` on `#!json {"baz": "qux"}`:
```
[json.exception.other_error.501] unsuccessful: {"op":"test","path":"/baz","value":"bar"}
```
|
PypiClean
|
/beeswax-wrapper-1.1.9.tar.gz/beeswax-wrapper-1.1.9/beeswax_wrapper/modules/segments.py
|
from __future__ import unicode_literals
import ujson
from beeswax_wrapper.core.base_classes import BaseAPI
class Segment(BaseAPI):
"""Beeswax Segment API class"""
paths = ['segment']
def __init__(self, *args, **kwargs):
super(Segment, self).__init__(*args, **kwargs)
self.tags = SegmentTag(self._dal)
self.categories = SegmentCategoryAssociation(self._dal)
self.sharing = SegmentSharing(self._dal)
self.lookups = SegmentLookup(self._dal)
self.updates = SegmentUpdate(self._dal)
self.uploads = SegmentUpload(self._dal)
def retrieve(self, segment_id, **kwargs):
"""
:type segment_id: int
:param dict kwargs: segment_key, segment_name, alternative_id, advertiser_id, segment_description
"""
parameters = dict(segment_id=segment_id, **kwargs)
return self._call('GET', data=ujson.dumps(parameters))[0]
def list(self, **kwargs):
"""
:param dict kwargs: segment_id, segment_key, segment_name, alternative_id, advertiser_id, segment_description
"""
return self._call('GET', data=ujson.dumps(kwargs))
def create(self, segment_name, **kwargs):
"""
:type segment_name: str
:param dict kwargs: alternative_id, advertiser_id, segment_description, cpm_cost, ttl_days, aggregate_excludes
"""
parameters = dict(segment_name=segment_name, **kwargs)
return self._call('POST', data=ujson.dumps(parameters))
def update(self, segment_id, **kwargs):
"""
:type segment_id: int
:param dict kwargs: segment_name, alternative_id, advertiser_id, segment_description, cpm_cost,
aggregate_excludes
"""
parameters = dict(segment_id=segment_id, **kwargs)
return self._call('PUT', data=ujson.dumps(parameters))
class SegmentTag(BaseAPI):
"""Beeswax Segment Tag API class"""
paths = ['segment_tag']
def retrieve(self, segment_tag, **kwargs):
"""
:type segment_tag: int
:param dict kwargs: segment_name, advertiser_id, tag_type, format
"""
parameters = dict(segment_tag=segment_tag, **kwargs)
return self._call('GET', data=ujson.dumps(parameters))[0]
def list(self, **kwargs):
"""
:param dict kwargs: segment_tag, segment_name, advertiser_id, tag_type, format
"""
return self._call('GET', data=ujson.dumps(kwargs))
class SegmentCategory(BaseAPI):
"""Beeswax Segment Category API class"""
paths = ['segment_category']
def __init__(self, *args, **kwargs):
super(SegmentCategory, self).__init__(*args, **kwargs)
self.segments = SegmentCategoryAssociation(self._dal)
self.lookups = SegmentCategoryLookup(self._dal)
self.sharing = SegmentCategorySharing(self._dal)
def retrieve(self, segment_category_id, **kwargs):
"""
:type segment_category_id: int
:param dict kwargs: alternative_id, segment_category_key, segment_category_name, parent_category_key,
advertiser_id
"""
parameters = dict(segment_category_id=segment_category_id, **kwargs)
return self._call('GET', data=ujson.dumps(parameters))[0]
def list(self, **kwargs):
"""
:param dict kwargs: segment_category_id, alternative_id, segment_category_key, segment_category_name,
parent_category_key, advertiser_id
"""
return self._call('GET', data=ujson.dumps(kwargs))
def create(self, segment_category_name, **kwargs):
"""
:type segment_category_name: str
:param dict kwargs: alternative_id, parent_category_key, advertiser_id
"""
parameters = dict(segment_category_name=segment_category_name, **kwargs)
return self._call('POST', data=ujson.dumps(parameters))
def update(self, segment_category_id, **kwargs):
"""
:type segment_category_id: int
:param dict kwargs: segment_category_name, alternativ_id, alternative_id, advertiser_id
"""
parameters = dict(segment_category_id=segment_category_id, **kwargs)
return self._call('PUT', data=ujson.dumps(parameters))
def delete(self, segment_category_id):
"""
:type segment_category_id: int
"""
parameters = dict(segment_category_id=segment_category_id)
return self._call('DELETE', data=ujson.dumps(parameters))
class SegmentCategoryAssociation(BaseAPI):
"""Beeswax Segment Category Association API class"""
paths = ['segment_category_association']
def retrieve(self, segment_category_association_id, **kwargs):
"""
:type segment_category_association_id: int
:param dict kwargs: segment_category_key, segment_key
"""
parameters = dict(segment_category_association_id=segment_category_association_id, **kwargs)
return self._call('GET', data=ujson.dumps(parameters))[0]
def list(self, **kwargs):
"""
:param dict kwargs: segment_category_association_id, segment_category_key, segment_key
"""
return self._call('GET', data=ujson.dumps(kwargs))
def create(self, segment_category_key, segment_key):
"""
:type segment_category_key: int
:type segment_key: int
"""
parameters = dict(segment_category_key=segment_category_key, segment_key=segment_key)
return self._call('POST', data=ujson.dumps(parameters))
def delete(self, segment_category_association_id):
"""
:type segment_category_association_id: int
"""
parameters = dict(segment_category_association_id=segment_category_association_id)
return self._call('DELETE', data=ujson.dumps(parameters))
class SegmentSharing(BaseAPI):
"""Beeswax Segment Sharing API class"""
paths = ['segment_sharing']
def retrieve(self, segment_sharing_id, **kwargs):
"""
:type segment_sharing_id: int
:param dict kwargs: segment_key, shared_account_id, active
"""
parameters = dict(segment_sharing_id=segment_sharing_id, **kwargs)
return self._call('GET', data=ujson.dumps(parameters))[0]
def list(self, **kwargs):
"""
:param dict kwargs: segment_sharing_id, segment_key, shared_account_id, active
"""
return self._call('GET', data=ujson.dumps(kwargs))
def create(self, segment_key, shared_account_id, **kwargs):
"""
:type segment_key: int
:type shared_account_id: int
:param dict kwargs: active, cpm_cost
"""
parameters = dict(segment_key=segment_key, shared_account_id=shared_account_id, **kwargs)
return self._call('POST', data=ujson.dumps(parameters))
def update(self, segment_sharing_id, **kwargs):
"""
:type segment_sharing_id: int
:param dict kwargs: segment_key, shared_account_id, active, cpm_cost
"""
parameters = dict(segment_sharing_id=segment_sharing_id, **kwargs)
return self._call('PUT', data=ujson.dumps(parameters))
class SegmentCategorySharing(BaseAPI):
"""Beeswax Segment Category Sharing API class"""
paths = ['segment_category_sharing']
def retrieve(self, segment_category_sharing_id, **kwargs):
"""
:type segment_category_sharing_id: int
:param dict kwargs: segment_category_key, shared_account_id, active
"""
parameters = dict(segment_category_sharing_id=segment_category_sharing_id, **kwargs)
return self._call('GET', data=ujson.dumps(parameters))[0]
def list(self, **kwargs):
"""
:param dict kwargs: segment_category_sharing_id, segment_category_key, shared_account_id, active
"""
return self._call('GET', data=ujson.dumps(kwargs))
def create(self, segment_category_key, shared_account_id, **kwargs):
"""
:type segment_category_key: int
:type shared_account_id: int
:param dict kwargs: active, cpm_cost
"""
parameters = dict(segment_category_key=segment_category_key, shared_account_id=shared_account_id, **kwargs)
return self._call('POST', data=ujson.dumps(parameters))
def update(self, segment_category_sharing_id, **kwargs):
"""
:type segment_category_sharing_id: int
:param dict kwargs: segment_key, shared_account_id, active, cpm_cost
"""
parameters = dict(segment_category_sharing_id=segment_category_sharing_id, **kwargs)
return self._call('PUT', data=ujson.dumps(parameters))
class SegmentLookup(BaseAPI):
"""Beeswax Segment Lookup API class"""
paths = ['segment_lookup']
def retrieve(self, segment_id, **kwargs):
"""
:type segment_id: int
:param dict kwargs: segment_key, segment_name, source
"""
parameters = dict(segment_id=segment_id, **kwargs)
return self._call('GET', data=ujson.dumps(parameters))[0]
def list(self, **kwargs):
"""
:param dict kwargs: segment_id, segment_key, segment_name, source
"""
return self._call('GET', data=ujson.dumps(kwargs))
class SegmentCategoryLookup(BaseAPI):
"""Beeswax Segment Lookup API class"""
paths = ['segment_category_lookup']
def retrieve(self, segment_category_id, **kwargs):
"""
:type segment_category_id: int
:param dict kwargs: segment_category_key, segment_category_name, source
"""
parameters = dict(segment_category_id=segment_category_id, **kwargs)
return self._call('GET', data=ujson.dumps(parameters))[0]
def list(self, **kwargs):
"""
:param dict kwargs: segment_category_id, segment_category_key, segment_category_name, source
"""
return self._call('GET', data=ujson.dumps(kwargs))
class SegmentUpload(BaseAPI):
"""Beeswax Segment Upload API class"""
paths = ['segment_upload']
def retrieve(self, segment_upload_id, **kwargs):
"""
:type segment_upload_id: int
:param dict kwargs: file_name, upload_status, upload_complete_date, create_date, update_date
"""
parameters = dict(segment_upload_id=segment_upload_id, **kwargs)
return self._call('GET', data=ujson.dumps(parameters))[0]
def list(self, **kwargs):
"""
:param dict kwargs: segment_upload_id, file_name, upload_status, upload_complete_date, create_date, update_date
"""
return self._call('GET', data=ujson.dumps(kwargs))
def create(self, user_id_type, **kwargs):
"""
:type user_id_type: str
:param dict kwargs: file_name, path_to_file, datacenter, file_format, segment_key_type, continent, overwrite,
operation_type, segment_file_list
"""
parameters = dict(user_id_type=user_id_type, **kwargs)
if 'segment_file_list' in parameters:
return self._call('POST', data=ujson.dumps(parameters))
else:
files = kwargs.pop('path_to_file')
upload_request_data = self._call('POST', data=ujson.dumps(parameters))
return self._dal.call('POST', self.paths + ['upload', upload_request_data['payload']['id']], files=files)
class SegmentUpdate(BaseAPI):
"""Beeswax Segment Update API class"""
paths = ['segment_update']
def create(self, user_data, **kwargs):
"""
:type user_data: list
:param dict kwargs: continent, segment_key_type, user_id_type
"""
parameters = dict(user_data=user_data, **kwargs)
return self._call('POST', data=ujson.dumps(parameters))
|
PypiClean
|
/azure_mgmt_kubernetesconfiguration-3.0.0-py3-none-any.whl/azure/mgmt/kubernetesconfiguration/v2022_11_01/aio/_source_control_configuration_client.py
|
from copy import deepcopy
from typing import Any, Awaitable, TYPE_CHECKING
from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.mgmt.core import AsyncARMPipelineClient
from .. import models as _models
from ..._serialization import Deserializer, Serializer
from ._configuration import SourceControlConfigurationClientConfiguration
from .operations import (
ExtensionsOperations,
FluxConfigOperationStatusOperations,
FluxConfigurationsOperations,
OperationStatusOperations,
Operations,
SourceControlConfigurationsOperations,
)
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
class SourceControlConfigurationClient: # pylint: disable=client-accepts-api-version-keyword
"""KubernetesConfiguration Client.
:ivar extensions: ExtensionsOperations operations
:vartype extensions:
azure.mgmt.kubernetesconfiguration.v2022_11_01.aio.operations.ExtensionsOperations
:ivar operation_status: OperationStatusOperations operations
:vartype operation_status:
azure.mgmt.kubernetesconfiguration.v2022_11_01.aio.operations.OperationStatusOperations
:ivar flux_configurations: FluxConfigurationsOperations operations
:vartype flux_configurations:
azure.mgmt.kubernetesconfiguration.v2022_11_01.aio.operations.FluxConfigurationsOperations
:ivar flux_config_operation_status: FluxConfigOperationStatusOperations operations
:vartype flux_config_operation_status:
azure.mgmt.kubernetesconfiguration.v2022_11_01.aio.operations.FluxConfigOperationStatusOperations
:ivar source_control_configurations: SourceControlConfigurationsOperations operations
:vartype source_control_configurations:
azure.mgmt.kubernetesconfiguration.v2022_11_01.aio.operations.SourceControlConfigurationsOperations
:ivar operations: Operations operations
:vartype operations: azure.mgmt.kubernetesconfiguration.v2022_11_01.aio.operations.Operations
:param credential: Credential needed for the client to connect to Azure. Required.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param subscription_id: The ID of the target subscription. Required.
:type subscription_id: str
:param base_url: Service URL. Default value is "https://management.azure.com".
:type base_url: str
:keyword api_version: Api Version. Default value is "2022-11-01". Note that overriding this
default value may result in unsupported behavior.
:paramtype api_version: str
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
"""
def __init__(
self,
credential: "AsyncTokenCredential",
subscription_id: str,
base_url: str = "https://management.azure.com",
**kwargs: Any
) -> None:
self._config = SourceControlConfigurationClientConfiguration(
credential=credential, subscription_id=subscription_id, **kwargs
)
self._client: AsyncARMPipelineClient = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
self._serialize.client_side_validation = False
self.extensions = ExtensionsOperations(self._client, self._config, self._serialize, self._deserialize)
self.operation_status = OperationStatusOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.flux_configurations = FluxConfigurationsOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.flux_config_operation_status = FluxConfigOperationStatusOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.source_control_configurations = SourceControlConfigurationsOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.operations = Operations(self._client, self._config, self._serialize, self._deserialize)
def _send_request(self, request: HttpRequest, **kwargs: Any) -> Awaitable[AsyncHttpResponse]:
"""Runs the network request through the client's chained policies.
>>> from azure.core.rest import HttpRequest
>>> request = HttpRequest("GET", "https://www.example.org/")
<HttpRequest [GET], url: 'https://www.example.org/'>
>>> response = await client._send_request(request)
<AsyncHttpResponse: 200 OK>
For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request
:param request: The network request you want to make. Required.
:type request: ~azure.core.rest.HttpRequest
:keyword bool stream: Whether the response payload will be streamed. Defaults to False.
:return: The response of your network call. Does not do error handling on your response.
:rtype: ~azure.core.rest.AsyncHttpResponse
"""
request_copy = deepcopy(request)
request_copy.url = self._client.format_url(request_copy.url)
return self._client.send_request(request_copy, **kwargs)
async def close(self) -> None:
await self._client.close()
async def __aenter__(self) -> "SourceControlConfigurationClient":
await self._client.__aenter__()
return self
async def __aexit__(self, *exc_details: Any) -> None:
await self._client.__aexit__(*exc_details)
|
PypiClean
|
/eric-ide-22.7.1.tar.gz/eric-ide-22.7.1/eric7/EricWidgets/EricTreeWidget.py
|
# Copyright (c) 2009 - 2022 Detlev Offenbach <[email protected]>
#
"""
Module implementing specialized tree views.
"""
import enum
from PyQt6.QtCore import pyqtSignal, Qt
from PyQt6.QtWidgets import QTreeWidget, QTreeWidgetItem, QAbstractItemView
class EricTreeWidgetItemsState(enum.Enum):
"""
Class defining the items expansion state.
"""
COLLAPSED = 0
EXPANDED = 1
class EricTreeWidget(QTreeWidget):
"""
Class implementing an extended tree widget.
@signal itemControlClicked(QTreeWidgetItem) emitted after a Ctrl-Click
on an item
@signal itemMiddleButtonClicked(QTreeWidgetItem) emitted after a click
of the middle button on an item
"""
itemControlClicked = pyqtSignal(QTreeWidgetItem)
itemMiddleButtonClicked = pyqtSignal(QTreeWidgetItem)
def __init__(self, parent=None):
"""
Constructor
@param parent reference to the parent widget (QWidget)
"""
super().__init__(parent)
self.__refreshAllItemsNeeded = True
self.__allTreeItems = []
self.__showMode = EricTreeWidgetItemsState.COLLAPSED
self.setVerticalScrollMode(QAbstractItemView.ScrollMode.ScrollPerPixel)
self.itemChanged.connect(self.__scheduleRefresh)
def setDefaultItemShowMode(self, mode):
"""
Public method to set the default item show mode.
@param mode default mode
@type EricTreeWidgetItemsState
"""
self.__showMode = mode
def allItems(self):
"""
Public method to get a list of all items.
@return list of all items (list of QTreeWidgetItem)
"""
if self.__refreshAllItemsNeeded:
self.__allTreeItems = []
self.__iterateAllItems(None)
self.__refreshAllItemsNeeded = False
return self.__allTreeItems
def appendToParentItem(self, parent, item):
"""
Public method to append an item to a parent item.
@param parent text of the parent item (string) or
the parent item (QTreeWidgetItem)
@param item item to be appended (QTreeWidgetItem)
@return flag indicating success (boolean)
@exception RuntimeError raised to indicate an illegal type for
the parent
"""
if not isinstance(parent, (QTreeWidgetItem, str)):
raise RuntimeError("illegal type for parent")
if isinstance(parent, QTreeWidgetItem):
if parent is None or parent.treeWidget() != self:
return False
parentItem = parent
else:
lst = self.findItems(parent, Qt.MatchFlag.MatchExactly)
if not lst:
return False
parentItem = lst[0]
if parentItem is None:
return False
self.__allTreeItems.append(item)
parentItem.addChild(item)
return True
def prependToParentItem(self, parent, item):
"""
Public method to prepend an item to a parent item.
@param parent text of the parent item (string) or
the parent item (QTreeWidgetItem)
@param item item to be prepended (QTreeWidgetItem)
@return flag indicating success (boolean)
@exception RuntimeError raised to indicate an illegal type for
the parent
"""
if not isinstance(parent, (QTreeWidgetItem, str)):
raise RuntimeError("illegal type for parent")
if isinstance(parent, QTreeWidgetItem):
if parent is None or parent.treeWidget() != self:
return False
parentItem = parent
else:
lst = self.findItems(parent, Qt.MatchFlag.MatchExactly)
if not lst:
return False
parentItem = lst[0]
if parentItem is None:
return False
self.__allTreeItems.append(item)
parentItem.insertChild(0, item)
return True
def addTopLevelItem(self, item):
"""
Public method to add a top level item.
@param item item to be added as a top level item (QTreeWidgetItem)
"""
self.__allTreeItems.append(item)
super().addTopLevelItem(item)
def addTopLevelItems(self, items):
"""
Public method to add a list of top level items.
@param items items to be added as top level items
(list of QTreeWidgetItem)
"""
self.__allTreeItems.extend(items)
super().addTopLevelItems(items)
def insertTopLevelItem(self, index, item):
"""
Public method to insert a top level item.
@param index index for the insertion (integer)
@param item item to be inserted as a top level item (QTreeWidgetItem)
"""
self.__allTreeItems.append(item)
super().insertTopLevelItem(index, item)
def insertTopLevelItems(self, index, items):
"""
Public method to insert a list of top level items.
@param index index for the insertion (integer)
@param items items to be inserted as top level items
(list of QTreeWidgetItem)
"""
self.__allTreeItems.extend(items)
super().insertTopLevelItems(index, items)
def deleteItem(self, item):
"""
Public method to delete an item.
@param item item to be deleted (QTreeWidgetItem)
"""
if item in self.__allTreeItems:
self.__allTreeItems.remove(item)
self.__refreshAllItemsNeeded = True
del item
def deleteItems(self, items):
"""
Public method to delete a list of items.
@param items items to be deleted (list of QTreeWidgetItem)
"""
for item in items:
self.deleteItem(item)
def filterString(self, filterStr):
"""
Public slot to set a new filter.
@param filterStr filter to be set (string)
"""
self.expandAll()
allItems = self.allItems()
if filterStr:
lFilter = filterStr.lower()
for itm in allItems:
itm.setHidden(lFilter not in itm.text(0).lower())
itm.setExpanded(True)
for index in range(self.topLevelItemCount()):
self.topLevelItem(index).setHidden(False)
firstItm = self.topLevelItem(0)
belowItm = self.itemBelow(firstItm)
topLvlIndex = 0
while firstItm:
if lFilter in firstItm.text(0).lower():
firstItm.setHidden(False)
elif (
not firstItm.parent() and
(not belowItm or not belowItm.parent())
):
firstItm.setHidden(True)
elif not belowItm:
break
topLvlIndex += 1
firstItm = self.topLevelItem(topLvlIndex)
belowItm = self.itemBelow(firstItm)
else:
for itm in allItems:
itm.setHidden(False)
for index in range(self.topLevelItemCount()):
self.topLevelItem(index).setHidden(False)
if self.__showMode == EricTreeWidgetItemsState.COLLAPSED:
self.collapseAll()
def clear(self):
"""
Public slot to clear the tree.
"""
self.__allTreeItems = []
super().clear()
def __scheduleRefresh(self):
"""
Private slot to schedule a refresh of the tree.
"""
self.__refreshAllItemsNeeded = True
def mousePressEvent(self, evt):
"""
Protected method handling mouse press events.
@param evt mouse press event (QMouseEvent)
"""
if (
evt.modifiers() == Qt.KeyboardModifier.ControlModifier and
evt.buttons() == Qt.MouseButton.LeftButton
):
self.itemControlClicked.emit(self.itemAt(evt.position().toPoint()))
return
elif evt.buttons() == Qt.MouseButton.MiddleButton:
self.itemMiddleButtonClicked.emit(
self.itemAt(evt.position().toPoint()))
return
else:
super().mousePressEvent(evt)
def __iterateAllItems(self, parent):
"""
Private method to iterate over the child items of the parent.
@param parent parent item to iterate (QTreeWidgetItem)
"""
count = parent.childCount() if parent else self.topLevelItemCount()
for index in range(count):
itm = parent.child(index) if parent else self.topLevelItem(index)
if itm.childCount() == 0:
self.__allTreeItems.append(itm)
self.__iterateAllItems(itm)
|
PypiClean
|
/sqre-codekit-github3.py-1.0.0a4.tar.gz/sqre-codekit-github3.py-1.0.0a4/github3/git.py
|
from __future__ import unicode_literals
from json import dumps
from base64 import b64decode
from .models import GitHubCore, BaseCommit
from .decorators import requires_auth
class Blob(GitHubCore):
"""The :class:`Blob <Blob>` object.
See also: http://developer.github.com/v3/git/blobs/
"""
def _update_attributes(self, blob):
self._api = self._get_attribute(blob, 'url')
#: Raw content of the blob.
self.content = self._get_attribute(blob, 'content')
if self.content is not None:
self.content = self.content.encode()
#: Encoding of the raw content.
self.encoding = self._get_attribute(blob, 'encoding')
#: Decoded content of the blob.
self.decoded = self.content
if self.encoding == 'base64':
self.decoded = b64decode(self.content)
#: Size of the blob in bytes
self.size = self._get_attribute(blob, 'size')
#: SHA1 of the blob
self.sha = self._get_attribute(blob, 'sha')
def _repr(self):
return '<Blob [{0:.10}]>'.format(self.sha)
class GitData(GitHubCore):
"""The :class:`GitData <GitData>` object. This isn't directly returned to
the user (developer) ever. This is used to prevent duplication of some
common items among other Git Data objects.
"""
def _update_attributes(self, data):
#: SHA of the object
self.sha = self._get_attribute(data, 'sha')
self._api = self._get_attribute(data, 'url')
class Commit(BaseCommit):
"""The :class:`Commit <Commit>` object. This represents a commit made in a
repository.
See also: http://developer.github.com/v3/git/commits/
"""
def _update_attributes(self, commit):
super(Commit, self)._update_attributes(commit)
#: dict containing at least the name, email and date the commit was
#: created
self.author = self._get_attribute(commit, 'author', {})
# If GH returns nil/None then make sure author is a dict
self._author_name = self._get_attribute(self.author, 'name')
#: dict containing similar information to the author attribute
self.committer = self._get_attribute(commit, 'committer', {})
# blank the data if GH returns no data
self._commit_name = self._get_attribute(self.committer, 'name')
#: :class:`Tree <Tree>` the commit belongs to.
self.tree = self._class_attribute(commit, 'tree', Tree, self)
def _repr(self):
return '<Commit [{0}:{1}]>'.format(self._author_name, self.sha)
class Reference(GitHubCore):
"""The :class:`Reference <Reference>` object. This represents a reference
created on a repository.
See also: http://developer.github.com/v3/git/refs/
"""
def _update_attributes(self, ref):
self._api = self._get_attribute(ref, 'url')
#: The reference path, e.g., refs/heads/sc/featureA
self.ref = self._get_attribute(ref, 'ref')
#: :class:`GitObject <GitObject>` the reference points to
self.object = self._class_attribute(ref, 'object', GitObject)
def _repr(self):
return '<Reference [{0}]>'.format(self.ref)
@requires_auth
def delete(self):
"""Delete this reference.
:returns: bool
"""
return self._boolean(self._delete(self._api), 204, 404)
@requires_auth
def update(self, sha, force=False):
"""Update this reference.
:param str sha: (required), sha of the reference
:param bool force: (optional), force the update or not
:returns: bool
"""
data = {'sha': sha, 'force': force}
json = self._json(self._patch(self._api, data=dumps(data)), 200)
if json:
self._update_attributes(json)
return True
return False
class GitObject(GitData):
"""The :class:`GitObject <GitObject>` object."""
def _update_attributes(self, obj):
super(GitObject, self)._update_attributes(obj)
#: The type of object.
self.type = self._get_attribute(obj, 'type')
def _repr(self):
return '<Git Object [{0}]>'.format(self.sha)
class Tag(GitData):
"""The :class:`Tag <Tag>` object.
See also: http://developer.github.com/v3/git/tags/
"""
def _update_attributes(self, tag):
super(Tag, self)._update_attributes(tag)
#: String of the tag
self.tag = self._get_attribute(tag, 'tag')
#: Commit message for the tag
self.message = self._get_attribute(tag, 'message')
#: dict containing the name and email of the person
self.tagger = self._get_attribute(tag, 'tagger')
#: :class:`GitObject <GitObject>` for the tag
self.object = self._class_attribute(tag, 'object', GitObject)
def _repr(self):
return '<Tag [{0}]>'.format(self.tag)
class Tree(GitData):
"""The :class:`Tree <Tree>` object.
See also: http://developer.github.com/v3/git/trees/
"""
def _update_attributes(self, tree):
super(Tree, self)._update_attributes(tree)
#: list of :class:`Hash <Hash>` objects
self.tree = self._get_attribute(tree, 'tree', [])
if self.tree:
self.tree = [Hash(t) for t in self.tree]
def _repr(self):
return '<Tree [{0}]>'.format(self.sha)
def __eq__(self, other):
return self.as_dict() == other.as_dict()
def __ne__(self, other):
return self.as_dict() != other.as_dict()
def recurse(self):
"""Recurse into the tree.
:returns: :class:`Tree <Tree>`
"""
json = self._json(self._get(self._api, params={'recursive': '1'}),
200)
return self._instance_or_null(Tree, json)
class Hash(GitHubCore):
"""The :class:`Hash <Hash>` object.
See also: http://developer.github.com/v3/git/trees/#create-a-tree
"""
def _update_attributes(self, info):
#: Path to file
self.path = self._get_attribute(info, 'path')
#: File mode
self.mode = self._get_attribute(info, 'mode')
#: Type of hash, e.g., blob
self.type = self._get_attribute(info, 'type')
#: Size of hash
self.size = self._get_attribute(info, 'size')
#: SHA of the hash
self.sha = self._get_attribute(info, 'sha')
#: URL of this object in the GitHub API
self.url = self._get_attribute(info, 'url')
def _repr(self):
return '<Hash [{0}]>'.format(self.sha)
|
PypiClean
|
/pulumi_oci-1.9.0a1693465256.tar.gz/pulumi_oci-1.9.0a1693465256/pulumi_oci/usageproxy/get_subscription_redeemable_users.py
|
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = [
'GetSubscriptionRedeemableUsersResult',
'AwaitableGetSubscriptionRedeemableUsersResult',
'get_subscription_redeemable_users',
'get_subscription_redeemable_users_output',
]
@pulumi.output_type
class GetSubscriptionRedeemableUsersResult:
"""
A collection of values returned by getSubscriptionRedeemableUsers.
"""
def __init__(__self__, filters=None, id=None, redeemable_user_collections=None, subscription_id=None, tenancy_id=None):
if filters and not isinstance(filters, list):
raise TypeError("Expected argument 'filters' to be a list")
pulumi.set(__self__, "filters", filters)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if redeemable_user_collections and not isinstance(redeemable_user_collections, list):
raise TypeError("Expected argument 'redeemable_user_collections' to be a list")
pulumi.set(__self__, "redeemable_user_collections", redeemable_user_collections)
if subscription_id and not isinstance(subscription_id, str):
raise TypeError("Expected argument 'subscription_id' to be a str")
pulumi.set(__self__, "subscription_id", subscription_id)
if tenancy_id and not isinstance(tenancy_id, str):
raise TypeError("Expected argument 'tenancy_id' to be a str")
pulumi.set(__self__, "tenancy_id", tenancy_id)
@property
@pulumi.getter
def filters(self) -> Optional[Sequence['outputs.GetSubscriptionRedeemableUsersFilterResult']]:
return pulumi.get(self, "filters")
@property
@pulumi.getter
def id(self) -> str:
"""
The provider-assigned unique ID for this managed resource.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="redeemableUserCollections")
def redeemable_user_collections(self) -> Sequence['outputs.GetSubscriptionRedeemableUsersRedeemableUserCollectionResult']:
"""
The list of redeemable_user_collection.
"""
return pulumi.get(self, "redeemable_user_collections")
@property
@pulumi.getter(name="subscriptionId")
def subscription_id(self) -> str:
return pulumi.get(self, "subscription_id")
@property
@pulumi.getter(name="tenancyId")
def tenancy_id(self) -> str:
return pulumi.get(self, "tenancy_id")
class AwaitableGetSubscriptionRedeemableUsersResult(GetSubscriptionRedeemableUsersResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetSubscriptionRedeemableUsersResult(
filters=self.filters,
id=self.id,
redeemable_user_collections=self.redeemable_user_collections,
subscription_id=self.subscription_id,
tenancy_id=self.tenancy_id)
def get_subscription_redeemable_users(filters: Optional[Sequence[pulumi.InputType['GetSubscriptionRedeemableUsersFilterArgs']]] = None,
subscription_id: Optional[str] = None,
tenancy_id: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetSubscriptionRedeemableUsersResult:
"""
This data source provides the list of Subscription Redeemable Users in Oracle Cloud Infrastructure Usage Proxy service.
Provides the list of user summary that can redeem rewards for the given subscription ID.
## Example Usage
```python
import pulumi
import pulumi_oci as oci
test_subscription_redeemable_users = oci.UsageProxy.get_subscription_redeemable_users(subscription_id=oci_ons_subscription["test_subscription"]["id"],
tenancy_id=oci_identity_tenancy["test_tenancy"]["id"])
```
:param str subscription_id: The subscription ID for which rewards information is requested for.
:param str tenancy_id: The OCID of the tenancy.
"""
__args__ = dict()
__args__['filters'] = filters
__args__['subscriptionId'] = subscription_id
__args__['tenancyId'] = tenancy_id
opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
__ret__ = pulumi.runtime.invoke('oci:UsageProxy/getSubscriptionRedeemableUsers:getSubscriptionRedeemableUsers', __args__, opts=opts, typ=GetSubscriptionRedeemableUsersResult).value
return AwaitableGetSubscriptionRedeemableUsersResult(
filters=pulumi.get(__ret__, 'filters'),
id=pulumi.get(__ret__, 'id'),
redeemable_user_collections=pulumi.get(__ret__, 'redeemable_user_collections'),
subscription_id=pulumi.get(__ret__, 'subscription_id'),
tenancy_id=pulumi.get(__ret__, 'tenancy_id'))
@_utilities.lift_output_func(get_subscription_redeemable_users)
def get_subscription_redeemable_users_output(filters: Optional[pulumi.Input[Optional[Sequence[pulumi.InputType['GetSubscriptionRedeemableUsersFilterArgs']]]]] = None,
subscription_id: Optional[pulumi.Input[str]] = None,
tenancy_id: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetSubscriptionRedeemableUsersResult]:
"""
This data source provides the list of Subscription Redeemable Users in Oracle Cloud Infrastructure Usage Proxy service.
Provides the list of user summary that can redeem rewards for the given subscription ID.
## Example Usage
```python
import pulumi
import pulumi_oci as oci
test_subscription_redeemable_users = oci.UsageProxy.get_subscription_redeemable_users(subscription_id=oci_ons_subscription["test_subscription"]["id"],
tenancy_id=oci_identity_tenancy["test_tenancy"]["id"])
```
:param str subscription_id: The subscription ID for which rewards information is requested for.
:param str tenancy_id: The OCID of the tenancy.
"""
...
|
PypiClean
|
/taskcc-alipay-sdk-python-3.3.398.tar.gz/taskcc-alipay-sdk-python-3.3.398/alipay/aop/api/domain/UpdateCodeResult.py
|
import json
from alipay.aop.api.constant.ParamConstants import *
class UpdateCodeResult(object):
def __init__(self):
self._biz_id = None
self._code_token = None
self._context_data = None
self._error_code = None
self._error_message = None
self._success = None
@property
def biz_id(self):
return self._biz_id
@biz_id.setter
def biz_id(self, value):
self._biz_id = value
@property
def code_token(self):
return self._code_token
@code_token.setter
def code_token(self, value):
self._code_token = value
@property
def context_data(self):
return self._context_data
@context_data.setter
def context_data(self, value):
self._context_data = value
@property
def error_code(self):
return self._error_code
@error_code.setter
def error_code(self, value):
self._error_code = value
@property
def error_message(self):
return self._error_message
@error_message.setter
def error_message(self, value):
self._error_message = value
@property
def success(self):
return self._success
@success.setter
def success(self, value):
self._success = value
def to_alipay_dict(self):
params = dict()
if self.biz_id:
if hasattr(self.biz_id, 'to_alipay_dict'):
params['biz_id'] = self.biz_id.to_alipay_dict()
else:
params['biz_id'] = self.biz_id
if self.code_token:
if hasattr(self.code_token, 'to_alipay_dict'):
params['code_token'] = self.code_token.to_alipay_dict()
else:
params['code_token'] = self.code_token
if self.context_data:
if hasattr(self.context_data, 'to_alipay_dict'):
params['context_data'] = self.context_data.to_alipay_dict()
else:
params['context_data'] = self.context_data
if self.error_code:
if hasattr(self.error_code, 'to_alipay_dict'):
params['error_code'] = self.error_code.to_alipay_dict()
else:
params['error_code'] = self.error_code
if self.error_message:
if hasattr(self.error_message, 'to_alipay_dict'):
params['error_message'] = self.error_message.to_alipay_dict()
else:
params['error_message'] = self.error_message
if self.success:
if hasattr(self.success, 'to_alipay_dict'):
params['success'] = self.success.to_alipay_dict()
else:
params['success'] = self.success
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = UpdateCodeResult()
if 'biz_id' in d:
o.biz_id = d['biz_id']
if 'code_token' in d:
o.code_token = d['code_token']
if 'context_data' in d:
o.context_data = d['context_data']
if 'error_code' in d:
o.error_code = d['error_code']
if 'error_message' in d:
o.error_message = d['error_message']
if 'success' in d:
o.success = d['success']
return o
|
PypiClean
|
/walt_client-8.2-py3-none-any.whl/walt/client/doc/md/help-intro.md
|
# Welcome to WalT documentation!
The [WalT project](https://walt-project.liglab.fr) allows to easily build and use your own WalT testbed.
Up to now, WalT testbeds have been used for research experiments, medium-sized (e.g., 100 nodes) industrial testing infrastructures, and mobile demo setups.
## Scope and design notes
WalT is designed as **a tool for teammates**.
If many users want to use WalT, then **each team should install its own private WalT platform**.
Much of the versatility and user-friendliness we advertise comes from this concept of private platform.
See [`walt help show design-notes`](design-notes.md) for more info.
## Main features
WalT mainly provides the following set of features:
* **Physical access** (see below) and/or **remote control** over nodes
* Compatibility with various kinds of nodes:
- **Raspberry Pi** B/B+/2B/3B/3B+/4B/400 nodes
- **PC nodes** booted from a USB dongle
- **Virtual nodes**
- Distant Raspberry Pi nodes using WALT ssh-based **VPN** (experimental feature)
* **Management of node OS images**
- Clone or publish them from/to the docker hub (or a private registry)
- Modify them easily (virtual shell sessions, 1-command file copies)
* Means to collect, store and query **experiment logs**
A WALT platform is cost-effective, easy to install, easy to use, lightweight, versatile and collaborative.
## Optional features
With compliant network switches, WalT also provides the following *optional* features:
* **Platform topology automated discovery**;
* **PoE** for simplified deployment, possible **hard-reboot** (power-cycling) of nodes and automatic **power saving**.
See [`walt help show optional-features`](optional-features.md) for more info.
## A key feature: giving users physical access to nodes
WalT platforms provide a high level of versatility: they give users physical access to nodes.
At [LIG](https://www.liglab.fr) for instance, our main WalT platform network is deployed over a VLAN of the building.
Users can plug or unplug walt nodes (or, sometimes, network switches) from the wall plugs depending on the experiments they plan.
Debugging low-level kernel modifications of a network protocol is obviously much easier when having physical access to two or more of these nodes; with such a setup, the user can easily move two WalT nodes right to her desktop.
Moreover, users often use the USB ports of WalT nodes to plug other equipment.
For instance it is easy to setup an IoT testbed by connecting IoT boards on USB ports of WalT nodes. In this case, the experiment runs on the IoT boards and WalT nodes are configured as a control interface for them. WalT nodes boot a WalT image containing tools to flash a firmware or reboot the IoT board, and possibly catch the logs coming for the USB-serial link and turn them into WalT logs.
# Quick Links
## Admin documentation
For installation and upgrade procedures (server, network switches, nodes, VPN), a general view of WalT network structure, etc., checkout [`walt help show admin`](admin.md).
Note: as an alternative to a physical platform installation, or to get a first insight of WalT, Grid'5000 users can also deploy WalT on-demand on the Grid'5000 testbed. See [`walt help show g5k`](g5k.md).
## User documentation
In order to get familiar with main WalT concepts, see [`walt help show tutorial`](tutorial.md).
To understand why a given user owns a given node or not, and for related aspects of terminology, see [`walt help show node-ownership`](node-ownership.md).
WalT provides:
* direct access to nodes (using `walt node shell`, `walt node cp` for instance)
* access to the underlying operating system image (using `walt image shell`, `walt image cp`, etc.)
For details, see:
* [`walt help show shells`](shells.md)
* [`walt help show node-cp`](node-cp.md)
* [`walt help show image-build`](image-build.md)
* [`walt help show image-cp`](image-cp.md)
WalT also provides a logging system to manage your experiment logs (see [`walt help show logging`](logging.md)), python scripting features (see [`walt help show scripting`](scripting.md)), and many other features.
## Advanced topics
For specific needs, it is possible to build your own WalT image from scratch.
See [`walt help show image-from-scratch`](image-from-scratch.md) for more info.
If you want to connect a new kind of node which WalT does not currently
supports, check-out [`walt help show new-node-support`](new-node-support.md).
See [`walt help show node-bootup`](node-bootup.md) for detailed understanding
of walt nodes' bootup precedure.
|
PypiClean
|
/AlignQC-2.0.5.tar.gz/AlignQC-2.0.5/alignqc/annotate_from_genomic_features.py
|
import sys, argparse, gzip, re, os, inspect, itertools
from multiprocessing import Pool, cpu_count
from seqtools.format.gpd import GPDStream
from seqtools.range.multi import merge_ranges, subtract_ranges, BedArrayStream, sort_ranges
from seqtools.range import GenomicRange
from seqtools.stream import MultiLocusStream
def main(args):
inf = None
chrlens = {}
chrbed = []
if re.search('\.gz$',args.chromosome_lengths):
inf = gzip.open(args.chromosome_lengths)
else:
inf = open(args.chromosome_lengths)
for line in inf:
f = line.rstrip().split("\t")
chrlens[f[0]] = int(f[1])
chrbed.append(GenomicRange(f[0],1,int(f[1])))
inf.close()
inf = None
exonbed = []
txbed = []
sys.stderr.write("Reading Exons\n")
if re.search('\.gz$',args.annotation_gpd):
inf = gzip.open(args.annotation_gpd)
else:
inf = open(args.annotation_gpd)
gs = GPDStream(inf)
for gpd in gs:
exonbed += [x.range for x in gpd.exons]
txbed.append(gpd.range)
inf.close()
sys.stderr.write("Merging "+str(len(txbed))+" transcripts\n")
txbed = merge_ranges(txbed)
sys.stderr.write(str(len(txbed))+" transcripts after merging\n")
sys.stderr.write("Finding intergenic\n")
intergenicbed = subtract_ranges(chrbed,txbed)
sys.stderr.write("Found "+str(len(intergenicbed))+" intergenic regions\n")
intergenicbp = sum([x.length for x in intergenicbed])
sys.stderr.write("Intergenic size: "+str(intergenicbp)+"\n")
sys.stderr.write("Merging "+str(len(exonbed))+" exons\n")
exonbed = merge_ranges(exonbed)
sys.stderr.write(str(len(exonbed))+" exons after merging\n")
sys.stderr.write("Finding introns\n")
intronbed = subtract_ranges(txbed,exonbed)
sys.stderr.write("Found "+str(len(intronbed))+" introns\n")
chrbp = sum([x.length for x in chrbed])
sys.stderr.write("Genome size: "+str(chrbp)+"\n")
txbp = sum([x.length for x in txbed])
sys.stderr.write("Tx size: "+str(txbp)+"\n")
exonbp = sum([x.length for x in exonbed])
sys.stderr.write("Exon size: "+str(exonbp)+"\n")
intronbp = sum([x.length for x in intronbed])
sys.stderr.write("Intron size: "+str(intronbp)+"\n")
#sys.stderr.write(str(txbp+intergenicbp)+"\n")
if args.output_beds:
if not os.path.exists(args.output_beds): os.makedirs(args.output_beds)
with open(args.output_beds+'/chrs.bed','w') as of1:
for rng in chrbed: of1.write("\t".join([str(x) for x in rng.get_bed_array()])+"\n")
with open(args.output_beds+'/exon.bed','w') as of1:
for rng in exonbed: of1.write("\t".join([str(x) for x in rng.get_bed_array()])+"\n")
with open(args.output_beds+'/intron.bed','w') as of1:
for rng in intronbed: of1.write("\t".join([str(x) for x in rng.get_bed_array()])+"\n")
with open(args.output_beds+'/intergenic.bed','w') as of1:
for rng in intergenicbed: of1.write("\t".join([str(x) for x in rng.get_bed_array()])+"\n")
with open(args.output_beds+'/tx.bed','w') as of1:
for rng in txbed: of1.write("\t".join([str(x) for x in rng.get_bed_array()])+"\n")
inf = None
if re.search('\.gz$',args.reads_gpd):
inf = gzip.open(args.reads_gpd)
else:
inf = open(args.reads_gpd)
reads = {}
gs = GPDStream(inf)
for gpd in gs:
reads[gpd.gene_name] = {}
sys.stderr.write("Checking "+str(len(reads.keys()))+" Aligned Reads\n")
#now we know all features we can annotate reads
sys.stderr.write("Read through our reads and bed entries\n")
sys.stderr.write("Annotate intron\n")
intron = annotate_gpds(args,intronbed)
intronnames = set(intron.keys())
sys.stderr.write("Annotate intergenic\n")
intergenic = annotate_gpds(args,intergenicbed)
intergenicnames = set(intergenic.keys())
sys.stderr.write("Annotate exons\n")
exons = annotate_gpds(args,exonbed)
exonnames = set(exons.keys())
allnames = exonnames|intronnames|intergenicnames
sys.stderr.write(str(len(allnames))+" reads attributed to a feature\n")
vals = set(reads.keys())-allnames
if len(vals) > 0:
sys.stderr.write("WARNING unable to ascribe annotation to "+str(len(vals))+" reads\n")
donenames = set()
of = sys.stdout
if args.output:
if re.search('\.gz$',args.output):
of = gzip.open(args.output,'w')
else:
of = open(args.output,'w')
for name in allnames:
exonfrac = 0
intronfrac = 0
intergenicfrac = 0
readlen = 0
exoncount = 0
if name in exons:
exonfrac = float(exons[name][1])/float(exons[name][0])
readlen = exons[name][0]
exoncount = exons[name][2]
if name in intron:
intronfrac = float(intron[name][1])/float(intron[name][0])
readlen = intron[name][0]
exoncount = intron[name][2]
if name in intergenic:
intergenicfrac = float(intergenic[name][1])/float(intergenic[name][0])
readlen = intergenic[name][0]
exoncount = intergenic[name][2]
vals = {'exon':exonfrac,'intron':intronfrac,'intergenic':intergenicfrac}
type = None
if exonfrac >= 0.5:
type = 'exon'
elif intronfrac >= 0.5:
type = 'intron'
elif intergenicfrac >= 0.5:
type = 'intergenic'
else:
type = sorted(vals.keys(),key=lambda x: vals[x])[-1]
if vals[type] == 0:
sys.stderr.write("WARNING trouble setting type\n")
if not type: continue
of.write(name+"\t"+type+"\t"+str(exoncount)+"\t"+str(readlen)+"\n")
of.close()
def generate_locus(mls):
for es in mls:
[gpds,inbeds] = es.payload
if len(gpds) == 0 or len(inbeds) == 0:
continue
yield es
def annotate_gpds(args,inputbed):
if args.threads > 1:
p = Pool(processes=args.threads)
bas = BedArrayStream(sort_ranges(inputbed))
inf = None
if re.search('\.gz$',args.reads_gpd):
inf = gzip.open(args.reads_gpd)
else:
inf = open(args.args.reads_gpd)
gs = GPDStream(inf)
mls = MultiLocusStream([gs,bas])
results = {}
# try and implement as a multiprocessing map function
csize = 100 #control how many jobs to send to one thread at a time
if args.threads > 1:
results2 = p.imap_unordered(func=annotate_inner,iterable=generate_locus(mls),chunksize=csize)
else:
results2 = itertools.imap(annotate_inner,generate_locus(mls))
for chunk in results2:
for res in chunk:
results[res[0]] = res[1:]
inf.close()
return results
def annotate_inner(es):
results = []
[gpds,inbeds] = es.payload
for gpd in gpds:
orig = gpd.length
tot = 0
for rng1 in [x.range for x in gpd.exons]:
tot += sum([y.overlap_size(rng1) for y in inbeds])
if tot > 0:
results.append([gpd.gene_name,orig,tot,gpd.get_exon_count()])
return results
def do_inputs():
parser = argparse.ArgumentParser(description="Assign genomic features to reads based on where they majority of the read lies. In the event of a tie prioritize exon over intron and intron over intergenic.",formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('reads_gpd',help="reads gpd")
parser.add_argument('annotation_gpd',help="reference annotations gpd")
parser.add_argument('chromosome_lengths',help="reference lengths table")
parser.add_argument('--output_beds',help="save features")
parser.add_argument('-o','--output',help="output results")
parser.add_argument('--threads',default=cpu_count(),type=int,help="number of threads default cpu_count()")
args = parser.parse_args()
return args
def external_cmd(cmd):
cache_argv = sys.argv
sys.argv = cmd
args = do_inputs()
main(args)
sys.argv = cache_argv
if __name__=="__main__":
args = do_inputs()
main(args)
|
PypiClean
|
/displaylang_sympy-0.10.4-py3-none-any.whl/sympy/physics/continuum_mechanics/beam.py
|
from sympy.core import S, Symbol, diff, symbols
from sympy.core.add import Add
from sympy.core.expr import Expr
from sympy.core.function import (Derivative, Function)
from sympy.core.mul import Mul
from sympy.core.relational import Eq
from sympy.core.sympify import sympify
from sympy.solvers import linsolve
from sympy.solvers.ode.ode import dsolve
from sympy.solvers.solvers import solve
from sympy.printing import sstr
from sympy.functions import SingularityFunction, Piecewise, factorial
from sympy.integrals import integrate
from sympy.series import limit
from sympy.plotting import plot, PlotGrid
from sympy.geometry.entity import GeometryEntity
from sympy.external import import_module
from sympy.sets.sets import Interval
from sympy.utilities.lambdify import lambdify
from sympy.utilities.decorator import doctest_depends_on
from sympy.utilities.iterables import iterable
numpy = import_module('numpy', import_kwargs={'fromlist':['arange']})
class Beam:
"""
A Beam is a structural element that is capable of withstanding load
primarily by resisting against bending. Beams are characterized by
their cross sectional profile(Second moment of area), their length
and their material.
.. note::
A consistent sign convention must be used while solving a beam
bending problem; the results will
automatically follow the chosen sign convention. However, the
chosen sign convention must respect the rule that, on the positive
side of beam's axis (in respect to current section), a loading force
giving positive shear yields a negative moment, as below (the
curved arrow shows the positive moment and rotation):
.. image:: allowed-sign-conventions.png
Examples
========
There is a beam of length 4 meters. A constant distributed load of 6 N/m
is applied from half of the beam till the end. There are two simple supports
below the beam, one at the starting point and another at the ending point
of the beam. The deflection of the beam at the end is restricted.
Using the sign convention of downwards forces being positive.
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols, Piecewise
>>> E, I = symbols('E, I')
>>> R1, R2 = symbols('R1, R2')
>>> b = Beam(4, E, I)
>>> b.apply_load(R1, 0, -1)
>>> b.apply_load(6, 2, 0)
>>> b.apply_load(R2, 4, -1)
>>> b.bc_deflection = [(0, 0), (4, 0)]
>>> b.boundary_conditions
{'deflection': [(0, 0), (4, 0)], 'slope': []}
>>> b.load
R1*SingularityFunction(x, 0, -1) + R2*SingularityFunction(x, 4, -1) + 6*SingularityFunction(x, 2, 0)
>>> b.solve_for_reaction_loads(R1, R2)
>>> b.load
-3*SingularityFunction(x, 0, -1) + 6*SingularityFunction(x, 2, 0) - 9*SingularityFunction(x, 4, -1)
>>> b.shear_force()
3*SingularityFunction(x, 0, 0) - 6*SingularityFunction(x, 2, 1) + 9*SingularityFunction(x, 4, 0)
>>> b.bending_moment()
3*SingularityFunction(x, 0, 1) - 3*SingularityFunction(x, 2, 2) + 9*SingularityFunction(x, 4, 1)
>>> b.slope()
(-3*SingularityFunction(x, 0, 2)/2 + SingularityFunction(x, 2, 3) - 9*SingularityFunction(x, 4, 2)/2 + 7)/(E*I)
>>> b.deflection()
(7*x - SingularityFunction(x, 0, 3)/2 + SingularityFunction(x, 2, 4)/4 - 3*SingularityFunction(x, 4, 3)/2)/(E*I)
>>> b.deflection().rewrite(Piecewise)
(7*x - Piecewise((x**3, x > 0), (0, True))/2
- 3*Piecewise(((x - 4)**3, x > 4), (0, True))/2
+ Piecewise(((x - 2)**4, x > 2), (0, True))/4)/(E*I)
"""
def __init__(self, length, elastic_modulus, second_moment, area=Symbol('A'), variable=Symbol('x'), base_char='C'):
"""Initializes the class.
Parameters
==========
length : Sympifyable
A Symbol or value representing the Beam's length.
elastic_modulus : Sympifyable
A SymPy expression representing the Beam's Modulus of Elasticity.
It is a measure of the stiffness of the Beam material. It can
also be a continuous function of position along the beam.
second_moment : Sympifyable or Geometry object
Describes the cross-section of the beam via a SymPy expression
representing the Beam's second moment of area. It is a geometrical
property of an area which reflects how its points are distributed
with respect to its neutral axis. It can also be a continuous
function of position along the beam. Alternatively ``second_moment``
can be a shape object such as a ``Polygon`` from the geometry module
representing the shape of the cross-section of the beam. In such cases,
it is assumed that the x-axis of the shape object is aligned with the
bending axis of the beam. The second moment of area will be computed
from the shape object internally.
area : Symbol/float
Represents the cross-section area of beam
variable : Symbol, optional
A Symbol object that will be used as the variable along the beam
while representing the load, shear, moment, slope and deflection
curve. By default, it is set to ``Symbol('x')``.
base_char : String, optional
A String that will be used as base character to generate sequential
symbols for integration constants in cases where boundary conditions
are not sufficient to solve them.
"""
self.length = length
self.elastic_modulus = elastic_modulus
if isinstance(second_moment, GeometryEntity):
self.cross_section = second_moment
else:
self.cross_section = None
self.second_moment = second_moment
self.variable = variable
self._base_char = base_char
self._boundary_conditions = {'deflection': [], 'slope': []}
self._load = 0
self._area = area
self._applied_supports = []
self._support_as_loads = []
self._applied_loads = []
self._reaction_loads = {}
self._ild_reactions = {}
self._ild_shear = 0
self._ild_moment = 0
# _original_load is a copy of _load equations with unsubstituted reaction
# forces. It is used for calculating reaction forces in case of I.L.D.
self._original_load = 0
self._composite_type = None
self._hinge_position = None
def __str__(self):
shape_description = self._cross_section if self._cross_section else self._second_moment
str_sol = 'Beam({}, {}, {})'.format(sstr(self._length), sstr(self._elastic_modulus), sstr(shape_description))
return str_sol
@property
def reaction_loads(self):
""" Returns the reaction forces in a dictionary."""
return self._reaction_loads
@property
def ild_shear(self):
""" Returns the I.L.D. shear equation."""
return self._ild_shear
@property
def ild_reactions(self):
""" Returns the I.L.D. reaction forces in a dictionary."""
return self._ild_reactions
@property
def ild_moment(self):
""" Returns the I.L.D. moment equation."""
return self._ild_moment
@property
def length(self):
"""Length of the Beam."""
return self._length
@length.setter
def length(self, l):
self._length = sympify(l)
@property
def area(self):
"""Cross-sectional area of the Beam. """
return self._area
@area.setter
def area(self, a):
self._area = sympify(a)
@property
def variable(self):
"""
A symbol that can be used as a variable along the length of the beam
while representing load distribution, shear force curve, bending
moment, slope curve and the deflection curve. By default, it is set
to ``Symbol('x')``, but this property is mutable.
Examples
========
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> E, I, A = symbols('E, I, A')
>>> x, y, z = symbols('x, y, z')
>>> b = Beam(4, E, I)
>>> b.variable
x
>>> b.variable = y
>>> b.variable
y
>>> b = Beam(4, E, I, A, z)
>>> b.variable
z
"""
return self._variable
@variable.setter
def variable(self, v):
if isinstance(v, Symbol):
self._variable = v
else:
raise TypeError("""The variable should be a Symbol object.""")
@property
def elastic_modulus(self):
"""Young's Modulus of the Beam. """
return self._elastic_modulus
@elastic_modulus.setter
def elastic_modulus(self, e):
self._elastic_modulus = sympify(e)
@property
def second_moment(self):
"""Second moment of area of the Beam. """
return self._second_moment
@second_moment.setter
def second_moment(self, i):
self._cross_section = None
if isinstance(i, GeometryEntity):
raise ValueError("To update cross-section geometry use `cross_section` attribute")
else:
self._second_moment = sympify(i)
@property
def cross_section(self):
"""Cross-section of the beam"""
return self._cross_section
@cross_section.setter
def cross_section(self, s):
if s:
self._second_moment = s.second_moment_of_area()[0]
self._cross_section = s
@property
def boundary_conditions(self):
"""
Returns a dictionary of boundary conditions applied on the beam.
The dictionary has three keywords namely moment, slope and deflection.
The value of each keyword is a list of tuple, where each tuple
contains location and value of a boundary condition in the format
(location, value).
Examples
========
There is a beam of length 4 meters. The bending moment at 0 should be 4
and at 4 it should be 0. The slope of the beam should be 1 at 0. The
deflection should be 2 at 0.
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> E, I = symbols('E, I')
>>> b = Beam(4, E, I)
>>> b.bc_deflection = [(0, 2)]
>>> b.bc_slope = [(0, 1)]
>>> b.boundary_conditions
{'deflection': [(0, 2)], 'slope': [(0, 1)]}
Here the deflection of the beam should be ``2`` at ``0``.
Similarly, the slope of the beam should be ``1`` at ``0``.
"""
return self._boundary_conditions
@property
def bc_slope(self):
return self._boundary_conditions['slope']
@bc_slope.setter
def bc_slope(self, s_bcs):
self._boundary_conditions['slope'] = s_bcs
@property
def bc_deflection(self):
return self._boundary_conditions['deflection']
@bc_deflection.setter
def bc_deflection(self, d_bcs):
self._boundary_conditions['deflection'] = d_bcs
def join(self, beam, via="fixed"):
"""
This method joins two beams to make a new composite beam system.
Passed Beam class instance is attached to the right end of calling
object. This method can be used to form beams having Discontinuous
values of Elastic modulus or Second moment.
Parameters
==========
beam : Beam class object
The Beam object which would be connected to the right of calling
object.
via : String
States the way two Beam object would get connected
- For axially fixed Beams, via="fixed"
- For Beams connected via hinge, via="hinge"
Examples
========
There is a cantilever beam of length 4 meters. For first 2 meters
its moment of inertia is `1.5*I` and `I` for the other end.
A pointload of magnitude 4 N is applied from the top at its free end.
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> E, I = symbols('E, I')
>>> R1, R2 = symbols('R1, R2')
>>> b1 = Beam(2, E, 1.5*I)
>>> b2 = Beam(2, E, I)
>>> b = b1.join(b2, "fixed")
>>> b.apply_load(20, 4, -1)
>>> b.apply_load(R1, 0, -1)
>>> b.apply_load(R2, 0, -2)
>>> b.bc_slope = [(0, 0)]
>>> b.bc_deflection = [(0, 0)]
>>> b.solve_for_reaction_loads(R1, R2)
>>> b.load
80*SingularityFunction(x, 0, -2) - 20*SingularityFunction(x, 0, -1) + 20*SingularityFunction(x, 4, -1)
>>> b.slope()
(-((-80*SingularityFunction(x, 0, 1) + 10*SingularityFunction(x, 0, 2) - 10*SingularityFunction(x, 4, 2))/I + 120/I)/E + 80.0/(E*I))*SingularityFunction(x, 2, 0)
- 0.666666666666667*(-80*SingularityFunction(x, 0, 1) + 10*SingularityFunction(x, 0, 2) - 10*SingularityFunction(x, 4, 2))*SingularityFunction(x, 0, 0)/(E*I)
+ 0.666666666666667*(-80*SingularityFunction(x, 0, 1) + 10*SingularityFunction(x, 0, 2) - 10*SingularityFunction(x, 4, 2))*SingularityFunction(x, 2, 0)/(E*I)
"""
x = self.variable
E = self.elastic_modulus
new_length = self.length + beam.length
if self.second_moment != beam.second_moment:
new_second_moment = Piecewise((self.second_moment, x<=self.length),
(beam.second_moment, x<=new_length))
else:
new_second_moment = self.second_moment
if via == "fixed":
new_beam = Beam(new_length, E, new_second_moment, x)
new_beam._composite_type = "fixed"
return new_beam
if via == "hinge":
new_beam = Beam(new_length, E, new_second_moment, x)
new_beam._composite_type = "hinge"
new_beam._hinge_position = self.length
return new_beam
def apply_support(self, loc, type="fixed"):
"""
This method applies support to a particular beam object.
Parameters
==========
loc : Sympifyable
Location of point at which support is applied.
type : String
Determines type of Beam support applied. To apply support structure
with
- zero degree of freedom, type = "fixed"
- one degree of freedom, type = "pin"
- two degrees of freedom, type = "roller"
Examples
========
There is a beam of length 30 meters. A moment of magnitude 120 Nm is
applied in the clockwise direction at the end of the beam. A pointload
of magnitude 8 N is applied from the top of the beam at the starting
point. There are two simple supports below the beam. One at the end
and another one at a distance of 10 meters from the start. The
deflection is restricted at both the supports.
Using the sign convention of upward forces and clockwise moment
being positive.
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> E, I = symbols('E, I')
>>> b = Beam(30, E, I)
>>> b.apply_support(10, 'roller')
>>> b.apply_support(30, 'roller')
>>> b.apply_load(-8, 0, -1)
>>> b.apply_load(120, 30, -2)
>>> R_10, R_30 = symbols('R_10, R_30')
>>> b.solve_for_reaction_loads(R_10, R_30)
>>> b.load
-8*SingularityFunction(x, 0, -1) + 6*SingularityFunction(x, 10, -1)
+ 120*SingularityFunction(x, 30, -2) + 2*SingularityFunction(x, 30, -1)
>>> b.slope()
(-4*SingularityFunction(x, 0, 2) + 3*SingularityFunction(x, 10, 2)
+ 120*SingularityFunction(x, 30, 1) + SingularityFunction(x, 30, 2) + 4000/3)/(E*I)
"""
loc = sympify(loc)
self._applied_supports.append((loc, type))
if type in ("pin", "roller"):
reaction_load = Symbol('R_'+str(loc))
self.apply_load(reaction_load, loc, -1)
self.bc_deflection.append((loc, 0))
else:
reaction_load = Symbol('R_'+str(loc))
reaction_moment = Symbol('M_'+str(loc))
self.apply_load(reaction_load, loc, -1)
self.apply_load(reaction_moment, loc, -2)
self.bc_deflection.append((loc, 0))
self.bc_slope.append((loc, 0))
self._support_as_loads.append((reaction_moment, loc, -2, None))
self._support_as_loads.append((reaction_load, loc, -1, None))
def apply_load(self, value, start, order, end=None):
"""
This method adds up the loads given to a particular beam object.
Parameters
==========
value : Sympifyable
The value inserted should have the units [Force/(Distance**(n+1)]
where n is the order of applied load.
Units for applied loads:
- For moments, unit = kN*m
- For point loads, unit = kN
- For constant distributed load, unit = kN/m
- For ramp loads, unit = kN/m/m
- For parabolic ramp loads, unit = kN/m/m/m
- ... so on.
start : Sympifyable
The starting point of the applied load. For point moments and
point forces this is the location of application.
order : Integer
The order of the applied load.
- For moments, order = -2
- For point loads, order =-1
- For constant distributed load, order = 0
- For ramp loads, order = 1
- For parabolic ramp loads, order = 2
- ... so on.
end : Sympifyable, optional
An optional argument that can be used if the load has an end point
within the length of the beam.
Examples
========
There is a beam of length 4 meters. A moment of magnitude 3 Nm is
applied in the clockwise direction at the starting point of the beam.
A point load of magnitude 4 N is applied from the top of the beam at
2 meters from the starting point and a parabolic ramp load of magnitude
2 N/m is applied below the beam starting from 2 meters to 3 meters
away from the starting point of the beam.
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> E, I = symbols('E, I')
>>> b = Beam(4, E, I)
>>> b.apply_load(-3, 0, -2)
>>> b.apply_load(4, 2, -1)
>>> b.apply_load(-2, 2, 2, end=3)
>>> b.load
-3*SingularityFunction(x, 0, -2) + 4*SingularityFunction(x, 2, -1) - 2*SingularityFunction(x, 2, 2) + 2*SingularityFunction(x, 3, 0) + 4*SingularityFunction(x, 3, 1) + 2*SingularityFunction(x, 3, 2)
"""
x = self.variable
value = sympify(value)
start = sympify(start)
order = sympify(order)
self._applied_loads.append((value, start, order, end))
self._load += value*SingularityFunction(x, start, order)
self._original_load += value*SingularityFunction(x, start, order)
if end:
# load has an end point within the length of the beam.
self._handle_end(x, value, start, order, end, type="apply")
def remove_load(self, value, start, order, end=None):
"""
This method removes a particular load present on the beam object.
Returns a ValueError if the load passed as an argument is not
present on the beam.
Parameters
==========
value : Sympifyable
The magnitude of an applied load.
start : Sympifyable
The starting point of the applied load. For point moments and
point forces this is the location of application.
order : Integer
The order of the applied load.
- For moments, order= -2
- For point loads, order=-1
- For constant distributed load, order=0
- For ramp loads, order=1
- For parabolic ramp loads, order=2
- ... so on.
end : Sympifyable, optional
An optional argument that can be used if the load has an end point
within the length of the beam.
Examples
========
There is a beam of length 4 meters. A moment of magnitude 3 Nm is
applied in the clockwise direction at the starting point of the beam.
A pointload of magnitude 4 N is applied from the top of the beam at
2 meters from the starting point and a parabolic ramp load of magnitude
2 N/m is applied below the beam starting from 2 meters to 3 meters
away from the starting point of the beam.
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> E, I = symbols('E, I')
>>> b = Beam(4, E, I)
>>> b.apply_load(-3, 0, -2)
>>> b.apply_load(4, 2, -1)
>>> b.apply_load(-2, 2, 2, end=3)
>>> b.load
-3*SingularityFunction(x, 0, -2) + 4*SingularityFunction(x, 2, -1) - 2*SingularityFunction(x, 2, 2) + 2*SingularityFunction(x, 3, 0) + 4*SingularityFunction(x, 3, 1) + 2*SingularityFunction(x, 3, 2)
>>> b.remove_load(-2, 2, 2, end = 3)
>>> b.load
-3*SingularityFunction(x, 0, -2) + 4*SingularityFunction(x, 2, -1)
"""
x = self.variable
value = sympify(value)
start = sympify(start)
order = sympify(order)
if (value, start, order, end) in self._applied_loads:
self._load -= value*SingularityFunction(x, start, order)
self._original_load -= value*SingularityFunction(x, start, order)
self._applied_loads.remove((value, start, order, end))
else:
msg = "No such load distribution exists on the beam object."
raise ValueError(msg)
if end:
# load has an end point within the length of the beam.
self._handle_end(x, value, start, order, end, type="remove")
def _handle_end(self, x, value, start, order, end, type):
"""
This functions handles the optional `end` value in the
`apply_load` and `remove_load` functions. When the value
of end is not NULL, this function will be executed.
"""
if order.is_negative:
msg = ("If 'end' is provided the 'order' of the load cannot "
"be negative, i.e. 'end' is only valid for distributed "
"loads.")
raise ValueError(msg)
# NOTE : A Taylor series can be used to define the summation of
# singularity functions that subtract from the load past the end
# point such that it evaluates to zero past 'end'.
f = value*x**order
if type == "apply":
# iterating for "apply_load" method
for i in range(0, order + 1):
self._load -= (f.diff(x, i).subs(x, end - start) *
SingularityFunction(x, end, i)/factorial(i))
self._original_load -= (f.diff(x, i).subs(x, end - start) *
SingularityFunction(x, end, i)/factorial(i))
elif type == "remove":
# iterating for "remove_load" method
for i in range(0, order + 1):
self._load += (f.diff(x, i).subs(x, end - start) *
SingularityFunction(x, end, i)/factorial(i))
self._original_load += (f.diff(x, i).subs(x, end - start) *
SingularityFunction(x, end, i)/factorial(i))
@property
def load(self):
"""
Returns a Singularity Function expression which represents
the load distribution curve of the Beam object.
Examples
========
There is a beam of length 4 meters. A moment of magnitude 3 Nm is
applied in the clockwise direction at the starting point of the beam.
A point load of magnitude 4 N is applied from the top of the beam at
2 meters from the starting point and a parabolic ramp load of magnitude
2 N/m is applied below the beam starting from 3 meters away from the
starting point of the beam.
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> E, I = symbols('E, I')
>>> b = Beam(4, E, I)
>>> b.apply_load(-3, 0, -2)
>>> b.apply_load(4, 2, -1)
>>> b.apply_load(-2, 3, 2)
>>> b.load
-3*SingularityFunction(x, 0, -2) + 4*SingularityFunction(x, 2, -1) - 2*SingularityFunction(x, 3, 2)
"""
return self._load
@property
def applied_loads(self):
"""
Returns a list of all loads applied on the beam object.
Each load in the list is a tuple of form (value, start, order, end).
Examples
========
There is a beam of length 4 meters. A moment of magnitude 3 Nm is
applied in the clockwise direction at the starting point of the beam.
A pointload of magnitude 4 N is applied from the top of the beam at
2 meters from the starting point. Another pointload of magnitude 5 N
is applied at same position.
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> E, I = symbols('E, I')
>>> b = Beam(4, E, I)
>>> b.apply_load(-3, 0, -2)
>>> b.apply_load(4, 2, -1)
>>> b.apply_load(5, 2, -1)
>>> b.load
-3*SingularityFunction(x, 0, -2) + 9*SingularityFunction(x, 2, -1)
>>> b.applied_loads
[(-3, 0, -2, None), (4, 2, -1, None), (5, 2, -1, None)]
"""
return self._applied_loads
def _solve_hinge_beams(self, *reactions):
"""Method to find integration constants and reactional variables in a
composite beam connected via hinge.
This method resolves the composite Beam into its sub-beams and then
equations of shear force, bending moment, slope and deflection are
evaluated for both of them separately. These equations are then solved
for unknown reactions and integration constants using the boundary
conditions applied on the Beam. Equal deflection of both sub-beams
at the hinge joint gives us another equation to solve the system.
Examples
========
A combined beam, with constant fkexural rigidity E*I, is formed by joining
a Beam of length 2*l to the right of another Beam of length l. The whole beam
is fixed at both of its both end. A point load of magnitude P is also applied
from the top at a distance of 2*l from starting point.
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> E, I = symbols('E, I')
>>> l=symbols('l', positive=True)
>>> b1=Beam(l, E, I)
>>> b2=Beam(2*l, E, I)
>>> b=b1.join(b2,"hinge")
>>> M1, A1, M2, A2, P = symbols('M1 A1 M2 A2 P')
>>> b.apply_load(A1,0,-1)
>>> b.apply_load(M1,0,-2)
>>> b.apply_load(P,2*l,-1)
>>> b.apply_load(A2,3*l,-1)
>>> b.apply_load(M2,3*l,-2)
>>> b.bc_slope=[(0,0), (3*l, 0)]
>>> b.bc_deflection=[(0,0), (3*l, 0)]
>>> b.solve_for_reaction_loads(M1, A1, M2, A2)
>>> b.reaction_loads
{A1: -5*P/18, A2: -13*P/18, M1: 5*P*l/18, M2: -4*P*l/9}
>>> b.slope()
(5*P*l*SingularityFunction(x, 0, 1)/18 - 5*P*SingularityFunction(x, 0, 2)/36 + 5*P*SingularityFunction(x, l, 2)/36)*SingularityFunction(x, 0, 0)/(E*I)
- (5*P*l*SingularityFunction(x, 0, 1)/18 - 5*P*SingularityFunction(x, 0, 2)/36 + 5*P*SingularityFunction(x, l, 2)/36)*SingularityFunction(x, l, 0)/(E*I)
+ (P*l**2/18 - 4*P*l*SingularityFunction(-l + x, 2*l, 1)/9 - 5*P*SingularityFunction(-l + x, 0, 2)/36 + P*SingularityFunction(-l + x, l, 2)/2
- 13*P*SingularityFunction(-l + x, 2*l, 2)/36)*SingularityFunction(x, l, 0)/(E*I)
>>> b.deflection()
(5*P*l*SingularityFunction(x, 0, 2)/36 - 5*P*SingularityFunction(x, 0, 3)/108 + 5*P*SingularityFunction(x, l, 3)/108)*SingularityFunction(x, 0, 0)/(E*I)
- (5*P*l*SingularityFunction(x, 0, 2)/36 - 5*P*SingularityFunction(x, 0, 3)/108 + 5*P*SingularityFunction(x, l, 3)/108)*SingularityFunction(x, l, 0)/(E*I)
+ (5*P*l**3/54 + P*l**2*(-l + x)/18 - 2*P*l*SingularityFunction(-l + x, 2*l, 2)/9 - 5*P*SingularityFunction(-l + x, 0, 3)/108 + P*SingularityFunction(-l + x, l, 3)/6
- 13*P*SingularityFunction(-l + x, 2*l, 3)/108)*SingularityFunction(x, l, 0)/(E*I)
"""
x = self.variable
l = self._hinge_position
E = self._elastic_modulus
I = self._second_moment
if isinstance(I, Piecewise):
I1 = I.args[0][0]
I2 = I.args[1][0]
else:
I1 = I2 = I
load_1 = 0 # Load equation on first segment of composite beam
load_2 = 0 # Load equation on second segment of composite beam
# Distributing load on both segments
for load in self.applied_loads:
if load[1] < l:
load_1 += load[0]*SingularityFunction(x, load[1], load[2])
if load[2] == 0:
load_1 -= load[0]*SingularityFunction(x, load[3], load[2])
elif load[2] > 0:
load_1 -= load[0]*SingularityFunction(x, load[3], load[2]) + load[0]*SingularityFunction(x, load[3], 0)
elif load[1] == l:
load_1 += load[0]*SingularityFunction(x, load[1], load[2])
load_2 += load[0]*SingularityFunction(x, load[1] - l, load[2])
elif load[1] > l:
load_2 += load[0]*SingularityFunction(x, load[1] - l, load[2])
if load[2] == 0:
load_2 -= load[0]*SingularityFunction(x, load[3] - l, load[2])
elif load[2] > 0:
load_2 -= load[0]*SingularityFunction(x, load[3] - l, load[2]) + load[0]*SingularityFunction(x, load[3] - l, 0)
h = Symbol('h') # Force due to hinge
load_1 += h*SingularityFunction(x, l, -1)
load_2 -= h*SingularityFunction(x, 0, -1)
eq = []
shear_1 = integrate(load_1, x)
shear_curve_1 = limit(shear_1, x, l)
eq.append(shear_curve_1)
bending_1 = integrate(shear_1, x)
moment_curve_1 = limit(bending_1, x, l)
eq.append(moment_curve_1)
shear_2 = integrate(load_2, x)
shear_curve_2 = limit(shear_2, x, self.length - l)
eq.append(shear_curve_2)
bending_2 = integrate(shear_2, x)
moment_curve_2 = limit(bending_2, x, self.length - l)
eq.append(moment_curve_2)
C1 = Symbol('C1')
C2 = Symbol('C2')
C3 = Symbol('C3')
C4 = Symbol('C4')
slope_1 = S.One/(E*I1)*(integrate(bending_1, x) + C1)
def_1 = S.One/(E*I1)*(integrate((E*I)*slope_1, x) + C1*x + C2)
slope_2 = S.One/(E*I2)*(integrate(integrate(integrate(load_2, x), x), x) + C3)
def_2 = S.One/(E*I2)*(integrate((E*I)*slope_2, x) + C4)
for position, value in self.bc_slope:
if position<l:
eq.append(slope_1.subs(x, position) - value)
else:
eq.append(slope_2.subs(x, position - l) - value)
for position, value in self.bc_deflection:
if position<l:
eq.append(def_1.subs(x, position) - value)
else:
eq.append(def_2.subs(x, position - l) - value)
eq.append(def_1.subs(x, l) - def_2.subs(x, 0)) # Deflection of both the segments at hinge would be equal
constants = list(linsolve(eq, C1, C2, C3, C4, h, *reactions))
reaction_values = list(constants[0])[5:]
self._reaction_loads = dict(zip(reactions, reaction_values))
self._load = self._load.subs(self._reaction_loads)
# Substituting constants and reactional load and moments with their corresponding values
slope_1 = slope_1.subs({C1: constants[0][0], h:constants[0][4]}).subs(self._reaction_loads)
def_1 = def_1.subs({C1: constants[0][0], C2: constants[0][1], h:constants[0][4]}).subs(self._reaction_loads)
slope_2 = slope_2.subs({x: x-l, C3: constants[0][2], h:constants[0][4]}).subs(self._reaction_loads)
def_2 = def_2.subs({x: x-l,C3: constants[0][2], C4: constants[0][3], h:constants[0][4]}).subs(self._reaction_loads)
self._hinge_beam_slope = slope_1*SingularityFunction(x, 0, 0) - slope_1*SingularityFunction(x, l, 0) + slope_2*SingularityFunction(x, l, 0)
self._hinge_beam_deflection = def_1*SingularityFunction(x, 0, 0) - def_1*SingularityFunction(x, l, 0) + def_2*SingularityFunction(x, l, 0)
def solve_for_reaction_loads(self, *reactions):
"""
Solves for the reaction forces.
Examples
========
There is a beam of length 30 meters. A moment of magnitude 120 Nm is
applied in the clockwise direction at the end of the beam. A pointload
of magnitude 8 N is applied from the top of the beam at the starting
point. There are two simple supports below the beam. One at the end
and another one at a distance of 10 meters from the start. The
deflection is restricted at both the supports.
Using the sign convention of upward forces and clockwise moment
being positive.
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> E, I = symbols('E, I')
>>> R1, R2 = symbols('R1, R2')
>>> b = Beam(30, E, I)
>>> b.apply_load(-8, 0, -1)
>>> b.apply_load(R1, 10, -1) # Reaction force at x = 10
>>> b.apply_load(R2, 30, -1) # Reaction force at x = 30
>>> b.apply_load(120, 30, -2)
>>> b.bc_deflection = [(10, 0), (30, 0)]
>>> b.load
R1*SingularityFunction(x, 10, -1) + R2*SingularityFunction(x, 30, -1)
- 8*SingularityFunction(x, 0, -1) + 120*SingularityFunction(x, 30, -2)
>>> b.solve_for_reaction_loads(R1, R2)
>>> b.reaction_loads
{R1: 6, R2: 2}
>>> b.load
-8*SingularityFunction(x, 0, -1) + 6*SingularityFunction(x, 10, -1)
+ 120*SingularityFunction(x, 30, -2) + 2*SingularityFunction(x, 30, -1)
"""
if self._composite_type == "hinge":
return self._solve_hinge_beams(*reactions)
x = self.variable
l = self.length
C3 = Symbol('C3')
C4 = Symbol('C4')
shear_curve = limit(self.shear_force(), x, l)
moment_curve = limit(self.bending_moment(), x, l)
slope_eqs = []
deflection_eqs = []
slope_curve = integrate(self.bending_moment(), x) + C3
for position, value in self._boundary_conditions['slope']:
eqs = slope_curve.subs(x, position) - value
slope_eqs.append(eqs)
deflection_curve = integrate(slope_curve, x) + C4
for position, value in self._boundary_conditions['deflection']:
eqs = deflection_curve.subs(x, position) - value
deflection_eqs.append(eqs)
solution = list((linsolve([shear_curve, moment_curve] + slope_eqs
+ deflection_eqs, (C3, C4) + reactions).args)[0])
solution = solution[2:]
self._reaction_loads = dict(zip(reactions, solution))
self._load = self._load.subs(self._reaction_loads)
def shear_force(self):
"""
Returns a Singularity Function expression which represents
the shear force curve of the Beam object.
Examples
========
There is a beam of length 30 meters. A moment of magnitude 120 Nm is
applied in the clockwise direction at the end of the beam. A pointload
of magnitude 8 N is applied from the top of the beam at the starting
point. There are two simple supports below the beam. One at the end
and another one at a distance of 10 meters from the start. The
deflection is restricted at both the supports.
Using the sign convention of upward forces and clockwise moment
being positive.
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> E, I = symbols('E, I')
>>> R1, R2 = symbols('R1, R2')
>>> b = Beam(30, E, I)
>>> b.apply_load(-8, 0, -1)
>>> b.apply_load(R1, 10, -1)
>>> b.apply_load(R2, 30, -1)
>>> b.apply_load(120, 30, -2)
>>> b.bc_deflection = [(10, 0), (30, 0)]
>>> b.solve_for_reaction_loads(R1, R2)
>>> b.shear_force()
8*SingularityFunction(x, 0, 0) - 6*SingularityFunction(x, 10, 0) - 120*SingularityFunction(x, 30, -1) - 2*SingularityFunction(x, 30, 0)
"""
x = self.variable
return -integrate(self.load, x)
def max_shear_force(self):
"""Returns maximum Shear force and its coordinate
in the Beam object."""
shear_curve = self.shear_force()
x = self.variable
terms = shear_curve.args
singularity = [] # Points at which shear function changes
for term in terms:
if isinstance(term, Mul):
term = term.args[-1] # SingularityFunction in the term
singularity.append(term.args[1])
singularity.sort()
singularity = list(set(singularity))
intervals = [] # List of Intervals with discrete value of shear force
shear_values = [] # List of values of shear force in each interval
for i, s in enumerate(singularity):
if s == 0:
continue
try:
shear_slope = Piecewise((float("nan"), x<=singularity[i-1]),(self._load.rewrite(Piecewise), x<s), (float("nan"), True))
points = solve(shear_slope, x)
val = []
for point in points:
val.append(abs(shear_curve.subs(x, point)))
points.extend([singularity[i-1], s])
val += [abs(limit(shear_curve, x, singularity[i-1], '+')), abs(limit(shear_curve, x, s, '-'))]
max_shear = max(val)
shear_values.append(max_shear)
intervals.append(points[val.index(max_shear)])
# If shear force in a particular Interval has zero or constant
# slope, then above block gives NotImplementedError as
# solve can't represent Interval solutions.
except NotImplementedError:
initial_shear = limit(shear_curve, x, singularity[i-1], '+')
final_shear = limit(shear_curve, x, s, '-')
# If shear_curve has a constant slope(it is a line).
if shear_curve.subs(x, (singularity[i-1] + s)/2) == (initial_shear + final_shear)/2 and initial_shear != final_shear:
shear_values.extend([initial_shear, final_shear])
intervals.extend([singularity[i-1], s])
else: # shear_curve has same value in whole Interval
shear_values.append(final_shear)
intervals.append(Interval(singularity[i-1], s))
shear_values = list(map(abs, shear_values))
maximum_shear = max(shear_values)
point = intervals[shear_values.index(maximum_shear)]
return (point, maximum_shear)
def bending_moment(self):
"""
Returns a Singularity Function expression which represents
the bending moment curve of the Beam object.
Examples
========
There is a beam of length 30 meters. A moment of magnitude 120 Nm is
applied in the clockwise direction at the end of the beam. A pointload
of magnitude 8 N is applied from the top of the beam at the starting
point. There are two simple supports below the beam. One at the end
and another one at a distance of 10 meters from the start. The
deflection is restricted at both the supports.
Using the sign convention of upward forces and clockwise moment
being positive.
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> E, I = symbols('E, I')
>>> R1, R2 = symbols('R1, R2')
>>> b = Beam(30, E, I)
>>> b.apply_load(-8, 0, -1)
>>> b.apply_load(R1, 10, -1)
>>> b.apply_load(R2, 30, -1)
>>> b.apply_load(120, 30, -2)
>>> b.bc_deflection = [(10, 0), (30, 0)]
>>> b.solve_for_reaction_loads(R1, R2)
>>> b.bending_moment()
8*SingularityFunction(x, 0, 1) - 6*SingularityFunction(x, 10, 1) - 120*SingularityFunction(x, 30, 0) - 2*SingularityFunction(x, 30, 1)
"""
x = self.variable
return integrate(self.shear_force(), x)
def max_bmoment(self):
"""Returns maximum Shear force and its coordinate
in the Beam object."""
bending_curve = self.bending_moment()
x = self.variable
terms = bending_curve.args
singularity = [] # Points at which bending moment changes
for term in terms:
if isinstance(term, Mul):
term = term.args[-1] # SingularityFunction in the term
singularity.append(term.args[1])
singularity.sort()
singularity = list(set(singularity))
intervals = [] # List of Intervals with discrete value of bending moment
moment_values = [] # List of values of bending moment in each interval
for i, s in enumerate(singularity):
if s == 0:
continue
try:
moment_slope = Piecewise((float("nan"), x<=singularity[i-1]),(self.shear_force().rewrite(Piecewise), x<s), (float("nan"), True))
points = solve(moment_slope, x)
val = []
for point in points:
val.append(abs(bending_curve.subs(x, point)))
points.extend([singularity[i-1], s])
val += [abs(limit(bending_curve, x, singularity[i-1], '+')), abs(limit(bending_curve, x, s, '-'))]
max_moment = max(val)
moment_values.append(max_moment)
intervals.append(points[val.index(max_moment)])
# If bending moment in a particular Interval has zero or constant
# slope, then above block gives NotImplementedError as solve
# can't represent Interval solutions.
except NotImplementedError:
initial_moment = limit(bending_curve, x, singularity[i-1], '+')
final_moment = limit(bending_curve, x, s, '-')
# If bending_curve has a constant slope(it is a line).
if bending_curve.subs(x, (singularity[i-1] + s)/2) == (initial_moment + final_moment)/2 and initial_moment != final_moment:
moment_values.extend([initial_moment, final_moment])
intervals.extend([singularity[i-1], s])
else: # bending_curve has same value in whole Interval
moment_values.append(final_moment)
intervals.append(Interval(singularity[i-1], s))
moment_values = list(map(abs, moment_values))
maximum_moment = max(moment_values)
point = intervals[moment_values.index(maximum_moment)]
return (point, maximum_moment)
def point_cflexure(self):
"""
Returns a Set of point(s) with zero bending moment and
where bending moment curve of the beam object changes
its sign from negative to positive or vice versa.
Examples
========
There is is 10 meter long overhanging beam. There are
two simple supports below the beam. One at the start
and another one at a distance of 6 meters from the start.
Point loads of magnitude 10KN and 20KN are applied at
2 meters and 4 meters from start respectively. A Uniformly
distribute load of magnitude of magnitude 3KN/m is also
applied on top starting from 6 meters away from starting
point till end.
Using the sign convention of upward forces and clockwise moment
being positive.
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> E, I = symbols('E, I')
>>> b = Beam(10, E, I)
>>> b.apply_load(-4, 0, -1)
>>> b.apply_load(-46, 6, -1)
>>> b.apply_load(10, 2, -1)
>>> b.apply_load(20, 4, -1)
>>> b.apply_load(3, 6, 0)
>>> b.point_cflexure()
[10/3]
"""
# To restrict the range within length of the Beam
moment_curve = Piecewise((float("nan"), self.variable<=0),
(self.bending_moment(), self.variable<self.length),
(float("nan"), True))
points = solve(moment_curve.rewrite(Piecewise), self.variable,
domain=S.Reals)
return points
def slope(self):
"""
Returns a Singularity Function expression which represents
the slope the elastic curve of the Beam object.
Examples
========
There is a beam of length 30 meters. A moment of magnitude 120 Nm is
applied in the clockwise direction at the end of the beam. A pointload
of magnitude 8 N is applied from the top of the beam at the starting
point. There are two simple supports below the beam. One at the end
and another one at a distance of 10 meters from the start. The
deflection is restricted at both the supports.
Using the sign convention of upward forces and clockwise moment
being positive.
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> E, I = symbols('E, I')
>>> R1, R2 = symbols('R1, R2')
>>> b = Beam(30, E, I)
>>> b.apply_load(-8, 0, -1)
>>> b.apply_load(R1, 10, -1)
>>> b.apply_load(R2, 30, -1)
>>> b.apply_load(120, 30, -2)
>>> b.bc_deflection = [(10, 0), (30, 0)]
>>> b.solve_for_reaction_loads(R1, R2)
>>> b.slope()
(-4*SingularityFunction(x, 0, 2) + 3*SingularityFunction(x, 10, 2)
+ 120*SingularityFunction(x, 30, 1) + SingularityFunction(x, 30, 2) + 4000/3)/(E*I)
"""
x = self.variable
E = self.elastic_modulus
I = self.second_moment
if self._composite_type == "hinge":
return self._hinge_beam_slope
if not self._boundary_conditions['slope']:
return diff(self.deflection(), x)
if isinstance(I, Piecewise) and self._composite_type == "fixed":
args = I.args
slope = 0
prev_slope = 0
prev_end = 0
for i in range(len(args)):
if i != 0:
prev_end = args[i-1][1].args[1]
slope_value = -S.One/E*integrate(self.bending_moment()/args[i][0], (x, prev_end, x))
if i != len(args) - 1:
slope += (prev_slope + slope_value)*SingularityFunction(x, prev_end, 0) - \
(prev_slope + slope_value)*SingularityFunction(x, args[i][1].args[1], 0)
else:
slope += (prev_slope + slope_value)*SingularityFunction(x, prev_end, 0)
prev_slope = slope_value.subs(x, args[i][1].args[1])
return slope
C3 = Symbol('C3')
slope_curve = -integrate(S.One/(E*I)*self.bending_moment(), x) + C3
bc_eqs = []
for position, value in self._boundary_conditions['slope']:
eqs = slope_curve.subs(x, position) - value
bc_eqs.append(eqs)
constants = list(linsolve(bc_eqs, C3))
slope_curve = slope_curve.subs({C3: constants[0][0]})
return slope_curve
def deflection(self):
"""
Returns a Singularity Function expression which represents
the elastic curve or deflection of the Beam object.
Examples
========
There is a beam of length 30 meters. A moment of magnitude 120 Nm is
applied in the clockwise direction at the end of the beam. A pointload
of magnitude 8 N is applied from the top of the beam at the starting
point. There are two simple supports below the beam. One at the end
and another one at a distance of 10 meters from the start. The
deflection is restricted at both the supports.
Using the sign convention of upward forces and clockwise moment
being positive.
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> E, I = symbols('E, I')
>>> R1, R2 = symbols('R1, R2')
>>> b = Beam(30, E, I)
>>> b.apply_load(-8, 0, -1)
>>> b.apply_load(R1, 10, -1)
>>> b.apply_load(R2, 30, -1)
>>> b.apply_load(120, 30, -2)
>>> b.bc_deflection = [(10, 0), (30, 0)]
>>> b.solve_for_reaction_loads(R1, R2)
>>> b.deflection()
(4000*x/3 - 4*SingularityFunction(x, 0, 3)/3 + SingularityFunction(x, 10, 3)
+ 60*SingularityFunction(x, 30, 2) + SingularityFunction(x, 30, 3)/3 - 12000)/(E*I)
"""
x = self.variable
E = self.elastic_modulus
I = self.second_moment
if self._composite_type == "hinge":
return self._hinge_beam_deflection
if not self._boundary_conditions['deflection'] and not self._boundary_conditions['slope']:
if isinstance(I, Piecewise) and self._composite_type == "fixed":
args = I.args
prev_slope = 0
prev_def = 0
prev_end = 0
deflection = 0
for i in range(len(args)):
if i != 0:
prev_end = args[i-1][1].args[1]
slope_value = -S.One/E*integrate(self.bending_moment()/args[i][0], (x, prev_end, x))
recent_segment_slope = prev_slope + slope_value
deflection_value = integrate(recent_segment_slope, (x, prev_end, x))
if i != len(args) - 1:
deflection += (prev_def + deflection_value)*SingularityFunction(x, prev_end, 0) \
- (prev_def + deflection_value)*SingularityFunction(x, args[i][1].args[1], 0)
else:
deflection += (prev_def + deflection_value)*SingularityFunction(x, prev_end, 0)
prev_slope = slope_value.subs(x, args[i][1].args[1])
prev_def = deflection_value.subs(x, args[i][1].args[1])
return deflection
base_char = self._base_char
constants = symbols(base_char + '3:5')
return S.One/(E*I)*integrate(-integrate(self.bending_moment(), x), x) + constants[0]*x + constants[1]
elif not self._boundary_conditions['deflection']:
base_char = self._base_char
constant = symbols(base_char + '4')
return integrate(self.slope(), x) + constant
elif not self._boundary_conditions['slope'] and self._boundary_conditions['deflection']:
if isinstance(I, Piecewise) and self._composite_type == "fixed":
args = I.args
prev_slope = 0
prev_def = 0
prev_end = 0
deflection = 0
for i in range(len(args)):
if i != 0:
prev_end = args[i-1][1].args[1]
slope_value = -S.One/E*integrate(self.bending_moment()/args[i][0], (x, prev_end, x))
recent_segment_slope = prev_slope + slope_value
deflection_value = integrate(recent_segment_slope, (x, prev_end, x))
if i != len(args) - 1:
deflection += (prev_def + deflection_value)*SingularityFunction(x, prev_end, 0) \
- (prev_def + deflection_value)*SingularityFunction(x, args[i][1].args[1], 0)
else:
deflection += (prev_def + deflection_value)*SingularityFunction(x, prev_end, 0)
prev_slope = slope_value.subs(x, args[i][1].args[1])
prev_def = deflection_value.subs(x, args[i][1].args[1])
return deflection
base_char = self._base_char
C3, C4 = symbols(base_char + '3:5') # Integration constants
slope_curve = -integrate(self.bending_moment(), x) + C3
deflection_curve = integrate(slope_curve, x) + C4
bc_eqs = []
for position, value in self._boundary_conditions['deflection']:
eqs = deflection_curve.subs(x, position) - value
bc_eqs.append(eqs)
constants = list(linsolve(bc_eqs, (C3, C4)))
deflection_curve = deflection_curve.subs({C3: constants[0][0], C4: constants[0][1]})
return S.One/(E*I)*deflection_curve
if isinstance(I, Piecewise) and self._composite_type == "fixed":
args = I.args
prev_slope = 0
prev_def = 0
prev_end = 0
deflection = 0
for i in range(len(args)):
if i != 0:
prev_end = args[i-1][1].args[1]
slope_value = S.One/E*integrate(self.bending_moment()/args[i][0], (x, prev_end, x))
recent_segment_slope = prev_slope + slope_value
deflection_value = integrate(recent_segment_slope, (x, prev_end, x))
if i != len(args) - 1:
deflection += (prev_def + deflection_value)*SingularityFunction(x, prev_end, 0) \
- (prev_def + deflection_value)*SingularityFunction(x, args[i][1].args[1], 0)
else:
deflection += (prev_def + deflection_value)*SingularityFunction(x, prev_end, 0)
prev_slope = slope_value.subs(x, args[i][1].args[1])
prev_def = deflection_value.subs(x, args[i][1].args[1])
return deflection
C4 = Symbol('C4')
deflection_curve = integrate(self.slope(), x) + C4
bc_eqs = []
for position, value in self._boundary_conditions['deflection']:
eqs = deflection_curve.subs(x, position) - value
bc_eqs.append(eqs)
constants = list(linsolve(bc_eqs, C4))
deflection_curve = deflection_curve.subs({C4: constants[0][0]})
return deflection_curve
def max_deflection(self):
"""
Returns point of max deflection and its corresponding deflection value
in a Beam object.
"""
# To restrict the range within length of the Beam
slope_curve = Piecewise((float("nan"), self.variable<=0),
(self.slope(), self.variable<self.length),
(float("nan"), True))
points = solve(slope_curve.rewrite(Piecewise), self.variable,
domain=S.Reals)
deflection_curve = self.deflection()
deflections = [deflection_curve.subs(self.variable, x) for x in points]
deflections = list(map(abs, deflections))
if len(deflections) != 0:
max_def = max(deflections)
return (points[deflections.index(max_def)], max_def)
else:
return None
def shear_stress(self):
"""
Returns an expression representing the Shear Stress
curve of the Beam object.
"""
return self.shear_force()/self._area
def plot_shear_stress(self, subs=None):
"""
Returns a plot of shear stress present in the beam object.
Parameters
==========
subs : dictionary
Python dictionary containing Symbols as key and their
corresponding values.
Examples
========
There is a beam of length 8 meters and area of cross section 2 square
meters. A constant distributed load of 10 KN/m is applied from half of
the beam till the end. There are two simple supports below the beam,
one at the starting point and another at the ending point of the beam.
A pointload of magnitude 5 KN is also applied from top of the
beam, at a distance of 4 meters from the starting point.
Take E = 200 GPa and I = 400*(10**-6) meter**4.
Using the sign convention of downwards forces being positive.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> R1, R2 = symbols('R1, R2')
>>> b = Beam(8, 200*(10**9), 400*(10**-6), 2)
>>> b.apply_load(5000, 2, -1)
>>> b.apply_load(R1, 0, -1)
>>> b.apply_load(R2, 8, -1)
>>> b.apply_load(10000, 4, 0, end=8)
>>> b.bc_deflection = [(0, 0), (8, 0)]
>>> b.solve_for_reaction_loads(R1, R2)
>>> b.plot_shear_stress()
Plot object containing:
[0]: cartesian line: 6875*SingularityFunction(x, 0, 0) - 2500*SingularityFunction(x, 2, 0)
- 5000*SingularityFunction(x, 4, 1) + 15625*SingularityFunction(x, 8, 0)
+ 5000*SingularityFunction(x, 8, 1) for x over (0.0, 8.0)
"""
shear_stress = self.shear_stress()
x = self.variable
length = self.length
if subs is None:
subs = {}
for sym in shear_stress.atoms(Symbol):
if sym != x and sym not in subs:
raise ValueError('value of %s was not passed.' %sym)
if length in subs:
length = subs[length]
# Returns Plot of Shear Stress
return plot (shear_stress.subs(subs), (x, 0, length),
title='Shear Stress', xlabel=r'$\mathrm{x}$', ylabel=r'$\tau$',
line_color='r')
def plot_shear_force(self, subs=None):
"""
Returns a plot for Shear force present in the Beam object.
Parameters
==========
subs : dictionary
Python dictionary containing Symbols as key and their
corresponding values.
Examples
========
There is a beam of length 8 meters. A constant distributed load of 10 KN/m
is applied from half of the beam till the end. There are two simple supports
below the beam, one at the starting point and another at the ending point
of the beam. A pointload of magnitude 5 KN is also applied from top of the
beam, at a distance of 4 meters from the starting point.
Take E = 200 GPa and I = 400*(10**-6) meter**4.
Using the sign convention of downwards forces being positive.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> R1, R2 = symbols('R1, R2')
>>> b = Beam(8, 200*(10**9), 400*(10**-6))
>>> b.apply_load(5000, 2, -1)
>>> b.apply_load(R1, 0, -1)
>>> b.apply_load(R2, 8, -1)
>>> b.apply_load(10000, 4, 0, end=8)
>>> b.bc_deflection = [(0, 0), (8, 0)]
>>> b.solve_for_reaction_loads(R1, R2)
>>> b.plot_shear_force()
Plot object containing:
[0]: cartesian line: 13750*SingularityFunction(x, 0, 0) - 5000*SingularityFunction(x, 2, 0)
- 10000*SingularityFunction(x, 4, 1) + 31250*SingularityFunction(x, 8, 0)
+ 10000*SingularityFunction(x, 8, 1) for x over (0.0, 8.0)
"""
shear_force = self.shear_force()
if subs is None:
subs = {}
for sym in shear_force.atoms(Symbol):
if sym == self.variable:
continue
if sym not in subs:
raise ValueError('Value of %s was not passed.' %sym)
if self.length in subs:
length = subs[self.length]
else:
length = self.length
return plot(shear_force.subs(subs), (self.variable, 0, length), title='Shear Force',
xlabel=r'$\mathrm{x}$', ylabel=r'$\mathrm{V}$', line_color='g')
def plot_bending_moment(self, subs=None):
"""
Returns a plot for Bending moment present in the Beam object.
Parameters
==========
subs : dictionary
Python dictionary containing Symbols as key and their
corresponding values.
Examples
========
There is a beam of length 8 meters. A constant distributed load of 10 KN/m
is applied from half of the beam till the end. There are two simple supports
below the beam, one at the starting point and another at the ending point
of the beam. A pointload of magnitude 5 KN is also applied from top of the
beam, at a distance of 4 meters from the starting point.
Take E = 200 GPa and I = 400*(10**-6) meter**4.
Using the sign convention of downwards forces being positive.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> R1, R2 = symbols('R1, R2')
>>> b = Beam(8, 200*(10**9), 400*(10**-6))
>>> b.apply_load(5000, 2, -1)
>>> b.apply_load(R1, 0, -1)
>>> b.apply_load(R2, 8, -1)
>>> b.apply_load(10000, 4, 0, end=8)
>>> b.bc_deflection = [(0, 0), (8, 0)]
>>> b.solve_for_reaction_loads(R1, R2)
>>> b.plot_bending_moment()
Plot object containing:
[0]: cartesian line: 13750*SingularityFunction(x, 0, 1) - 5000*SingularityFunction(x, 2, 1)
- 5000*SingularityFunction(x, 4, 2) + 31250*SingularityFunction(x, 8, 1)
+ 5000*SingularityFunction(x, 8, 2) for x over (0.0, 8.0)
"""
bending_moment = self.bending_moment()
if subs is None:
subs = {}
for sym in bending_moment.atoms(Symbol):
if sym == self.variable:
continue
if sym not in subs:
raise ValueError('Value of %s was not passed.' %sym)
if self.length in subs:
length = subs[self.length]
else:
length = self.length
return plot(bending_moment.subs(subs), (self.variable, 0, length), title='Bending Moment',
xlabel=r'$\mathrm{x}$', ylabel=r'$\mathrm{M}$', line_color='b')
def plot_slope(self, subs=None):
"""
Returns a plot for slope of deflection curve of the Beam object.
Parameters
==========
subs : dictionary
Python dictionary containing Symbols as key and their
corresponding values.
Examples
========
There is a beam of length 8 meters. A constant distributed load of 10 KN/m
is applied from half of the beam till the end. There are two simple supports
below the beam, one at the starting point and another at the ending point
of the beam. A pointload of magnitude 5 KN is also applied from top of the
beam, at a distance of 4 meters from the starting point.
Take E = 200 GPa and I = 400*(10**-6) meter**4.
Using the sign convention of downwards forces being positive.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> R1, R2 = symbols('R1, R2')
>>> b = Beam(8, 200*(10**9), 400*(10**-6))
>>> b.apply_load(5000, 2, -1)
>>> b.apply_load(R1, 0, -1)
>>> b.apply_load(R2, 8, -1)
>>> b.apply_load(10000, 4, 0, end=8)
>>> b.bc_deflection = [(0, 0), (8, 0)]
>>> b.solve_for_reaction_loads(R1, R2)
>>> b.plot_slope()
Plot object containing:
[0]: cartesian line: -8.59375e-5*SingularityFunction(x, 0, 2) + 3.125e-5*SingularityFunction(x, 2, 2)
+ 2.08333333333333e-5*SingularityFunction(x, 4, 3) - 0.0001953125*SingularityFunction(x, 8, 2)
- 2.08333333333333e-5*SingularityFunction(x, 8, 3) + 0.00138541666666667 for x over (0.0, 8.0)
"""
slope = self.slope()
if subs is None:
subs = {}
for sym in slope.atoms(Symbol):
if sym == self.variable:
continue
if sym not in subs:
raise ValueError('Value of %s was not passed.' %sym)
if self.length in subs:
length = subs[self.length]
else:
length = self.length
return plot(slope.subs(subs), (self.variable, 0, length), title='Slope',
xlabel=r'$\mathrm{x}$', ylabel=r'$\theta$', line_color='m')
def plot_deflection(self, subs=None):
"""
Returns a plot for deflection curve of the Beam object.
Parameters
==========
subs : dictionary
Python dictionary containing Symbols as key and their
corresponding values.
Examples
========
There is a beam of length 8 meters. A constant distributed load of 10 KN/m
is applied from half of the beam till the end. There are two simple supports
below the beam, one at the starting point and another at the ending point
of the beam. A pointload of magnitude 5 KN is also applied from top of the
beam, at a distance of 4 meters from the starting point.
Take E = 200 GPa and I = 400*(10**-6) meter**4.
Using the sign convention of downwards forces being positive.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> R1, R2 = symbols('R1, R2')
>>> b = Beam(8, 200*(10**9), 400*(10**-6))
>>> b.apply_load(5000, 2, -1)
>>> b.apply_load(R1, 0, -1)
>>> b.apply_load(R2, 8, -1)
>>> b.apply_load(10000, 4, 0, end=8)
>>> b.bc_deflection = [(0, 0), (8, 0)]
>>> b.solve_for_reaction_loads(R1, R2)
>>> b.plot_deflection()
Plot object containing:
[0]: cartesian line: 0.00138541666666667*x - 2.86458333333333e-5*SingularityFunction(x, 0, 3)
+ 1.04166666666667e-5*SingularityFunction(x, 2, 3) + 5.20833333333333e-6*SingularityFunction(x, 4, 4)
- 6.51041666666667e-5*SingularityFunction(x, 8, 3) - 5.20833333333333e-6*SingularityFunction(x, 8, 4)
for x over (0.0, 8.0)
"""
deflection = self.deflection()
if subs is None:
subs = {}
for sym in deflection.atoms(Symbol):
if sym == self.variable:
continue
if sym not in subs:
raise ValueError('Value of %s was not passed.' %sym)
if self.length in subs:
length = subs[self.length]
else:
length = self.length
return plot(deflection.subs(subs), (self.variable, 0, length),
title='Deflection', xlabel=r'$\mathrm{x}$', ylabel=r'$\delta$',
line_color='r')
def plot_loading_results(self, subs=None):
"""
Returns a subplot of Shear Force, Bending Moment,
Slope and Deflection of the Beam object.
Parameters
==========
subs : dictionary
Python dictionary containing Symbols as key and their
corresponding values.
Examples
========
There is a beam of length 8 meters. A constant distributed load of 10 KN/m
is applied from half of the beam till the end. There are two simple supports
below the beam, one at the starting point and another at the ending point
of the beam. A pointload of magnitude 5 KN is also applied from top of the
beam, at a distance of 4 meters from the starting point.
Take E = 200 GPa and I = 400*(10**-6) meter**4.
Using the sign convention of downwards forces being positive.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> R1, R2 = symbols('R1, R2')
>>> b = Beam(8, 200*(10**9), 400*(10**-6))
>>> b.apply_load(5000, 2, -1)
>>> b.apply_load(R1, 0, -1)
>>> b.apply_load(R2, 8, -1)
>>> b.apply_load(10000, 4, 0, end=8)
>>> b.bc_deflection = [(0, 0), (8, 0)]
>>> b.solve_for_reaction_loads(R1, R2)
>>> axes = b.plot_loading_results()
"""
length = self.length
variable = self.variable
if subs is None:
subs = {}
for sym in self.deflection().atoms(Symbol):
if sym == self.variable:
continue
if sym not in subs:
raise ValueError('Value of %s was not passed.' %sym)
if length in subs:
length = subs[length]
ax1 = plot(self.shear_force().subs(subs), (variable, 0, length),
title="Shear Force", xlabel=r'$\mathrm{x}$', ylabel=r'$\mathrm{V}$',
line_color='g', show=False)
ax2 = plot(self.bending_moment().subs(subs), (variable, 0, length),
title="Bending Moment", xlabel=r'$\mathrm{x}$', ylabel=r'$\mathrm{M}$',
line_color='b', show=False)
ax3 = plot(self.slope().subs(subs), (variable, 0, length),
title="Slope", xlabel=r'$\mathrm{x}$', ylabel=r'$\theta$',
line_color='m', show=False)
ax4 = plot(self.deflection().subs(subs), (variable, 0, length),
title="Deflection", xlabel=r'$\mathrm{x}$', ylabel=r'$\delta$',
line_color='r', show=False)
return PlotGrid(4, 1, ax1, ax2, ax3, ax4)
def _solve_for_ild_equations(self):
"""
Helper function for I.L.D. It takes the unsubstituted
copy of the load equation and uses it to calculate shear force and bending
moment equations.
"""
x = self.variable
shear_force = -integrate(self._original_load, x)
bending_moment = integrate(shear_force, x)
return shear_force, bending_moment
def solve_for_ild_reactions(self, value, *reactions):
"""
Determines the Influence Line Diagram equations for reaction
forces under the effect of a moving load.
Parameters
==========
value : Integer
Magnitude of moving load
reactions :
The reaction forces applied on the beam.
Examples
========
There is a beam of length 10 meters. There are two simple supports
below the beam, one at the starting point and another at the ending
point of the beam. Calculate the I.L.D. equations for reaction forces
under the effect of a moving load of magnitude 1kN.
.. image:: ildreaction.png
Using the sign convention of downwards forces being positive.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy import symbols
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> E, I = symbols('E, I')
>>> R_0, R_10 = symbols('R_0, R_10')
>>> b = Beam(10, E, I)
>>> b.apply_support(0, 'roller')
>>> b.apply_support(10, 'roller')
>>> b.solve_for_ild_reactions(1,R_0,R_10)
>>> b.ild_reactions
{R_0: x/10 - 1, R_10: -x/10}
"""
shear_force, bending_moment = self._solve_for_ild_equations()
x = self.variable
l = self.length
C3 = Symbol('C3')
C4 = Symbol('C4')
shear_curve = limit(shear_force, x, l) - value
moment_curve = limit(bending_moment, x, l) - value*(l-x)
slope_eqs = []
deflection_eqs = []
slope_curve = integrate(bending_moment, x) + C3
for position, value in self._boundary_conditions['slope']:
eqs = slope_curve.subs(x, position) - value
slope_eqs.append(eqs)
deflection_curve = integrate(slope_curve, x) + C4
for position, value in self._boundary_conditions['deflection']:
eqs = deflection_curve.subs(x, position) - value
deflection_eqs.append(eqs)
solution = list((linsolve([shear_curve, moment_curve] + slope_eqs
+ deflection_eqs, (C3, C4) + reactions).args)[0])
solution = solution[2:]
# Determining the equations and solving them.
self._ild_reactions = dict(zip(reactions, solution))
def plot_ild_reactions(self, subs=None):
"""
Plots the Influence Line Diagram of Reaction Forces
under the effect of a moving load. This function
should be called after calling solve_for_ild_reactions().
Parameters
==========
subs : dictionary
Python dictionary containing Symbols as key and their
corresponding values.
Examples
========
There is a beam of length 10 meters. A point load of magnitude 5KN
is also applied from top of the beam, at a distance of 4 meters
from the starting point. There are two simple supports below the
beam, located at the starting point and at a distance of 7 meters
from the starting point. Plot the I.L.D. equations for reactions
at both support points under the effect of a moving load
of magnitude 1kN.
Using the sign convention of downwards forces being positive.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy import symbols
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> E, I = symbols('E, I')
>>> R_0, R_7 = symbols('R_0, R_7')
>>> b = Beam(10, E, I)
>>> b.apply_support(0, 'roller')
>>> b.apply_support(7, 'roller')
>>> b.apply_load(5,4,-1)
>>> b.solve_for_ild_reactions(1,R_0,R_7)
>>> b.ild_reactions
{R_0: x/7 - 22/7, R_7: -x/7 - 20/7}
>>> b.plot_ild_reactions()
PlotGrid object containing:
Plot[0]:Plot object containing:
[0]: cartesian line: x/7 - 22/7 for x over (0.0, 10.0)
Plot[1]:Plot object containing:
[0]: cartesian line: -x/7 - 20/7 for x over (0.0, 10.0)
"""
if not self._ild_reactions:
raise ValueError("I.L.D. reaction equations not found. Please use solve_for_ild_reactions() to generate the I.L.D. reaction equations.")
x = self.variable
ildplots = []
if subs is None:
subs = {}
for reaction in self._ild_reactions:
for sym in self._ild_reactions[reaction].atoms(Symbol):
if sym != x and sym not in subs:
raise ValueError('Value of %s was not passed.' %sym)
for sym in self._length.atoms(Symbol):
if sym != x and sym not in subs:
raise ValueError('Value of %s was not passed.' %sym)
for reaction in self._ild_reactions:
ildplots.append(plot(self._ild_reactions[reaction].subs(subs),
(x, 0, self._length.subs(subs)), title='I.L.D. for Reactions',
xlabel=x, ylabel=reaction, line_color='blue', show=False))
return PlotGrid(len(ildplots), 1, *ildplots)
def solve_for_ild_shear(self, distance, value, *reactions):
"""
Determines the Influence Line Diagram equations for shear at a
specified point under the effect of a moving load.
Parameters
==========
distance : Integer
Distance of the point from the start of the beam
for which equations are to be determined
value : Integer
Magnitude of moving load
reactions :
The reaction forces applied on the beam.
Examples
========
There is a beam of length 12 meters. There are two simple supports
below the beam, one at the starting point and another at a distance
of 8 meters. Calculate the I.L.D. equations for Shear at a distance
of 4 meters under the effect of a moving load of magnitude 1kN.
.. image:: ildshear.png
Using the sign convention of downwards forces being positive.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy import symbols
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> E, I = symbols('E, I')
>>> R_0, R_8 = symbols('R_0, R_8')
>>> b = Beam(12, E, I)
>>> b.apply_support(0, 'roller')
>>> b.apply_support(8, 'roller')
>>> b.solve_for_ild_reactions(1, R_0, R_8)
>>> b.solve_for_ild_shear(4, 1, R_0, R_8)
>>> b.ild_shear
Piecewise((x/8, x < 4), (x/8 - 1, x > 4))
"""
x = self.variable
l = self.length
shear_force, _ = self._solve_for_ild_equations()
shear_curve1 = value - limit(shear_force, x, distance)
shear_curve2 = (limit(shear_force, x, l) - limit(shear_force, x, distance)) - value
for reaction in reactions:
shear_curve1 = shear_curve1.subs(reaction,self._ild_reactions[reaction])
shear_curve2 = shear_curve2.subs(reaction,self._ild_reactions[reaction])
shear_eq = Piecewise((shear_curve1, x < distance), (shear_curve2, x > distance))
self._ild_shear = shear_eq
def plot_ild_shear(self,subs=None):
"""
Plots the Influence Line Diagram for Shear under the effect
of a moving load. This function should be called after
calling solve_for_ild_shear().
Parameters
==========
subs : dictionary
Python dictionary containing Symbols as key and their
corresponding values.
Examples
========
There is a beam of length 12 meters. There are two simple supports
below the beam, one at the starting point and another at a distance
of 8 meters. Plot the I.L.D. for Shear at a distance
of 4 meters under the effect of a moving load of magnitude 1kN.
.. image:: ildshear.png
Using the sign convention of downwards forces being positive.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy import symbols
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> E, I = symbols('E, I')
>>> R_0, R_8 = symbols('R_0, R_8')
>>> b = Beam(12, E, I)
>>> b.apply_support(0, 'roller')
>>> b.apply_support(8, 'roller')
>>> b.solve_for_ild_reactions(1, R_0, R_8)
>>> b.solve_for_ild_shear(4, 1, R_0, R_8)
>>> b.ild_shear
Piecewise((x/8, x < 4), (x/8 - 1, x > 4))
>>> b.plot_ild_shear()
Plot object containing:
[0]: cartesian line: Piecewise((x/8, x < 4), (x/8 - 1, x > 4)) for x over (0.0, 12.0)
"""
if not self._ild_shear:
raise ValueError("I.L.D. shear equation not found. Please use solve_for_ild_shear() to generate the I.L.D. shear equations.")
x = self.variable
l = self._length
if subs is None:
subs = {}
for sym in self._ild_shear.atoms(Symbol):
if sym != x and sym not in subs:
raise ValueError('Value of %s was not passed.' %sym)
for sym in self._length.atoms(Symbol):
if sym != x and sym not in subs:
raise ValueError('Value of %s was not passed.' %sym)
return plot(self._ild_shear.subs(subs), (x, 0, l), title='I.L.D. for Shear',
xlabel=r'$\mathrm{X}$', ylabel=r'$\mathrm{V}$', line_color='blue',show=True)
def solve_for_ild_moment(self, distance, value, *reactions):
"""
Determines the Influence Line Diagram equations for moment at a
specified point under the effect of a moving load.
Parameters
==========
distance : Integer
Distance of the point from the start of the beam
for which equations are to be determined
value : Integer
Magnitude of moving load
reactions :
The reaction forces applied on the beam.
Examples
========
There is a beam of length 12 meters. There are two simple supports
below the beam, one at the starting point and another at a distance
of 8 meters. Calculate the I.L.D. equations for Moment at a distance
of 4 meters under the effect of a moving load of magnitude 1kN.
.. image:: ildshear.png
Using the sign convention of downwards forces being positive.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy import symbols
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> E, I = symbols('E, I')
>>> R_0, R_8 = symbols('R_0, R_8')
>>> b = Beam(12, E, I)
>>> b.apply_support(0, 'roller')
>>> b.apply_support(8, 'roller')
>>> b.solve_for_ild_reactions(1, R_0, R_8)
>>> b.solve_for_ild_moment(4, 1, R_0, R_8)
>>> b.ild_moment
Piecewise((-x/2, x < 4), (x/2 - 4, x > 4))
"""
x = self.variable
l = self.length
_, moment = self._solve_for_ild_equations()
moment_curve1 = value*(distance-x) - limit(moment, x, distance)
moment_curve2= (limit(moment, x, l)-limit(moment, x, distance))-value*(l-x)
for reaction in reactions:
moment_curve1 = moment_curve1.subs(reaction, self._ild_reactions[reaction])
moment_curve2 = moment_curve2.subs(reaction, self._ild_reactions[reaction])
moment_eq = Piecewise((moment_curve1, x < distance), (moment_curve2, x > distance))
self._ild_moment = moment_eq
def plot_ild_moment(self,subs=None):
"""
Plots the Influence Line Diagram for Moment under the effect
of a moving load. This function should be called after
calling solve_for_ild_moment().
Parameters
==========
subs : dictionary
Python dictionary containing Symbols as key and their
corresponding values.
Examples
========
There is a beam of length 12 meters. There are two simple supports
below the beam, one at the starting point and another at a distance
of 8 meters. Plot the I.L.D. for Moment at a distance
of 4 meters under the effect of a moving load of magnitude 1kN.
.. image:: ildshear.png
Using the sign convention of downwards forces being positive.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy import symbols
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> E, I = symbols('E, I')
>>> R_0, R_8 = symbols('R_0, R_8')
>>> b = Beam(12, E, I)
>>> b.apply_support(0, 'roller')
>>> b.apply_support(8, 'roller')
>>> b.solve_for_ild_reactions(1, R_0, R_8)
>>> b.solve_for_ild_moment(4, 1, R_0, R_8)
>>> b.ild_moment
Piecewise((-x/2, x < 4), (x/2 - 4, x > 4))
>>> b.plot_ild_moment()
Plot object containing:
[0]: cartesian line: Piecewise((-x/2, x < 4), (x/2 - 4, x > 4)) for x over (0.0, 12.0)
"""
if not self._ild_moment:
raise ValueError("I.L.D. moment equation not found. Please use solve_for_ild_moment() to generate the I.L.D. moment equations.")
x = self.variable
if subs is None:
subs = {}
for sym in self._ild_moment.atoms(Symbol):
if sym != x and sym not in subs:
raise ValueError('Value of %s was not passed.' %sym)
for sym in self._length.atoms(Symbol):
if sym != x and sym not in subs:
raise ValueError('Value of %s was not passed.' %sym)
return plot(self._ild_moment.subs(subs), (x, 0, self._length), title='I.L.D. for Moment',
xlabel=r'$\mathrm{X}$', ylabel=r'$\mathrm{M}$', line_color='blue', show=True)
@doctest_depends_on(modules=('numpy',))
def draw(self, pictorial=True):
"""
Returns a plot object representing the beam diagram of the beam.
.. note::
The user must be careful while entering load values.
The draw function assumes a sign convention which is used
for plotting loads.
Given a right handed coordinate system with XYZ coordinates,
the beam's length is assumed to be along the positive X axis.
The draw function recognizes positve loads(with n>-2) as loads
acting along negative Y direction and positve moments acting
along positive Z direction.
Parameters
==========
pictorial: Boolean (default=True)
Setting ``pictorial=True`` would simply create a pictorial (scaled) view
of the beam diagram not with the exact dimensions.
Although setting ``pictorial=False`` would create a beam diagram with
the exact dimensions on the plot
Examples
========
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> R1, R2 = symbols('R1, R2')
>>> E, I = symbols('E, I')
>>> b = Beam(50, 20, 30)
>>> b.apply_load(10, 2, -1)
>>> b.apply_load(R1, 10, -1)
>>> b.apply_load(R2, 30, -1)
>>> b.apply_load(90, 5, 0, 23)
>>> b.apply_load(10, 30, 1, 50)
>>> b.apply_support(50, "pin")
>>> b.apply_support(0, "fixed")
>>> b.apply_support(20, "roller")
>>> p = b.draw()
>>> p
Plot object containing:
[0]: cartesian line: 25*SingularityFunction(x, 5, 0) - 25*SingularityFunction(x, 23, 0)
+ SingularityFunction(x, 30, 1) - 20*SingularityFunction(x, 50, 0)
- SingularityFunction(x, 50, 1) + 5 for x over (0.0, 50.0)
[1]: cartesian line: 5 for x over (0.0, 50.0)
>>> p.show()
"""
if not numpy:
raise ImportError("To use this function numpy module is required")
x = self.variable
# checking whether length is an expression in terms of any Symbol.
if isinstance(self.length, Expr):
l = list(self.length.atoms(Symbol))
# assigning every Symbol a default value of 10
l = {i:10 for i in l}
length = self.length.subs(l)
else:
l = {}
length = self.length
height = length/10
rectangles = []
rectangles.append({'xy':(0, 0), 'width':length, 'height': height, 'facecolor':"brown"})
annotations, markers, load_eq,load_eq1, fill = self._draw_load(pictorial, length, l)
support_markers, support_rectangles = self._draw_supports(length, l)
rectangles += support_rectangles
markers += support_markers
sing_plot = plot(height + load_eq, height + load_eq1, (x, 0, length),
xlim=(-height, length + height), ylim=(-length, 1.25*length), annotations=annotations,
markers=markers, rectangles=rectangles, line_color='brown', fill=fill, axis=False, show=False)
return sing_plot
def _draw_load(self, pictorial, length, l):
loads = list(set(self.applied_loads) - set(self._support_as_loads))
height = length/10
x = self.variable
annotations = []
markers = []
load_args = []
scaled_load = 0
load_args1 = []
scaled_load1 = 0
load_eq = 0 # For positive valued higher order loads
load_eq1 = 0 # For negative valued higher order loads
fill = None
plus = 0 # For positive valued higher order loads
minus = 0 # For negative valued higher order loads
for load in loads:
# check if the position of load is in terms of the beam length.
if l:
pos = load[1].subs(l)
else:
pos = load[1]
# point loads
if load[2] == -1:
if isinstance(load[0], Symbol) or load[0].is_negative:
annotations.append({'text':'', 'xy':(pos, 0), 'xytext':(pos, height - 4*height), 'arrowprops':dict(width= 1.5, headlength=5, headwidth=5, facecolor='black')})
else:
annotations.append({'text':'', 'xy':(pos, height), 'xytext':(pos, height*4), 'arrowprops':dict(width= 1.5, headlength=4, headwidth=4, facecolor='black')})
# moment loads
elif load[2] == -2:
if load[0].is_negative:
markers.append({'args':[[pos], [height/2]], 'marker': r'$\circlearrowright$', 'markersize':15})
else:
markers.append({'args':[[pos], [height/2]], 'marker': r'$\circlearrowleft$', 'markersize':15})
# higher order loads
elif load[2] >= 0:
# `fill` will be assigned only when higher order loads are present
value, start, order, end = load
# Positive loads have their seperate equations
if(value>0):
plus = 1
# if pictorial is True we remake the load equation again with
# some constant magnitude values.
if pictorial:
value = 10**(1-order) if order > 0 else length/2
scaled_load += value*SingularityFunction(x, start, order)
if end:
f2 = 10**(1-order)*x**order if order > 0 else length/2*x**order
for i in range(0, order + 1):
scaled_load -= (f2.diff(x, i).subs(x, end - start)*
SingularityFunction(x, end, i)/factorial(i))
if pictorial:
if isinstance(scaled_load, Add):
load_args = scaled_load.args
else:
# when the load equation consists of only a single term
load_args = (scaled_load,)
load_eq = [i.subs(l) for i in load_args]
else:
if isinstance(self.load, Add):
load_args = self.load.args
else:
load_args = (self.load,)
load_eq = [i.subs(l) for i in load_args if list(i.atoms(SingularityFunction))[0].args[2] >= 0]
load_eq = Add(*load_eq)
# filling higher order loads with colour
expr = height + load_eq.rewrite(Piecewise)
y1 = lambdify(x, expr, 'numpy')
# For loads with negative value
else:
minus = 1
# if pictorial is True we remake the load equation again with
# some constant magnitude values.
if pictorial:
value = 10**(1-order) if order > 0 else length/2
scaled_load1 += value*SingularityFunction(x, start, order)
if end:
f2 = 10**(1-order)*x**order if order > 0 else length/2*x**order
for i in range(0, order + 1):
scaled_load1 -= (f2.diff(x, i).subs(x, end - start)*
SingularityFunction(x, end, i)/factorial(i))
if pictorial:
if isinstance(scaled_load1, Add):
load_args1 = scaled_load1.args
else:
# when the load equation consists of only a single term
load_args1 = (scaled_load1,)
load_eq1 = [i.subs(l) for i in load_args1]
else:
if isinstance(self.load, Add):
load_args1 = self.load.args1
else:
load_args1 = (self.load,)
load_eq1 = [i.subs(l) for i in load_args if list(i.atoms(SingularityFunction))[0].args[2] >= 0]
load_eq1 = -Add(*load_eq1)-height
# filling higher order loads with colour
expr = height + load_eq1.rewrite(Piecewise)
y1_ = lambdify(x, expr, 'numpy')
y = numpy.arange(0, float(length), 0.001)
y2 = float(height)
if(plus == 1 and minus == 1):
fill = {'x': y, 'y1': y1(y), 'y2': y1_(y), 'color':'darkkhaki'}
elif(plus == 1):
fill = {'x': y, 'y1': y1(y), 'y2': y2, 'color':'darkkhaki'}
else:
fill = {'x': y, 'y1': y1_(y), 'y2': y2, 'color':'darkkhaki'}
return annotations, markers, load_eq, load_eq1, fill
def _draw_supports(self, length, l):
height = float(length/10)
support_markers = []
support_rectangles = []
for support in self._applied_supports:
if l:
pos = support[0].subs(l)
else:
pos = support[0]
if support[1] == "pin":
support_markers.append({'args':[pos, [0]], 'marker':6, 'markersize':13, 'color':"black"})
elif support[1] == "roller":
support_markers.append({'args':[pos, [-height/2.5]], 'marker':'o', 'markersize':11, 'color':"black"})
elif support[1] == "fixed":
if pos == 0:
support_rectangles.append({'xy':(0, -3*height), 'width':-length/20, 'height':6*height + height, 'fill':False, 'hatch':'/////'})
else:
support_rectangles.append({'xy':(length, -3*height), 'width':length/20, 'height': 6*height + height, 'fill':False, 'hatch':'/////'})
return support_markers, support_rectangles
class Beam3D(Beam):
"""
This class handles loads applied in any direction of a 3D space along
with unequal values of Second moment along different axes.
.. note::
A consistent sign convention must be used while solving a beam
bending problem; the results will
automatically follow the chosen sign convention.
This class assumes that any kind of distributed load/moment is
applied through out the span of a beam.
Examples
========
There is a beam of l meters long. A constant distributed load of magnitude q
is applied along y-axis from start till the end of beam. A constant distributed
moment of magnitude m is also applied along z-axis from start till the end of beam.
Beam is fixed at both of its end. So, deflection of the beam at the both ends
is restricted.
>>> from sympy.physics.continuum_mechanics.beam import Beam3D
>>> from sympy import symbols, simplify, collect, factor
>>> l, E, G, I, A = symbols('l, E, G, I, A')
>>> b = Beam3D(l, E, G, I, A)
>>> x, q, m = symbols('x, q, m')
>>> b.apply_load(q, 0, 0, dir="y")
>>> b.apply_moment_load(m, 0, -1, dir="z")
>>> b.shear_force()
[0, -q*x, 0]
>>> b.bending_moment()
[0, 0, -m*x + q*x**2/2]
>>> b.bc_slope = [(0, [0, 0, 0]), (l, [0, 0, 0])]
>>> b.bc_deflection = [(0, [0, 0, 0]), (l, [0, 0, 0])]
>>> b.solve_slope_deflection()
>>> factor(b.slope())
[0, 0, x*(-l + x)*(-A*G*l**3*q + 2*A*G*l**2*q*x - 12*E*I*l*q
- 72*E*I*m + 24*E*I*q*x)/(12*E*I*(A*G*l**2 + 12*E*I))]
>>> dx, dy, dz = b.deflection()
>>> dy = collect(simplify(dy), x)
>>> dx == dz == 0
True
>>> dy == (x*(12*E*I*l*(A*G*l**2*q - 2*A*G*l*m + 12*E*I*q)
... + x*(A*G*l*(3*l*(A*G*l**2*q - 2*A*G*l*m + 12*E*I*q) + x*(-2*A*G*l**2*q + 4*A*G*l*m - 24*E*I*q))
... + A*G*(A*G*l**2 + 12*E*I)*(-2*l**2*q + 6*l*m - 4*m*x + q*x**2)
... - 12*E*I*q*(A*G*l**2 + 12*E*I)))/(24*A*E*G*I*(A*G*l**2 + 12*E*I)))
True
References
==========
.. [1] http://homes.civil.aau.dk/jc/FemteSemester/Beams3D.pdf
"""
def __init__(self, length, elastic_modulus, shear_modulus, second_moment,
area, variable=Symbol('x')):
"""Initializes the class.
Parameters
==========
length : Sympifyable
A Symbol or value representing the Beam's length.
elastic_modulus : Sympifyable
A SymPy expression representing the Beam's Modulus of Elasticity.
It is a measure of the stiffness of the Beam material.
shear_modulus : Sympifyable
A SymPy expression representing the Beam's Modulus of rigidity.
It is a measure of rigidity of the Beam material.
second_moment : Sympifyable or list
A list of two elements having SymPy expression representing the
Beam's Second moment of area. First value represent Second moment
across y-axis and second across z-axis.
Single SymPy expression can be passed if both values are same
area : Sympifyable
A SymPy expression representing the Beam's cross-sectional area
in a plane prependicular to length of the Beam.
variable : Symbol, optional
A Symbol object that will be used as the variable along the beam
while representing the load, shear, moment, slope and deflection
curve. By default, it is set to ``Symbol('x')``.
"""
super().__init__(length, elastic_modulus, second_moment, variable)
self.shear_modulus = shear_modulus
self._area = area
self._load_vector = [0, 0, 0]
self._moment_load_vector = [0, 0, 0]
self._load_Singularity = [0, 0, 0]
self._slope = [0, 0, 0]
self._deflection = [0, 0, 0]
@property
def shear_modulus(self):
"""Young's Modulus of the Beam. """
return self._shear_modulus
@shear_modulus.setter
def shear_modulus(self, e):
self._shear_modulus = sympify(e)
@property
def second_moment(self):
"""Second moment of area of the Beam. """
return self._second_moment
@second_moment.setter
def second_moment(self, i):
if isinstance(i, list):
i = [sympify(x) for x in i]
self._second_moment = i
else:
self._second_moment = sympify(i)
@property
def area(self):
"""Cross-sectional area of the Beam. """
return self._area
@area.setter
def area(self, a):
self._area = sympify(a)
@property
def load_vector(self):
"""
Returns a three element list representing the load vector.
"""
return self._load_vector
@property
def moment_load_vector(self):
"""
Returns a three element list representing moment loads on Beam.
"""
return self._moment_load_vector
@property
def boundary_conditions(self):
"""
Returns a dictionary of boundary conditions applied on the beam.
The dictionary has two keywords namely slope and deflection.
The value of each keyword is a list of tuple, where each tuple
contains location and value of a boundary condition in the format
(location, value). Further each value is a list corresponding to
slope or deflection(s) values along three axes at that location.
Examples
========
There is a beam of length 4 meters. The slope at 0 should be 4 along
the x-axis and 0 along others. At the other end of beam, deflection
along all the three axes should be zero.
>>> from sympy.physics.continuum_mechanics.beam import Beam3D
>>> from sympy import symbols
>>> l, E, G, I, A, x = symbols('l, E, G, I, A, x')
>>> b = Beam3D(30, E, G, I, A, x)
>>> b.bc_slope = [(0, (4, 0, 0))]
>>> b.bc_deflection = [(4, [0, 0, 0])]
>>> b.boundary_conditions
{'deflection': [(4, [0, 0, 0])], 'slope': [(0, (4, 0, 0))]}
Here the deflection of the beam should be ``0`` along all the three axes at ``4``.
Similarly, the slope of the beam should be ``4`` along x-axis and ``0``
along y and z axis at ``0``.
"""
return self._boundary_conditions
def polar_moment(self):
"""
Returns the polar moment of area of the beam
about the X axis with respect to the centroid.
Examples
========
>>> from sympy.physics.continuum_mechanics.beam import Beam3D
>>> from sympy import symbols
>>> l, E, G, I, A = symbols('l, E, G, I, A')
>>> b = Beam3D(l, E, G, I, A)
>>> b.polar_moment()
2*I
>>> I1 = [9, 15]
>>> b = Beam3D(l, E, G, I1, A)
>>> b.polar_moment()
24
"""
if not iterable(self.second_moment):
return 2*self.second_moment
return sum(self.second_moment)
def apply_load(self, value, start, order, dir="y"):
"""
This method adds up the force load to a particular beam object.
Parameters
==========
value : Sympifyable
The magnitude of an applied load.
dir : String
Axis along which load is applied.
order : Integer
The order of the applied load.
- For point loads, order=-1
- For constant distributed load, order=0
- For ramp loads, order=1
- For parabolic ramp loads, order=2
- ... so on.
"""
x = self.variable
value = sympify(value)
start = sympify(start)
order = sympify(order)
if dir == "x":
if not order == -1:
self._load_vector[0] += value
self._load_Singularity[0] += value*SingularityFunction(x, start, order)
elif dir == "y":
if not order == -1:
self._load_vector[1] += value
self._load_Singularity[1] += value*SingularityFunction(x, start, order)
else:
if not order == -1:
self._load_vector[2] += value
self._load_Singularity[2] += value*SingularityFunction(x, start, order)
def apply_moment_load(self, value, start, order, dir="y"):
"""
This method adds up the moment loads to a particular beam object.
Parameters
==========
value : Sympifyable
The magnitude of an applied moment.
dir : String
Axis along which moment is applied.
order : Integer
The order of the applied load.
- For point moments, order=-2
- For constant distributed moment, order=-1
- For ramp moments, order=0
- For parabolic ramp moments, order=1
- ... so on.
"""
x = self.variable
value = sympify(value)
start = sympify(start)
order = sympify(order)
if dir == "x":
if not order == -2:
self._moment_load_vector[0] += value
self._load_Singularity[0] += value*SingularityFunction(x, start, order)
elif dir == "y":
if not order == -2:
self._moment_load_vector[1] += value
self._load_Singularity[0] += value*SingularityFunction(x, start, order)
else:
if not order == -2:
self._moment_load_vector[2] += value
self._load_Singularity[0] += value*SingularityFunction(x, start, order)
def apply_support(self, loc, type="fixed"):
if type in ("pin", "roller"):
reaction_load = Symbol('R_'+str(loc))
self._reaction_loads[reaction_load] = reaction_load
self.bc_deflection.append((loc, [0, 0, 0]))
else:
reaction_load = Symbol('R_'+str(loc))
reaction_moment = Symbol('M_'+str(loc))
self._reaction_loads[reaction_load] = [reaction_load, reaction_moment]
self.bc_deflection.append((loc, [0, 0, 0]))
self.bc_slope.append((loc, [0, 0, 0]))
def solve_for_reaction_loads(self, *reaction):
"""
Solves for the reaction forces.
Examples
========
There is a beam of length 30 meters. It it supported by rollers at
of its end. A constant distributed load of magnitude 8 N is applied
from start till its end along y-axis. Another linear load having
slope equal to 9 is applied along z-axis.
>>> from sympy.physics.continuum_mechanics.beam import Beam3D
>>> from sympy import symbols
>>> l, E, G, I, A, x = symbols('l, E, G, I, A, x')
>>> b = Beam3D(30, E, G, I, A, x)
>>> b.apply_load(8, start=0, order=0, dir="y")
>>> b.apply_load(9*x, start=0, order=0, dir="z")
>>> b.bc_deflection = [(0, [0, 0, 0]), (30, [0, 0, 0])]
>>> R1, R2, R3, R4 = symbols('R1, R2, R3, R4')
>>> b.apply_load(R1, start=0, order=-1, dir="y")
>>> b.apply_load(R2, start=30, order=-1, dir="y")
>>> b.apply_load(R3, start=0, order=-1, dir="z")
>>> b.apply_load(R4, start=30, order=-1, dir="z")
>>> b.solve_for_reaction_loads(R1, R2, R3, R4)
>>> b.reaction_loads
{R1: -120, R2: -120, R3: -1350, R4: -2700}
"""
x = self.variable
l = self.length
q = self._load_Singularity
shear_curves = [integrate(load, x) for load in q]
moment_curves = [integrate(shear, x) for shear in shear_curves]
for i in range(3):
react = [r for r in reaction if (shear_curves[i].has(r) or moment_curves[i].has(r))]
if len(react) == 0:
continue
shear_curve = limit(shear_curves[i], x, l)
moment_curve = limit(moment_curves[i], x, l)
sol = list((linsolve([shear_curve, moment_curve], react).args)[0])
sol_dict = dict(zip(react, sol))
reaction_loads = self._reaction_loads
# Check if any of the evaluated rection exists in another direction
# and if it exists then it should have same value.
for key in sol_dict:
if key in reaction_loads and sol_dict[key] != reaction_loads[key]:
raise ValueError("Ambiguous solution for %s in different directions." % key)
self._reaction_loads.update(sol_dict)
def shear_force(self):
"""
Returns a list of three expressions which represents the shear force
curve of the Beam object along all three axes.
"""
x = self.variable
q = self._load_vector
return [integrate(-q[0], x), integrate(-q[1], x), integrate(-q[2], x)]
def axial_force(self):
"""
Returns expression of Axial shear force present inside the Beam object.
"""
return self.shear_force()[0]
def shear_stress(self):
"""
Returns a list of three expressions which represents the shear stress
curve of the Beam object along all three axes.
"""
return [self.shear_force()[0]/self._area, self.shear_force()[1]/self._area, self.shear_force()[2]/self._area]
def axial_stress(self):
"""
Returns expression of Axial stress present inside the Beam object.
"""
return self.axial_force()/self._area
def bending_moment(self):
"""
Returns a list of three expressions which represents the bending moment
curve of the Beam object along all three axes.
"""
x = self.variable
m = self._moment_load_vector
shear = self.shear_force()
return [integrate(-m[0], x), integrate(-m[1] + shear[2], x),
integrate(-m[2] - shear[1], x) ]
def torsional_moment(self):
"""
Returns expression of Torsional moment present inside the Beam object.
"""
return self.bending_moment()[0]
def solve_slope_deflection(self):
x = self.variable
l = self.length
E = self.elastic_modulus
G = self.shear_modulus
I = self.second_moment
if isinstance(I, list):
I_y, I_z = I[0], I[1]
else:
I_y = I_z = I
A = self._area
load = self._load_vector
moment = self._moment_load_vector
defl = Function('defl')
theta = Function('theta')
# Finding deflection along x-axis(and corresponding slope value by differentiating it)
# Equation used: Derivative(E*A*Derivative(def_x(x), x), x) + load_x = 0
eq = Derivative(E*A*Derivative(defl(x), x), x) + load[0]
def_x = dsolve(Eq(eq, 0), defl(x)).args[1]
# Solving constants originated from dsolve
C1 = Symbol('C1')
C2 = Symbol('C2')
constants = list((linsolve([def_x.subs(x, 0), def_x.subs(x, l)], C1, C2).args)[0])
def_x = def_x.subs({C1:constants[0], C2:constants[1]})
slope_x = def_x.diff(x)
self._deflection[0] = def_x
self._slope[0] = slope_x
# Finding deflection along y-axis and slope across z-axis. System of equation involved:
# 1: Derivative(E*I_z*Derivative(theta_z(x), x), x) + G*A*(Derivative(defl_y(x), x) - theta_z(x)) + moment_z = 0
# 2: Derivative(G*A*(Derivative(defl_y(x), x) - theta_z(x)), x) + load_y = 0
C_i = Symbol('C_i')
# Substitute value of `G*A*(Derivative(defl_y(x), x) - theta_z(x))` from (2) in (1)
eq1 = Derivative(E*I_z*Derivative(theta(x), x), x) + (integrate(-load[1], x) + C_i) + moment[2]
slope_z = dsolve(Eq(eq1, 0)).args[1]
# Solve for constants originated from using dsolve on eq1
constants = list((linsolve([slope_z.subs(x, 0), slope_z.subs(x, l)], C1, C2).args)[0])
slope_z = slope_z.subs({C1:constants[0], C2:constants[1]})
# Put value of slope obtained back in (2) to solve for `C_i` and find deflection across y-axis
eq2 = G*A*(Derivative(defl(x), x)) + load[1]*x - C_i - G*A*slope_z
def_y = dsolve(Eq(eq2, 0), defl(x)).args[1]
# Solve for constants originated from using dsolve on eq2
constants = list((linsolve([def_y.subs(x, 0), def_y.subs(x, l)], C1, C_i).args)[0])
self._deflection[1] = def_y.subs({C1:constants[0], C_i:constants[1]})
self._slope[2] = slope_z.subs(C_i, constants[1])
# Finding deflection along z-axis and slope across y-axis. System of equation involved:
# 1: Derivative(E*I_y*Derivative(theta_y(x), x), x) - G*A*(Derivative(defl_z(x), x) + theta_y(x)) + moment_y = 0
# 2: Derivative(G*A*(Derivative(defl_z(x), x) + theta_y(x)), x) + load_z = 0
# Substitute value of `G*A*(Derivative(defl_y(x), x) + theta_z(x))` from (2) in (1)
eq1 = Derivative(E*I_y*Derivative(theta(x), x), x) + (integrate(load[2], x) - C_i) + moment[1]
slope_y = dsolve(Eq(eq1, 0)).args[1]
# Solve for constants originated from using dsolve on eq1
constants = list((linsolve([slope_y.subs(x, 0), slope_y.subs(x, l)], C1, C2).args)[0])
slope_y = slope_y.subs({C1:constants[0], C2:constants[1]})
# Put value of slope obtained back in (2) to solve for `C_i` and find deflection across z-axis
eq2 = G*A*(Derivative(defl(x), x)) + load[2]*x - C_i + G*A*slope_y
def_z = dsolve(Eq(eq2,0)).args[1]
# Solve for constants originated from using dsolve on eq2
constants = list((linsolve([def_z.subs(x, 0), def_z.subs(x, l)], C1, C_i).args)[0])
self._deflection[2] = def_z.subs({C1:constants[0], C_i:constants[1]})
self._slope[1] = slope_y.subs(C_i, constants[1])
def slope(self):
"""
Returns a three element list representing slope of deflection curve
along all the three axes.
"""
return self._slope
def deflection(self):
"""
Returns a three element list representing deflection curve along all
the three axes.
"""
return self._deflection
def _plot_shear_force(self, dir, subs=None):
shear_force = self.shear_force()
if dir == 'x':
dir_num = 0
color = 'r'
elif dir == 'y':
dir_num = 1
color = 'g'
elif dir == 'z':
dir_num = 2
color = 'b'
if subs is None:
subs = {}
for sym in shear_force[dir_num].atoms(Symbol):
if sym != self.variable and sym not in subs:
raise ValueError('Value of %s was not passed.' %sym)
if self.length in subs:
length = subs[self.length]
else:
length = self.length
return plot(shear_force[dir_num].subs(subs), (self.variable, 0, length), show = False, title='Shear Force along %c direction'%dir,
xlabel=r'$\mathrm{X}$', ylabel=r'$\mathrm{V(%c)}$'%dir, line_color=color)
def plot_shear_force(self, dir="all", subs=None):
"""
Returns a plot for Shear force along all three directions
present in the Beam object.
Parameters
==========
dir : string (default : "all")
Direction along which shear force plot is required.
If no direction is specified, all plots are displayed.
subs : dictionary
Python dictionary containing Symbols as key and their
corresponding values.
Examples
========
There is a beam of length 20 meters. It it supported by rollers
at of its end. A linear load having slope equal to 12 is applied
along y-axis. A constant distributed load of magnitude 15 N is
applied from start till its end along z-axis.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy.physics.continuum_mechanics.beam import Beam3D
>>> from sympy import symbols
>>> l, E, G, I, A, x = symbols('l, E, G, I, A, x')
>>> b = Beam3D(20, E, G, I, A, x)
>>> b.apply_load(15, start=0, order=0, dir="z")
>>> b.apply_load(12*x, start=0, order=0, dir="y")
>>> b.bc_deflection = [(0, [0, 0, 0]), (20, [0, 0, 0])]
>>> R1, R2, R3, R4 = symbols('R1, R2, R3, R4')
>>> b.apply_load(R1, start=0, order=-1, dir="z")
>>> b.apply_load(R2, start=20, order=-1, dir="z")
>>> b.apply_load(R3, start=0, order=-1, dir="y")
>>> b.apply_load(R4, start=20, order=-1, dir="y")
>>> b.solve_for_reaction_loads(R1, R2, R3, R4)
>>> b.plot_shear_force()
PlotGrid object containing:
Plot[0]:Plot object containing:
[0]: cartesian line: 0 for x over (0.0, 20.0)
Plot[1]:Plot object containing:
[0]: cartesian line: -6*x**2 for x over (0.0, 20.0)
Plot[2]:Plot object containing:
[0]: cartesian line: -15*x for x over (0.0, 20.0)
"""
dir = dir.lower()
# For shear force along x direction
if dir == "x":
Px = self._plot_shear_force('x', subs)
return Px.show()
# For shear force along y direction
elif dir == "y":
Py = self._plot_shear_force('y', subs)
return Py.show()
# For shear force along z direction
elif dir == "z":
Pz = self._plot_shear_force('z', subs)
return Pz.show()
# For shear force along all direction
else:
Px = self._plot_shear_force('x', subs)
Py = self._plot_shear_force('y', subs)
Pz = self._plot_shear_force('z', subs)
return PlotGrid(3, 1, Px, Py, Pz)
def _plot_bending_moment(self, dir, subs=None):
bending_moment = self.bending_moment()
if dir == 'x':
dir_num = 0
color = 'g'
elif dir == 'y':
dir_num = 1
color = 'c'
elif dir == 'z':
dir_num = 2
color = 'm'
if subs is None:
subs = {}
for sym in bending_moment[dir_num].atoms(Symbol):
if sym != self.variable and sym not in subs:
raise ValueError('Value of %s was not passed.' %sym)
if self.length in subs:
length = subs[self.length]
else:
length = self.length
return plot(bending_moment[dir_num].subs(subs), (self.variable, 0, length), show = False, title='Bending Moment along %c direction'%dir,
xlabel=r'$\mathrm{X}$', ylabel=r'$\mathrm{M(%c)}$'%dir, line_color=color)
def plot_bending_moment(self, dir="all", subs=None):
"""
Returns a plot for bending moment along all three directions
present in the Beam object.
Parameters
==========
dir : string (default : "all")
Direction along which bending moment plot is required.
If no direction is specified, all plots are displayed.
subs : dictionary
Python dictionary containing Symbols as key and their
corresponding values.
Examples
========
There is a beam of length 20 meters. It it supported by rollers
at of its end. A linear load having slope equal to 12 is applied
along y-axis. A constant distributed load of magnitude 15 N is
applied from start till its end along z-axis.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy.physics.continuum_mechanics.beam import Beam3D
>>> from sympy import symbols
>>> l, E, G, I, A, x = symbols('l, E, G, I, A, x')
>>> b = Beam3D(20, E, G, I, A, x)
>>> b.apply_load(15, start=0, order=0, dir="z")
>>> b.apply_load(12*x, start=0, order=0, dir="y")
>>> b.bc_deflection = [(0, [0, 0, 0]), (20, [0, 0, 0])]
>>> R1, R2, R3, R4 = symbols('R1, R2, R3, R4')
>>> b.apply_load(R1, start=0, order=-1, dir="z")
>>> b.apply_load(R2, start=20, order=-1, dir="z")
>>> b.apply_load(R3, start=0, order=-1, dir="y")
>>> b.apply_load(R4, start=20, order=-1, dir="y")
>>> b.solve_for_reaction_loads(R1, R2, R3, R4)
>>> b.plot_bending_moment()
PlotGrid object containing:
Plot[0]:Plot object containing:
[0]: cartesian line: 0 for x over (0.0, 20.0)
Plot[1]:Plot object containing:
[0]: cartesian line: -15*x**2/2 for x over (0.0, 20.0)
Plot[2]:Plot object containing:
[0]: cartesian line: 2*x**3 for x over (0.0, 20.0)
"""
dir = dir.lower()
# For bending moment along x direction
if dir == "x":
Px = self._plot_bending_moment('x', subs)
return Px.show()
# For bending moment along y direction
elif dir == "y":
Py = self._plot_bending_moment('y', subs)
return Py.show()
# For bending moment along z direction
elif dir == "z":
Pz = self._plot_bending_moment('z', subs)
return Pz.show()
# For bending moment along all direction
else:
Px = self._plot_bending_moment('x', subs)
Py = self._plot_bending_moment('y', subs)
Pz = self._plot_bending_moment('z', subs)
return PlotGrid(3, 1, Px, Py, Pz)
def _plot_slope(self, dir, subs=None):
slope = self.slope()
if dir == 'x':
dir_num = 0
color = 'b'
elif dir == 'y':
dir_num = 1
color = 'm'
elif dir == 'z':
dir_num = 2
color = 'g'
if subs is None:
subs = {}
for sym in slope[dir_num].atoms(Symbol):
if sym != self.variable and sym not in subs:
raise ValueError('Value of %s was not passed.' %sym)
if self.length in subs:
length = subs[self.length]
else:
length = self.length
return plot(slope[dir_num].subs(subs), (self.variable, 0, length), show = False, title='Slope along %c direction'%dir,
xlabel=r'$\mathrm{X}$', ylabel=r'$\mathrm{\theta(%c)}$'%dir, line_color=color)
def plot_slope(self, dir="all", subs=None):
"""
Returns a plot for Slope along all three directions
present in the Beam object.
Parameters
==========
dir : string (default : "all")
Direction along which Slope plot is required.
If no direction is specified, all plots are displayed.
subs : dictionary
Python dictionary containing Symbols as keys and their
corresponding values.
Examples
========
There is a beam of length 20 meters. It it supported by rollers
at of its end. A linear load having slope equal to 12 is applied
along y-axis. A constant distributed load of magnitude 15 N is
applied from start till its end along z-axis.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy.physics.continuum_mechanics.beam import Beam3D
>>> from sympy import symbols
>>> l, E, G, I, A, x = symbols('l, E, G, I, A, x')
>>> b = Beam3D(20, 40, 21, 100, 25, x)
>>> b.apply_load(15, start=0, order=0, dir="z")
>>> b.apply_load(12*x, start=0, order=0, dir="y")
>>> b.bc_deflection = [(0, [0, 0, 0]), (20, [0, 0, 0])]
>>> R1, R2, R3, R4 = symbols('R1, R2, R3, R4')
>>> b.apply_load(R1, start=0, order=-1, dir="z")
>>> b.apply_load(R2, start=20, order=-1, dir="z")
>>> b.apply_load(R3, start=0, order=-1, dir="y")
>>> b.apply_load(R4, start=20, order=-1, dir="y")
>>> b.solve_for_reaction_loads(R1, R2, R3, R4)
>>> b.solve_slope_deflection()
>>> b.plot_slope()
PlotGrid object containing:
Plot[0]:Plot object containing:
[0]: cartesian line: 0 for x over (0.0, 20.0)
Plot[1]:Plot object containing:
[0]: cartesian line: -x**3/1600 + 3*x**2/160 - x/8 for x over (0.0, 20.0)
Plot[2]:Plot object containing:
[0]: cartesian line: x**4/8000 - 19*x**2/172 + 52*x/43 for x over (0.0, 20.0)
"""
dir = dir.lower()
# For Slope along x direction
if dir == "x":
Px = self._plot_slope('x', subs)
return Px.show()
# For Slope along y direction
elif dir == "y":
Py = self._plot_slope('y', subs)
return Py.show()
# For Slope along z direction
elif dir == "z":
Pz = self._plot_slope('z', subs)
return Pz.show()
# For Slope along all direction
else:
Px = self._plot_slope('x', subs)
Py = self._plot_slope('y', subs)
Pz = self._plot_slope('z', subs)
return PlotGrid(3, 1, Px, Py, Pz)
def _plot_deflection(self, dir, subs=None):
deflection = self.deflection()
if dir == 'x':
dir_num = 0
color = 'm'
elif dir == 'y':
dir_num = 1
color = 'r'
elif dir == 'z':
dir_num = 2
color = 'c'
if subs is None:
subs = {}
for sym in deflection[dir_num].atoms(Symbol):
if sym != self.variable and sym not in subs:
raise ValueError('Value of %s was not passed.' %sym)
if self.length in subs:
length = subs[self.length]
else:
length = self.length
return plot(deflection[dir_num].subs(subs), (self.variable, 0, length), show = False, title='Deflection along %c direction'%dir,
xlabel=r'$\mathrm{X}$', ylabel=r'$\mathrm{\delta(%c)}$'%dir, line_color=color)
def plot_deflection(self, dir="all", subs=None):
"""
Returns a plot for Deflection along all three directions
present in the Beam object.
Parameters
==========
dir : string (default : "all")
Direction along which deflection plot is required.
If no direction is specified, all plots are displayed.
subs : dictionary
Python dictionary containing Symbols as keys and their
corresponding values.
Examples
========
There is a beam of length 20 meters. It it supported by rollers
at of its end. A linear load having slope equal to 12 is applied
along y-axis. A constant distributed load of magnitude 15 N is
applied from start till its end along z-axis.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy.physics.continuum_mechanics.beam import Beam3D
>>> from sympy import symbols
>>> l, E, G, I, A, x = symbols('l, E, G, I, A, x')
>>> b = Beam3D(20, 40, 21, 100, 25, x)
>>> b.apply_load(15, start=0, order=0, dir="z")
>>> b.apply_load(12*x, start=0, order=0, dir="y")
>>> b.bc_deflection = [(0, [0, 0, 0]), (20, [0, 0, 0])]
>>> R1, R2, R3, R4 = symbols('R1, R2, R3, R4')
>>> b.apply_load(R1, start=0, order=-1, dir="z")
>>> b.apply_load(R2, start=20, order=-1, dir="z")
>>> b.apply_load(R3, start=0, order=-1, dir="y")
>>> b.apply_load(R4, start=20, order=-1, dir="y")
>>> b.solve_for_reaction_loads(R1, R2, R3, R4)
>>> b.solve_slope_deflection()
>>> b.plot_deflection()
PlotGrid object containing:
Plot[0]:Plot object containing:
[0]: cartesian line: 0 for x over (0.0, 20.0)
Plot[1]:Plot object containing:
[0]: cartesian line: x**5/40000 - 4013*x**3/90300 + 26*x**2/43 + 1520*x/903 for x over (0.0, 20.0)
Plot[2]:Plot object containing:
[0]: cartesian line: x**4/6400 - x**3/160 + 27*x**2/560 + 2*x/7 for x over (0.0, 20.0)
"""
dir = dir.lower()
# For deflection along x direction
if dir == "x":
Px = self._plot_deflection('x', subs)
return Px.show()
# For deflection along y direction
elif dir == "y":
Py = self._plot_deflection('y', subs)
return Py.show()
# For deflection along z direction
elif dir == "z":
Pz = self._plot_deflection('z', subs)
return Pz.show()
# For deflection along all direction
else:
Px = self._plot_deflection('x', subs)
Py = self._plot_deflection('y', subs)
Pz = self._plot_deflection('z', subs)
return PlotGrid(3, 1, Px, Py, Pz)
def plot_loading_results(self, dir='x', subs=None):
"""
Returns a subplot of Shear Force, Bending Moment,
Slope and Deflection of the Beam object along the direction specified.
Parameters
==========
dir : string (default : "x")
Direction along which plots are required.
If no direction is specified, plots along x-axis are displayed.
subs : dictionary
Python dictionary containing Symbols as key and their
corresponding values.
Examples
========
There is a beam of length 20 meters. It it supported by rollers
at of its end. A linear load having slope equal to 12 is applied
along y-axis. A constant distributed load of magnitude 15 N is
applied from start till its end along z-axis.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy.physics.continuum_mechanics.beam import Beam3D
>>> from sympy import symbols
>>> l, E, G, I, A, x = symbols('l, E, G, I, A, x')
>>> b = Beam3D(20, E, G, I, A, x)
>>> subs = {E:40, G:21, I:100, A:25}
>>> b.apply_load(15, start=0, order=0, dir="z")
>>> b.apply_load(12*x, start=0, order=0, dir="y")
>>> b.bc_deflection = [(0, [0, 0, 0]), (20, [0, 0, 0])]
>>> R1, R2, R3, R4 = symbols('R1, R2, R3, R4')
>>> b.apply_load(R1, start=0, order=-1, dir="z")
>>> b.apply_load(R2, start=20, order=-1, dir="z")
>>> b.apply_load(R3, start=0, order=-1, dir="y")
>>> b.apply_load(R4, start=20, order=-1, dir="y")
>>> b.solve_for_reaction_loads(R1, R2, R3, R4)
>>> b.solve_slope_deflection()
>>> b.plot_loading_results('y',subs)
PlotGrid object containing:
Plot[0]:Plot object containing:
[0]: cartesian line: -6*x**2 for x over (0.0, 20.0)
Plot[1]:Plot object containing:
[0]: cartesian line: -15*x**2/2 for x over (0.0, 20.0)
Plot[2]:Plot object containing:
[0]: cartesian line: -x**3/1600 + 3*x**2/160 - x/8 for x over (0.0, 20.0)
Plot[3]:Plot object containing:
[0]: cartesian line: x**5/40000 - 4013*x**3/90300 + 26*x**2/43 + 1520*x/903 for x over (0.0, 20.0)
"""
dir = dir.lower()
if subs is None:
subs = {}
ax1 = self._plot_shear_force(dir, subs)
ax2 = self._plot_bending_moment(dir, subs)
ax3 = self._plot_slope(dir, subs)
ax4 = self._plot_deflection(dir, subs)
return PlotGrid(4, 1, ax1, ax2, ax3, ax4)
def _plot_shear_stress(self, dir, subs=None):
shear_stress = self.shear_stress()
if dir == 'x':
dir_num = 0
color = 'r'
elif dir == 'y':
dir_num = 1
color = 'g'
elif dir == 'z':
dir_num = 2
color = 'b'
if subs is None:
subs = {}
for sym in shear_stress[dir_num].atoms(Symbol):
if sym != self.variable and sym not in subs:
raise ValueError('Value of %s was not passed.' %sym)
if self.length in subs:
length = subs[self.length]
else:
length = self.length
return plot(shear_stress[dir_num].subs(subs), (self.variable, 0, length), show = False, title='Shear stress along %c direction'%dir,
xlabel=r'$\mathrm{X}$', ylabel=r'$\tau(%c)$'%dir, line_color=color)
def plot_shear_stress(self, dir="all", subs=None):
"""
Returns a plot for Shear Stress along all three directions
present in the Beam object.
Parameters
==========
dir : string (default : "all")
Direction along which shear stress plot is required.
If no direction is specified, all plots are displayed.
subs : dictionary
Python dictionary containing Symbols as key and their
corresponding values.
Examples
========
There is a beam of length 20 meters and area of cross section 2 square
meters. It it supported by rollers at of its end. A linear load having
slope equal to 12 is applied along y-axis. A constant distributed load
of magnitude 15 N is applied from start till its end along z-axis.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy.physics.continuum_mechanics.beam import Beam3D
>>> from sympy import symbols
>>> l, E, G, I, A, x = symbols('l, E, G, I, A, x')
>>> b = Beam3D(20, E, G, I, 2, x)
>>> b.apply_load(15, start=0, order=0, dir="z")
>>> b.apply_load(12*x, start=0, order=0, dir="y")
>>> b.bc_deflection = [(0, [0, 0, 0]), (20, [0, 0, 0])]
>>> R1, R2, R3, R4 = symbols('R1, R2, R3, R4')
>>> b.apply_load(R1, start=0, order=-1, dir="z")
>>> b.apply_load(R2, start=20, order=-1, dir="z")
>>> b.apply_load(R3, start=0, order=-1, dir="y")
>>> b.apply_load(R4, start=20, order=-1, dir="y")
>>> b.solve_for_reaction_loads(R1, R2, R3, R4)
>>> b.plot_shear_stress()
PlotGrid object containing:
Plot[0]:Plot object containing:
[0]: cartesian line: 0 for x over (0.0, 20.0)
Plot[1]:Plot object containing:
[0]: cartesian line: -3*x**2 for x over (0.0, 20.0)
Plot[2]:Plot object containing:
[0]: cartesian line: -15*x/2 for x over (0.0, 20.0)
"""
dir = dir.lower()
# For shear stress along x direction
if dir == "x":
Px = self._plot_shear_stress('x', subs)
return Px.show()
# For shear stress along y direction
elif dir == "y":
Py = self._plot_shear_stress('y', subs)
return Py.show()
# For shear stress along z direction
elif dir == "z":
Pz = self._plot_shear_stress('z', subs)
return Pz.show()
# For shear stress along all direction
else:
Px = self._plot_shear_stress('x', subs)
Py = self._plot_shear_stress('y', subs)
Pz = self._plot_shear_stress('z', subs)
return PlotGrid(3, 1, Px, Py, Pz)
def _max_shear_force(self, dir):
"""
Helper function for max_shear_force().
"""
dir = dir.lower()
if dir == 'x':
dir_num = 0
elif dir == 'y':
dir_num = 1
elif dir == 'z':
dir_num = 2
if not self.shear_force()[dir_num]:
return (0,0)
# To restrict the range within length of the Beam
load_curve = Piecewise((float("nan"), self.variable<=0),
(self._load_vector[dir_num], self.variable<self.length),
(float("nan"), True))
points = solve(load_curve.rewrite(Piecewise), self.variable,
domain=S.Reals)
points.append(0)
points.append(self.length)
shear_curve = self.shear_force()[dir_num]
shear_values = [shear_curve.subs(self.variable, x) for x in points]
shear_values = list(map(abs, shear_values))
max_shear = max(shear_values)
return (points[shear_values.index(max_shear)], max_shear)
def max_shear_force(self):
"""
Returns point of max shear force and its corresponding shear value
along all directions in a Beam object as a list.
solve_for_reaction_loads() must be called before using this function.
Examples
========
There is a beam of length 20 meters. It it supported by rollers
at of its end. A linear load having slope equal to 12 is applied
along y-axis. A constant distributed load of magnitude 15 N is
applied from start till its end along z-axis.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy.physics.continuum_mechanics.beam import Beam3D
>>> from sympy import symbols
>>> l, E, G, I, A, x = symbols('l, E, G, I, A, x')
>>> b = Beam3D(20, 40, 21, 100, 25, x)
>>> b.apply_load(15, start=0, order=0, dir="z")
>>> b.apply_load(12*x, start=0, order=0, dir="y")
>>> b.bc_deflection = [(0, [0, 0, 0]), (20, [0, 0, 0])]
>>> R1, R2, R3, R4 = symbols('R1, R2, R3, R4')
>>> b.apply_load(R1, start=0, order=-1, dir="z")
>>> b.apply_load(R2, start=20, order=-1, dir="z")
>>> b.apply_load(R3, start=0, order=-1, dir="y")
>>> b.apply_load(R4, start=20, order=-1, dir="y")
>>> b.solve_for_reaction_loads(R1, R2, R3, R4)
>>> b.max_shear_force()
[(0, 0), (20, 2400), (20, 300)]
"""
max_shear = []
max_shear.append(self._max_shear_force('x'))
max_shear.append(self._max_shear_force('y'))
max_shear.append(self._max_shear_force('z'))
return max_shear
def _max_bending_moment(self, dir):
"""
Helper function for max_bending_moment().
"""
dir = dir.lower()
if dir == 'x':
dir_num = 0
elif dir == 'y':
dir_num = 1
elif dir == 'z':
dir_num = 2
if not self.bending_moment()[dir_num]:
return (0,0)
# To restrict the range within length of the Beam
shear_curve = Piecewise((float("nan"), self.variable<=0),
(self.shear_force()[dir_num], self.variable<self.length),
(float("nan"), True))
points = solve(shear_curve.rewrite(Piecewise), self.variable,
domain=S.Reals)
points.append(0)
points.append(self.length)
bending_moment_curve = self.bending_moment()[dir_num]
bending_moments = [bending_moment_curve.subs(self.variable, x) for x in points]
bending_moments = list(map(abs, bending_moments))
max_bending_moment = max(bending_moments)
return (points[bending_moments.index(max_bending_moment)], max_bending_moment)
def max_bending_moment(self):
"""
Returns point of max bending moment and its corresponding bending moment value
along all directions in a Beam object as a list.
solve_for_reaction_loads() must be called before using this function.
Examples
========
There is a beam of length 20 meters. It it supported by rollers
at of its end. A linear load having slope equal to 12 is applied
along y-axis. A constant distributed load of magnitude 15 N is
applied from start till its end along z-axis.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy.physics.continuum_mechanics.beam import Beam3D
>>> from sympy import symbols
>>> l, E, G, I, A, x = symbols('l, E, G, I, A, x')
>>> b = Beam3D(20, 40, 21, 100, 25, x)
>>> b.apply_load(15, start=0, order=0, dir="z")
>>> b.apply_load(12*x, start=0, order=0, dir="y")
>>> b.bc_deflection = [(0, [0, 0, 0]), (20, [0, 0, 0])]
>>> R1, R2, R3, R4 = symbols('R1, R2, R3, R4')
>>> b.apply_load(R1, start=0, order=-1, dir="z")
>>> b.apply_load(R2, start=20, order=-1, dir="z")
>>> b.apply_load(R3, start=0, order=-1, dir="y")
>>> b.apply_load(R4, start=20, order=-1, dir="y")
>>> b.solve_for_reaction_loads(R1, R2, R3, R4)
>>> b.max_bending_moment()
[(0, 0), (20, 3000), (20, 16000)]
"""
max_bmoment = []
max_bmoment.append(self._max_bending_moment('x'))
max_bmoment.append(self._max_bending_moment('y'))
max_bmoment.append(self._max_bending_moment('z'))
return max_bmoment
max_bmoment = max_bending_moment
def _max_deflection(self, dir):
"""
Helper function for max_Deflection()
"""
dir = dir.lower()
if dir == 'x':
dir_num = 0
elif dir == 'y':
dir_num = 1
elif dir == 'z':
dir_num = 2
if not self.deflection()[dir_num]:
return (0,0)
# To restrict the range within length of the Beam
slope_curve = Piecewise((float("nan"), self.variable<=0),
(self.slope()[dir_num], self.variable<self.length),
(float("nan"), True))
points = solve(slope_curve.rewrite(Piecewise), self.variable,
domain=S.Reals)
points.append(0)
points.append(self._length)
deflection_curve = self.deflection()[dir_num]
deflections = [deflection_curve.subs(self.variable, x) for x in points]
deflections = list(map(abs, deflections))
max_def = max(deflections)
return (points[deflections.index(max_def)], max_def)
def max_deflection(self):
"""
Returns point of max deflection and its corresponding deflection value
along all directions in a Beam object as a list.
solve_for_reaction_loads() and solve_slope_deflection() must be called
before using this function.
Examples
========
There is a beam of length 20 meters. It it supported by rollers
at of its end. A linear load having slope equal to 12 is applied
along y-axis. A constant distributed load of magnitude 15 N is
applied from start till its end along z-axis.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy.physics.continuum_mechanics.beam import Beam3D
>>> from sympy import symbols
>>> l, E, G, I, A, x = symbols('l, E, G, I, A, x')
>>> b = Beam3D(20, 40, 21, 100, 25, x)
>>> b.apply_load(15, start=0, order=0, dir="z")
>>> b.apply_load(12*x, start=0, order=0, dir="y")
>>> b.bc_deflection = [(0, [0, 0, 0]), (20, [0, 0, 0])]
>>> R1, R2, R3, R4 = symbols('R1, R2, R3, R4')
>>> b.apply_load(R1, start=0, order=-1, dir="z")
>>> b.apply_load(R2, start=20, order=-1, dir="z")
>>> b.apply_load(R3, start=0, order=-1, dir="y")
>>> b.apply_load(R4, start=20, order=-1, dir="y")
>>> b.solve_for_reaction_loads(R1, R2, R3, R4)
>>> b.solve_slope_deflection()
>>> b.max_deflection()
[(0, 0), (10, 495/14), (-10 + 10*sqrt(10793)/43, (10 - 10*sqrt(10793)/43)**3/160 - 20/7 + (10 - 10*sqrt(10793)/43)**4/6400 + 20*sqrt(10793)/301 + 27*(10 - 10*sqrt(10793)/43)**2/560)]
"""
max_def = []
max_def.append(self._max_deflection('x'))
max_def.append(self._max_deflection('y'))
max_def.append(self._max_deflection('z'))
return max_def
|
PypiClean
|
/pyprika_client-0.2.1-py3-none-any.whl/pyprika/framework/specifications.py
|
from abc import ABC
class Specification:
def __and__(self, other):
return And(self, other)
def __or__(self, other):
return Or(self, other)
def __xor__(self, other):
return Xor(self, other)
def __invert__(self):
return Invert(self)
def is_satisfied_by(self, candidate):
raise NotImplementedError()
def remainder_unsatisfied_by(self, candidate):
if self.is_satisfied_by(candidate):
return None
else:
return self
class CompositeSpecification(Specification, ABC):
pass
class MultaryCompositeSpecification(CompositeSpecification, ABC):
def __init__(self, *specifications):
self.specifications = specifications
class And(MultaryCompositeSpecification):
def __and__(self, other):
if isinstance(other, And):
self.specifications += other.specifications
else:
self.specifications += (other,)
return self
def is_satisfied_by(self, candidate):
satisfied = all([
specification.is_satisfied_by(candidate)
for specification in self.specifications
])
return satisfied
def remainder_unsatisfied_by(self, candidate):
non_satisfied = [
specification
for specification in self.specifications
if not specification.is_satisfied_by(candidate)
]
if not non_satisfied:
return None
if len(non_satisfied) == 1:
return non_satisfied[0]
if len(non_satisfied) == len(self.specifications):
return self
return And(*non_satisfied)
class UnaryCompositeSpecification(CompositeSpecification, ABC):
def __init__(self, specification):
self.specification = specification
class Invert(UnaryCompositeSpecification):
def is_satisfied_by(self, candidate):
return not self.specification.is_satisfied_by(candidate)
class Or(MultaryCompositeSpecification):
def __or__(self, other):
if isinstance(other, Or):
self.specifications += other.specifications
else:
self.specifications += (other,)
return self
def is_satisfied_by(self, candidate):
satisfied = any([
specification.is_satisfied_by(candidate)
for specification in self.specifications
])
return satisfied
class BinaryCompositeSpecification(CompositeSpecification, ABC):
def __init__(self, left, right):
self.left = left
self.right = right
class Xor(BinaryCompositeSpecification):
def is_satisfied_by(self, candidate):
return (
self.left.is_satisfied_by(candidate) ^
self.right.is_satisfied_by(candidate)
)
class NullaryCompositeSpecification(CompositeSpecification, ABC):
pass
class FalseSpecification(NullaryCompositeSpecification):
def is_satisfied_by(self, candidate):
return False
class TrueSpecification(NullaryCompositeSpecification):
def is_satisfied_by(self, candidate):
return True
|
PypiClean
|
/prob_dis-1.0.tar.gz/prob_dis-1.0/prob_dis/gauss_dis.py
|
import math
import matplotlib.pyplot as plt
from .general_dis import Distribution
class Gaussian(Distribution):
""" Gaussian distribution class for calculating and
visualizing a Gaussian distribution.
Attributes:
mean (float) representing the mean value of the distribution
stdev (float) representing the standard deviation of the distribution
data_list (list of floats) a list of floats extracted from the data file
"""
def __init__(self, mu=0, sigma=1):
Distribution.__init__(self, mu, sigma)
def calculate_mean(self):
"""Function to calculate the mean of the data set.
Args:
None
Returns:
float: mean of the data set
"""
avg = 1.0 * sum(self.data) / len(self.data)
self.mean = avg
return self.mean
def calculate_stdev(self, sample=True):
"""Function to calculate the standard deviation of the data set.
Args:
sample (bool): whether the data represents a sample or population
Returns:
float: standard deviation of the data set
"""
if sample:
n = len(self.data) - 1
else:
n = len(self.data)
mean = self.calculate_mean()
sigma = 0
for d in self.data:
sigma += (d - mean) ** 2
sigma = math.sqrt(sigma / n)
self.stdev = sigma
return self.stdev
def plot_histogram(self):
"""Function to output a histogram of the instance variable data using
matplotlib pyplot library.
Args:
None
Returns:
None
"""
plt.hist(self.data)
plt.title('Histogram of Data')
plt.xlabel('data')
plt.ylabel('count')
def pdf(self, x):
"""Probability density function calculator for the gaussian distribution.
Args:
x (float): point for calculating the probability density function
Returns:
float: probability density function output
"""
return (1.0 / (self.stdev * math.sqrt(2*math.pi))) * math.exp(-0.5*((x - self.mean) / self.stdev) ** 2)
def plot_histogram_pdf(self, n_spaces = 50):
"""Function to plot the normalized histogram of the data and a plot of the
probability density function along the same range
Args:
n_spaces (int): number of data points
Returns:
list: x values for the pdf plot
list: y values for the pdf plot
"""
mu = self.mean
sigma = self.stdev
min_range = min(self.data)
max_range = max(self.data)
# calculates the interval between x values
interval = 1.0 * (max_range - min_range) / n_spaces
x = []
y = []
# calculate the x values to visualize
for i in range(n_spaces):
tmp = min_range + interval*i
x.append(tmp)
y.append(self.pdf(tmp))
# make the plots
fig, axes = plt.subplots(2,sharex=True)
fig.subplots_adjust(hspace=.5)
axes[0].hist(self.data, density=True)
axes[0].set_title('Normed Histogram of Data')
axes[0].set_ylabel('Density')
axes[1].plot(x, y)
axes[1].set_title('Normal Distribution for \n Sample Mean and Sample Standard Deviation')
axes[0].set_ylabel('Density')
plt.show()
return x, y
def __add__(self, other):
"""Function to add together two Gaussian distributions
Args:
other (Gaussian): Gaussian instance
Returns:
Gaussian: Gaussian distribution
"""
result = Gaussian()
result.mean = self.mean + other.mean
result.stdev = math.sqrt(self.stdev ** 2 + other.stdev ** 2)
return result
def __repr__(self):
"""Function to output the characteristics of the Gaussian instance
Args:
None
Returns:
string: characteristics of the Gaussian
"""
return "mean {}, standard deviation {}".format(self.mean, self.stdev)
|
PypiClean
|
/polyglot-code-1.0.10.tar.gz/polyglot-code-1.0.10/README.rst
|
Polyglot Code!
==============
|python| |javascript|
Clone me!
---------------
Clone or download the project:
.. code:: sh
git clone https://github.com/cosasdepuma/polyglot-code.git Polyglot
[ or ]
git clone https://gitlab.com/cosasdepuma/polyglot-code.git Polyglot
cd Polyglot
Test it!
------------------------
Run all the unitary tests through NodeJS:
.. code:: sh
cd javascript
npm i
npm test
Run all the unitary tests using Python:
.. code:: sh
cd python
python2 test/test.py
python3 test/test.py
Requirements
-----------------------------
Polyglot does not require anything!
Usage
--------------
Import **Polyglot** into your NodeJS file:
.. code:: js
require('polyglot-code')
Import **Polyglot** into your Python file:
.. code:: py
import polyglot
Available Languages!
--------------------------------------
JavaScript
-------------------
.. code:: js
// No new-line logs
printf('C')
IO.write('Elixir')
fmt.Printf('Go')
putStr('Haskell')
io.write('Lua')
fprintf('Matlab')
Write('Pascal')
write('Pike')
// New-line logs
trace('ActionScript')
WriteF('AmigaE')
Response.Write('ASP')
putschar('B')
puts('C')
Console.println('C#')
Console.WriteLine('C#')
writeln('D')
std.stdio.writeln('D')
print('Dart')
Writeln('Delphi')
IO.puts('Elixir')
fmt.Println('Go')
putStrLn('Haskell')
System.out.println('Java')
console.log('JavaScript')
println('Kotlin')
disp('Matlab')
echo('Nim')
NSLog('Objetive-C')
writeln('Pascal')
writeln('Pike')
println('Processing')
PrintN('PureBasic')
print('Python')
p('Ruby')
Python
--------------
.. code:: py
# No new-line logs
printf("C")
cout << "C++"
IO.write("Elixir")
fmt.Print("Go")
fmt.Printf("Go")
putStr("Haskell")
System.out.printf("Java")
io.write("Lua")
fprintf("Matlab")
write("Pike")
System.Console.Write("VB.NET")
# New-line logs
print("Python")
trace("ActionScript")
Ada.Text_IO.Put_Line("Ada")
WriteF("AmigaE")
Response.Write("ASP")
putchar("B")
puts("C")
Console.println("C#")
Console.WriteLine("C#")
cout << "C++" << endl
Qout("Clipper")
writeln("D")
std.stdio.writeln("D")
Writeln("Delphi")
IO.puts("Elixir")
fmt.Println("Go")
putStrLn("Haskell")
System.out.println("Java")
console.log("JavaScript")
println("Kotlin")
Print["Mathematica"]
disp("Matlab")
echo("Nim")
NSLog("Objetive-C")
writeln("Pascal")
writeln("Pike")
PrintN("PureBasic")
p("Ruby")
Debug.Log("Unity3D")
System.Console.WriteLine("VB.NET")
Support the developer!
--------------------------------
Everything I do and publish can be used for free whenever I receive my
corresponding merit.
Scheme of contents
----------------------------------
::
Polyglot-Code
< Repository >
|__ .gitignore
|__ .repository
|__ odd.jpeg
|__ LICENSE
|__ README.md
< JavaScript >
|__ javascript
|__ package.json
|__ lib
|__ polyglot.js
|__ test
|__ test.js
< Python >
|__ python
|__ lib
|__ __init__.py
|__ polyglot.py
|__ test
|__ test.py
--------------
Please contact with `Kike Puma <https://linkedin.com/in/kikepuma>`__ if
you need more information.
.. |python| image:: https://img.shields.io/badge/language-python-blue.svg?style=for-the-badge
.. |javascript| image:: https://img.shields.io/badge/language-javascript-yellow.svg?style=for-the-badge
|
PypiClean
|
/azure-communication-identity-1.4.0b1.zip/azure-communication-identity-1.4.0b1/azure/communication/identity/_generated/_configuration.py
|
from typing import Any
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
VERSION = "unknown"
class CommunicationIdentityClientConfiguration(Configuration): # pylint: disable=too-many-instance-attributes
"""Configuration for CommunicationIdentityClient.
Note that all parameters used to create this instance are saved as instance
attributes.
:param endpoint: The communication resource, for example
https://my-resource.communication.azure.com. Required.
:type endpoint: str
:keyword api_version: Api Version. Default value is "2022-10-01". Note that overriding this
default value may result in unsupported behavior.
:paramtype api_version: str
"""
def __init__(self, endpoint: str, **kwargs: Any) -> None:
super(CommunicationIdentityClientConfiguration, self).__init__(**kwargs)
api_version = kwargs.pop("api_version", "2022-10-01") # type: str
if endpoint is None:
raise ValueError("Parameter 'endpoint' must not be None.")
self.endpoint = endpoint
self.api_version = api_version
kwargs.setdefault('sdk_moniker', 'communicationidentityclient/{}'.format(VERSION))
self._configure(**kwargs)
def _configure(
self,
**kwargs # type: Any
):
# type: (...) -> None
self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
self.http_logging_policy = kwargs.get('http_logging_policy') or policies.HttpLoggingPolicy(**kwargs)
self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs)
self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs)
self.authentication_policy = kwargs.get('authentication_policy')
|
PypiClean
|
/pulumi_aws-6.1.0a1693529760.tar.gz/pulumi_aws-6.1.0a1693529760/pulumi_aws/ses/template.py
|
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['TemplateArgs', 'Template']
@pulumi.input_type
class TemplateArgs:
def __init__(__self__, *,
html: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
subject: Optional[pulumi.Input[str]] = None,
text: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a Template resource.
:param pulumi.Input[str] html: The HTML body of the email. Must be less than 500KB in size, including both the text and HTML parts.
:param pulumi.Input[str] name: The name of the template. Cannot exceed 64 characters. You will refer to this name when you send email.
:param pulumi.Input[str] subject: The subject line of the email.
:param pulumi.Input[str] text: The email body that will be visible to recipients whose email clients do not display HTML. Must be less than 500KB in size, including both the text and HTML parts.
"""
if html is not None:
pulumi.set(__self__, "html", html)
if name is not None:
pulumi.set(__self__, "name", name)
if subject is not None:
pulumi.set(__self__, "subject", subject)
if text is not None:
pulumi.set(__self__, "text", text)
@property
@pulumi.getter
def html(self) -> Optional[pulumi.Input[str]]:
"""
The HTML body of the email. Must be less than 500KB in size, including both the text and HTML parts.
"""
return pulumi.get(self, "html")
@html.setter
def html(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "html", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the template. Cannot exceed 64 characters. You will refer to this name when you send email.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def subject(self) -> Optional[pulumi.Input[str]]:
"""
The subject line of the email.
"""
return pulumi.get(self, "subject")
@subject.setter
def subject(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "subject", value)
@property
@pulumi.getter
def text(self) -> Optional[pulumi.Input[str]]:
"""
The email body that will be visible to recipients whose email clients do not display HTML. Must be less than 500KB in size, including both the text and HTML parts.
"""
return pulumi.get(self, "text")
@text.setter
def text(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "text", value)
@pulumi.input_type
class _TemplateState:
def __init__(__self__, *,
arn: Optional[pulumi.Input[str]] = None,
html: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
subject: Optional[pulumi.Input[str]] = None,
text: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering Template resources.
:param pulumi.Input[str] arn: The ARN of the SES template
:param pulumi.Input[str] html: The HTML body of the email. Must be less than 500KB in size, including both the text and HTML parts.
:param pulumi.Input[str] name: The name of the template. Cannot exceed 64 characters. You will refer to this name when you send email.
:param pulumi.Input[str] subject: The subject line of the email.
:param pulumi.Input[str] text: The email body that will be visible to recipients whose email clients do not display HTML. Must be less than 500KB in size, including both the text and HTML parts.
"""
if arn is not None:
pulumi.set(__self__, "arn", arn)
if html is not None:
pulumi.set(__self__, "html", html)
if name is not None:
pulumi.set(__self__, "name", name)
if subject is not None:
pulumi.set(__self__, "subject", subject)
if text is not None:
pulumi.set(__self__, "text", text)
@property
@pulumi.getter
def arn(self) -> Optional[pulumi.Input[str]]:
"""
The ARN of the SES template
"""
return pulumi.get(self, "arn")
@arn.setter
def arn(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "arn", value)
@property
@pulumi.getter
def html(self) -> Optional[pulumi.Input[str]]:
"""
The HTML body of the email. Must be less than 500KB in size, including both the text and HTML parts.
"""
return pulumi.get(self, "html")
@html.setter
def html(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "html", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the template. Cannot exceed 64 characters. You will refer to this name when you send email.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def subject(self) -> Optional[pulumi.Input[str]]:
"""
The subject line of the email.
"""
return pulumi.get(self, "subject")
@subject.setter
def subject(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "subject", value)
@property
@pulumi.getter
def text(self) -> Optional[pulumi.Input[str]]:
"""
The email body that will be visible to recipients whose email clients do not display HTML. Must be less than 500KB in size, including both the text and HTML parts.
"""
return pulumi.get(self, "text")
@text.setter
def text(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "text", value)
class Template(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
html: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
subject: Optional[pulumi.Input[str]] = None,
text: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Provides a resource to create a SES template.
## Example Usage
```python
import pulumi
import pulumi_aws as aws
my_template = aws.ses.Template("myTemplate",
html="<h1>Hello {{name}},</h1><p>Your favorite animal is {{favoriteanimal}}.</p>",
subject="Greetings, {{name}}!",
text=\"\"\"Hello {{name}},
Your favorite animal is {{favoriteanimal}}.
\"\"\")
```
## Import
Using `pulumi import`, import SES templates using the template name. For example:
```sh
$ pulumi import aws:ses/template:Template MyTemplate MyTemplate
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] html: The HTML body of the email. Must be less than 500KB in size, including both the text and HTML parts.
:param pulumi.Input[str] name: The name of the template. Cannot exceed 64 characters. You will refer to this name when you send email.
:param pulumi.Input[str] subject: The subject line of the email.
:param pulumi.Input[str] text: The email body that will be visible to recipients whose email clients do not display HTML. Must be less than 500KB in size, including both the text and HTML parts.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: Optional[TemplateArgs] = None,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Provides a resource to create a SES template.
## Example Usage
```python
import pulumi
import pulumi_aws as aws
my_template = aws.ses.Template("myTemplate",
html="<h1>Hello {{name}},</h1><p>Your favorite animal is {{favoriteanimal}}.</p>",
subject="Greetings, {{name}}!",
text=\"\"\"Hello {{name}},
Your favorite animal is {{favoriteanimal}}.
\"\"\")
```
## Import
Using `pulumi import`, import SES templates using the template name. For example:
```sh
$ pulumi import aws:ses/template:Template MyTemplate MyTemplate
```
:param str resource_name: The name of the resource.
:param TemplateArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(TemplateArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
html: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
subject: Optional[pulumi.Input[str]] = None,
text: Optional[pulumi.Input[str]] = None,
__props__=None):
opts = pulumi.ResourceOptions.merge(_utilities.get_resource_opts_defaults(), opts)
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = TemplateArgs.__new__(TemplateArgs)
__props__.__dict__["html"] = html
__props__.__dict__["name"] = name
__props__.__dict__["subject"] = subject
__props__.__dict__["text"] = text
__props__.__dict__["arn"] = None
super(Template, __self__).__init__(
'aws:ses/template:Template',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
arn: Optional[pulumi.Input[str]] = None,
html: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
subject: Optional[pulumi.Input[str]] = None,
text: Optional[pulumi.Input[str]] = None) -> 'Template':
"""
Get an existing Template resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] arn: The ARN of the SES template
:param pulumi.Input[str] html: The HTML body of the email. Must be less than 500KB in size, including both the text and HTML parts.
:param pulumi.Input[str] name: The name of the template. Cannot exceed 64 characters. You will refer to this name when you send email.
:param pulumi.Input[str] subject: The subject line of the email.
:param pulumi.Input[str] text: The email body that will be visible to recipients whose email clients do not display HTML. Must be less than 500KB in size, including both the text and HTML parts.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _TemplateState.__new__(_TemplateState)
__props__.__dict__["arn"] = arn
__props__.__dict__["html"] = html
__props__.__dict__["name"] = name
__props__.__dict__["subject"] = subject
__props__.__dict__["text"] = text
return Template(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def arn(self) -> pulumi.Output[str]:
"""
The ARN of the SES template
"""
return pulumi.get(self, "arn")
@property
@pulumi.getter
def html(self) -> pulumi.Output[Optional[str]]:
"""
The HTML body of the email. Must be less than 500KB in size, including both the text and HTML parts.
"""
return pulumi.get(self, "html")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name of the template. Cannot exceed 64 characters. You will refer to this name when you send email.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def subject(self) -> pulumi.Output[Optional[str]]:
"""
The subject line of the email.
"""
return pulumi.get(self, "subject")
@property
@pulumi.getter
def text(self) -> pulumi.Output[Optional[str]]:
"""
The email body that will be visible to recipients whose email clients do not display HTML. Must be less than 500KB in size, including both the text and HTML parts.
"""
return pulumi.get(self, "text")
|
PypiClean
|
/dayu_ffmpeg-0.5.1.tar.gz/dayu_ffmpeg-0.5.1/README.rst
|
dayu_ffmpeg
===========
|pypi| |python| |build status| |github license|
dayu_ffmpeg 是针对ffmpeg 命令行的python 封装。 ffmpeg
的功能相当强大,但是复杂的终端指令往往让人无法使用。dayu_ffmpeg
正好解决了这个问题。 用户可以使用简单的 “流”
的概念搭建自己的处理方式,同时具有下面的特点:
- 通过 >> 运算符,表示stream 的操作
- 拥有ad-hoc 和network 两种操作模式
- 支持ffmpeg complex filter
- 如果用户需要使用的filter 不在默认的filter class 中,可以自行扩展
- 可以将Root Node 保存为ffscipt 的json
格式,方便保存硬盘文件或者是网络通信
ad-hoc 模式
===========
在ad-hoc 模式下,用户可以直接使用节点进行串行操作。
例如,用户希望对一个mov 素材进行下面的操作:
- 绘制2.39 的遮幅
- 缩放到1920x1080 的尺寸
- 写入内嵌reel
- 写入内嵌timecode
- 渲染输出到prores 422 的mov
如果直接使用ffmpeg,那么终端指令会是:
.. code:: shell
"ffmpeg" -y -i "/some/input/file.mov" -filter_complex "[0]drawbox=x=-t:y=0:w=iw+t*2:h=ih:c=black:t=(ih-(iw/2.39))/2[v0],[v0]scale=w=1920:h=1080[v1]" -map [v1] -metadata:s:v:0 reel_name=reelname -timecode 11:22:11:22 -codec:v prores_ks "/some/output/file.mov"
如果使用了dayu_ffmpeg,就会非常的直观。用户将自己的需要的操作一一写入皆可:
.. code:: python
from dayu_ffmpeg input *
result = Input('/some/input/file.mov') >> \
Drawmask(2.39) >> \
Scale(1920, 1080) >> \
Writereel('reelname') >> \
Writetimecode('11:22:11:22') >> \
Codec(video='prores_ks') >> \
Output('/some/output/file.mov')
需要注意的是,在ad-hoc 模式下,只能够支持串行的操作。
也就是说所有的节点都只能拥有一个输入和一个输出。
如果想要使用更加复杂的转码,请使用network 模式
network 模式
============
netowrk 模式会比ad-hoc
复杂一点,但是可以实现更加复杂的转码结构。并且一旦TD 写好了一个network,
那么后续的用户在使用上就非常的简单,相当于提供了一个 “转码模板”。
.. code:: python
from dayu_ffmpeg import *
class TranscodeTemplate(RootNode):
def prepare(self):
# 用户只需要重载prepare() 函数,在这里组织好网络结构
# 留好InputHolder 或者OutputHolder 作为 "接口"
ih1 = self.create_node(InputHolder)
i2 = self.create_node(Input('some_logo.png'))
cf = self.create_node(ComplexFilterGroup)
ih2 = cf.create_node(InputHolder)
ih3 = cf.create_node(InputHolder)
cf.set_input(ih1, 0)
cf.set_input(i2, 1)
over = cf.create_node(Overlay)
over.set_input(ih2, 0)
over.set_input(ih3, 1)
fit = cf.create_node(Fit())
fit.set_input(over)
oh1 = cf.create_node(OutputHolder)
oh1.set_input(fit)
oh2 = self.create_node(OutputHolder)
oh2.set_input(cf)
if __name__ == '__main__':
# 实例化转码的网络
template_root = TranscodeTemplate(name='overlay logo, then fit in HD, finally export to mov')
# 创建输入、输出
input1 = Input('some_input_file.mov')
output1 = Output('some_output_file.mov')
# 用户直接调用,就完成了整个转码,相当于调用"模板"
network_mode_cmd = template_root(input_list=[input1], output_list=[output1])
print network_mode_cmd.cmd()
使用自定义filter
================
如果默认的filter
中不存在需要使用的filter。用户可以有下面的几种方法自行扩展:
- 调用GeneralUnaryFilter
- 继承BaseFilterNode class,自行实现
- 继承BasePackedFilterNode, 将多种filter 进行打包,形成新的一个filter
调用GeneralUnaryFilter:
.. code:: python
command = Input('/some/input/file.mov') >> \
GeneralUnaryFilter('drawgrid', x=0, y=0, w=100, h=50) >> \
Output('/custom/filter/output.mov')
继承BaseFilterNode class,自行实现:
.. code:: python
class Null(BaseFilterNode):
# 设置特定的type,要保证唯一性
type = 'some_ffmpeg_filter_name'
# 重载init,实现自己的参数
def __init__(self, **kwargs):
super(Null, self).__init__(**kwargs)
# 重载 simple_cmd_string,返回对应的ffmpeg 指令string
def simple_cmd_string(self):
self._cmd = u'null'
return self._cmd
继承BasePackedFilterNode, 将多种filter 进行打包,形成新的一个filter。
可以参看 Fit 这个class 的实现方式。
查看shell 指令 以及运行
=======================
用户可以查看生成的shell 指令,或者直接运行:
.. code:: python
# 查看将要运行的终端指令
print command.cmd()
# 组装filter 之后,即可运行命令
for progress in command.run():
try:
print progress # 通过yield 返回渲染进度的dict,用户可以自行实现非阻塞进度条
except Exception as e:
raise # 如果指令错误,会抛出异常
ffscript 的保存和读取
=====================
ffscript 是dayu_ffmpeg 对于network 结构的一种json
表现形式,可以认为是“工程文件”。 如果想要把组成的network
保存到硬盘上,或是通过网络通信进行传递,就会使用到。
保存ffscript:
.. code:: python
from dayu_ffmpeg.ffscript import save_script, open_script
# 保存
save_script(netowrk_node_instance, '/some/script/path.json')
# 读取
transcode_template = open_script('/some/script/path.json')
.. |pypi| image:: https://img.shields.io/badge/pypi-0.5-blue.svg
:target: https://pypi.org/project/dayu-ffmpeg/
.. |python| image:: https://img.shields.io/badge/python-2.7-blue.svg
:target:
.. |build status| image:: https://travis-ci.org/phenom-films/dayu_ffmpeg.svg?branch=master
:target: https://travis-ci.org/phenom-films/dayu_ffmpeg
.. |github license| image:: https://img.shields.io/github/license/mashape/apistatus.svg
:target: https://github.com/phenom-films/dayu_ffmpeg/blob/master/license
|
PypiClean
|
/splunk-appinspect-2.37.0.tar.gz/splunk-appinspect-2.37.0/splunk_appinspect/checks/check_python_files.py
|
import ast
import logging
import os
import platform
import re
import semver
import splunk_appinspect
from splunk_appinspect.check_messages import FailMessage, WarningMessage
from splunk_appinspect.check_routine.python_ast_searcher.ast_searcher import AstSearcher
from splunk_appinspect.check_routine.python_ast_searcher.node_filters import is_sub_class_def
from splunk_appinspect.checks import Check, CheckConfig
from splunk_appinspect.python_analyzer import utilities
from splunk_appinspect.python_analyzer.ast_info_query import Any
from splunk_appinspect.python_analyzer.ast_types import AstVariable
from splunk_appinspect.python_modules_metadata.metadata_common import metadata_consts
from splunk_appinspect.python_modules_metadata.python_modules_metadata_store import metadata_store
from splunk_appinspect.regex_matcher import RegexBundle, RegexMatcher
logger = logging.getLogger(__name__)
report_display_order = 40
@splunk_appinspect.tags("cloud", "manual", "ast")
@splunk_appinspect.cert_version(min="1.7.0")
def check_for_hidden_python_files(app, reporter):
"""Check that there are no hidden python files included in the app."""
if platform.system() == "Windows":
for directory, file, _ in app.iterate_files(excluded_types=[".py"]):
current_file_relative_path = os.path.join(directory, file)
reporter_output = (
"Please manual check this "
"file to check if it is a hidden python file. "
f"File: {current_file_relative_path}"
)
reporter.manual_check(
reporter_output,
file_name=current_file_relative_path,
)
else:
client = app.python_analyzer_client
for filepath in client.get_hidden_python_files():
import chardet
byte_string = open(os.path.join(app.app_temp_dir, filepath), "rb").read()
encoding = chardet.detect(byte_string)["encoding"]
file_full_path = os.path.join(app.app_temp_dir, filepath)
try:
content = open(file_full_path, "r", encoding=encoding).read()
except UnicodeDecodeError as ex:
logger.warning(f"Decoding error: {ex}, file: {file_full_path}")
content = open(file_full_path, "r", encoding="utf-8").read()
content = re.sub("[\r\n]+", " ", content)
# this check only focus on python template code
network_patterns = "(urllib|socket|httplib|requests|smtplib|ftplib|nntplib|poplib|imaplib|telnetlib|gopherlib|xmlrpclib|SimpleHTTPServer|SimpleXMLRPCServer)"
system_modules = "(subprocess|shutil|os|sys|distutils|threading|multiprocessing|commands)"
from_import_modules = rf"from\s+import\s+{system_modules}"
import_modules = rf"import\s+{system_modules}"
file_manipulation = r"\.(read|write|open)"
possible_injection = r"(subclasses|__class__|config)\.(iteritems|items)\(\)"
patterns = [
network_patterns,
from_import_modules,
import_modules,
file_manipulation,
possible_injection,
]
template_pairs = [("<%.*", ".*%>"), ("{{.*", ".*}}"), ("{%.*", ".*%}")]
for search_pattern in [pair[0] + pattern + pair[1] for pattern in patterns for pair in template_pairs]:
if re.search(search_pattern, content):
reporter_output = f"Hidden python script found. File: {filepath}"
reporter.manual_check(reporter_output, file_name=filepath)
break
@splunk_appinspect.tags(
"splunk_appinspect", "cloud", "private_app", "private_victoria", "migration_victoria", "private_classic"
)
@splunk_appinspect.cert_version(min="1.0.0")
def check_for_compiled_python(app, reporter):
"""Check that there are no `.pyc` or `.pyo` files included in the app."""
for directory, filename, _ in app.iterate_files(types=[".pyc", ".pyo"]):
current_file_relative_path = os.path.join(directory, filename)
reporter_output = f"A Compiled Python file was detected. File: {current_file_relative_path}"
reporter.fail(reporter_output, current_file_relative_path)
@splunk_appinspect.tags("cloud", "ast", "private_app", "private_victoria", "migration_victoria", "private_classic")
@splunk_appinspect.cert_version(min="1.7.0")
def check_for_possible_threading(app, reporter):
"""Check for the use of threading, and multiprocesses. Threading or process must be
used with discretion and not negatively affect the Splunk installation as a
whole.
"""
client = app.python_analyzer_client
circle_check_namespace = [
"os.forkpty",
"os.fork",
"thread.start_new_thread",
"os.kill",
"os.killpg",
"threading.Thread.start",
"multiprocessing.Process.start",
]
modules = [
metadata_consts.ModuleNameConsts.OS,
metadata_consts.ModuleNameConsts.SUBPROCESS,
metadata_consts.ModuleNameConsts.THREAD,
metadata_consts.ModuleNameConsts.THREADING,
metadata_consts.ModuleNameConsts._THREAD, # pylint: disable=W0212
metadata_consts.ModuleNameConsts.MULTIPROCESSING,
]
check_objects = (
metadata_store.query()
.namespace_prefixes(modules)
.tag(metadata_consts.TagConsts.THREAD_SECURITY)
.python_compatible()
.collect()
)
for file_path, ast_info in client.get_all_ast_infos():
for check_object in check_objects:
module_name = ".".join(check_object.namespace.split(".")[:-1])
# questionable functions in circle invoke
if check_object.namespace in circle_check_namespace:
loop_nodes = utilities.find_python_function_in_loop(ast_info, module_name, check_object.name)
for node in loop_nodes:
reporter_output_for_loopcheck = (
f"The following line contains questionable usage `{check_object.namespace}` in loop. "
"Use threading and multiprocessing with discretion. "
f"File: {file_path} "
f"Line: {node.lineno}"
)
reporter.warn(
reporter_output_for_loopcheck,
file_name=file_path,
line_number=node.lineno,
)
else:
node_linenos = ast_info.get_module_function_call_usage(module_name, check_object.name, lineno_only=True)
for node_lineno in node_linenos:
reporter_output = (
f"The following line contains {check_object.namespace} usage. "
"Use threading and multiprocessing with discretion. "
f"File: {file_path} "
f"Line: {node_lineno}"
)
reporter.warn(
reporter_output,
file_name=file_path,
line_number=node_lineno,
)
@splunk_appinspect.tags("splunk_appinspect", "cloud", "security", "manual", "ast")
@splunk_appinspect.cert_version(min="1.7.0")
def check_built_in_import_function(app, reporter):
"""Check that the python `__import__` method is not used in a way that
can be exploited (e.g., __import__(conf_setting) is at risk of code
injection).
"""
# This method shouldn't be used because imports should be explicit to
# prevent execution of unintended code. If you're dynamically loading
# libraries via strings there is some concern
# https://docs.python.org/2/library/functions.html#__import__
# Nice SO dicussion on this here:
# http://stackoverflow.com/questions/28231738/import-vs-import-vs-importlib-import-module
# http://stackoverflow.com/questions/2724260/why-does-pythons-import-require-fromlist
# https://nedbatchelder.com/blog/201206/eval_really_is_dangerous.html
reporter_output_template = (
"The `{}` function was detected being"
" used. Please use the `import` keyword instead."
" Third-Party libraries are exempt from this"
" requirement."
)
import_function = [metadata_consts.built_in_import_function()]
files_with_results = AstSearcher(app.python_analyzer_client).search(import_function)
reporter.ast_manual_check(reporter_output_template, files_with_results)
@splunk_appinspect.tags("splunk_appinspect", "cloud", "security", "manual", "ast")
@splunk_appinspect.cert_version(min="1.7.0")
def check_for_file_and_directory_access(app, reporter):
"""Check for possible file and directory access, they could be used in external file manipulation"""
function_reporter_output = (
"The following line will be inspected during code review. "
+ "The `{}` module/method can be used to access file/directory outside of the app dir. "
+ "Function call arguments: {}, keywords: {} "
)
modules = [
metadata_consts.ModuleNameConsts.OS,
metadata_consts.ModuleNameConsts.SHUTIL,
metadata_consts.ModuleNameConsts.TEMPFILE,
metadata_consts.ModuleNameConsts.LINECACHE,
metadata_consts.ModuleNameConsts.EMAIL,
metadata_consts.ModuleNameConsts.IO,
]
functions = (
metadata_store.query()
.namespace_prefixes(modules)
.tag(metadata_consts.TagConsts.FILE_READ_AND_WRITE)
.python_compatible()
.functions()
)
files_with_results = AstSearcher(app.python_analyzer_client).search(functions, get_func_params=True)
reporter.ast_manual_check(function_reporter_output, files_with_results)
@splunk_appinspect.tags(
"splunk_appinspect",
"cloud",
"security",
"ast",
"private_app",
"private_victoria",
"migration_victoria",
"private_classic",
)
@splunk_appinspect.cert_version(min="1.7.0")
def check_for_python_udp_network_communications(app, reporter):
"""Check for UDP network communication"""
reporter_output = (
"Please check for inbound or outbound UDP network communications."
"Any programmatic UDP network communication is prohibited due to security risks in Splunk Cloud and App Certification."
"The use or instruction to configure an app using Settings -> Data Inputs -> UDP within Splunk is permitted. (Note: "
"UDP configuration options are not available in Splunk Cloud and as such do not impose a security risk."
)
client = app.python_analyzer_client
for filepath, ast_info in client.get_all_ast_infos():
# find inner call node usages
query = ast_info.query().call_nodes(force_propagate=False)
while not query.is_end():
query.call_nodes(force_propagate=False)
udp_nodes = (
query.filter(Any(ast_info.get_module_function_call_usage("socket", "socket", fuzzy=True)))
.filter(Any(ast_info.get_module_usage("socket.AF_INET")))
.filter(Any(ast_info.get_module_usage("socket.SOCK_DGRAM")))
.collect()
)
for node in udp_nodes:
reporter.fail(reporter_output, filepath, node.lineno)
@splunk_appinspect.tags("splunk_appinspect", "cloud", "security", "manual", "ast")
@splunk_appinspect.cert_version(min="1.7.0")
def check_for_unencrypted_network_communications(app, reporter):
"""Check that all network communications are encrypted"""
message = (
"Please check for inbound or outbound unencrypted network communications in `{}`. "
"All communications with Splunk Cloud must be encrypted."
)
components = (
metadata_store.query()
.tag(metadata_consts.TagConsts.APPLICATION_LAYER_PROTOCOL_CONNECTION)
.python_compatible()
.collect()
)
files_with_results = AstSearcher(app.python_analyzer_client).search(components)
reporter.ast_manual_check(message, files_with_results)
@splunk_appinspect.tags("splunk_appinspect", "cloud", "security", "manual", "ast")
@splunk_appinspect.cert_version(min="1.7.0")
def check_all_python_files_are_well_formed(app, reporter):
"""Check all python files are well formed under python2 and python3 standard"""
error_template = (
"Python script is not well formed, {message} when parser try to parse. "
"Runtime errors and possible style issues could exist when it is executed. "
"Please manual check if the whole app is broken, if yes, fail this app. "
"If syntax error only block part of app's functionality, warn developer to fix it. "
"File: {filepath}"
)
syntax_error_message = "syntax error found in python script"
null_byte_error_message = "python script contains null byte"
other_exception_message = "issues like `StackOverFlow` or `SystemError` may exist"
client = app.python_analyzer_client
for filepath in client.get_syntax_error_files():
reporter.manual_check(
error_template.format(message=syntax_error_message, filepath=filepath),
filepath,
)
for filepath in client.get_null_byte_error_files():
reporter.manual_check(
error_template.format(message=null_byte_error_message, filepath=filepath),
filepath,
)
for filepath in client.get_other_exception_files():
reporter.manual_check(
error_template.format(message=other_exception_message, filepath=filepath),
filepath,
)
@splunk_appinspect.tags("splunk_appinspect", "cloud", "security", "manual", "ast")
@splunk_appinspect.cert_version(min="1.7.0")
def check_for_builtin_functions(app, reporter):
"""Check for builtin functions(`open`, `eval`, `execfile`, `file`) usages in python files"""
# open, file, execfile usages
reporter_output = (
"The following line will be inspected during code review. "
+ "The `{}` module/method can be used to manipulate files outside of the app dir. "
)
built_in_file_functions = metadata_consts.file_manipulation_functions()
def has_args(call_node, ast_info): # pylint: disable=W0613
return call_node.args
files_with_results = AstSearcher(app.python_analyzer_client).search(built_in_file_functions, node_filter=has_args)
reporter.ast_manual_check(reporter_output, files_with_results)
# eval usage
reporter_output = (
"The following line will be inspected during code review. "
+ "The {} module/method can be used to execute arbitrary expression. "
)
built_in_eval_functions = [metadata_consts.built_in_eval_function()]
def has_non_string_args(call_node, ast_info):
if call_node.args:
variable = ast_info.get_variable_details(call_node.args[0])
# if variable could be parsed as string, analyzer could cover its usage
return not AstVariable.is_string(variable)
return False
files_with_results = AstSearcher(app.python_analyzer_client).search(
built_in_eval_functions, node_filter=has_non_string_args
)
reporter.ast_manual_check(reporter_output, files_with_results)
@splunk_appinspect.tags("splunk_appinspect", "cloud", "security", "manual", "ast")
@splunk_appinspect.cert_version(min="1.7.0")
def check_python_untrusted_xml_functions(app, reporter):
"""Check for untrusted xml usages in python libraries"""
message = (
"The following lines should be inspected during code review, the xml functions not safe enough. "
"Using various XLM methods to parse untrusted XML data is known to be vulnerable to XML attacks. "
"Methods should be replaced with their defusedxml equivalents."
"Module/Method is {} "
)
functions = (
metadata_store.query()
.tag(metadata_consts.TagConsts.FILE_READ_AND_WRITE)
.namespace_prefix(metadata_consts.ModuleNameConsts.XML)
.python_compatible()
.functions()
)
classes = metadata_store.query().tag(metadata_consts.TagConsts.XML_RPC_CONNECTION).python_compatible().classes()
components = functions + classes
files_with_results = AstSearcher(app.python_analyzer_client).search(components)
reporter.ast_manual_check(message, files_with_results)
@splunk_appinspect.tags("splunk_appinspect", "cloud", "security", "manual", "ast")
@splunk_appinspect.cert_version(min="1.7.0")
def check_for_data_persistence(app, reporter):
"""check for data persistence usage which could be used to invoke marshall function call"""
# ignore nodes that creating in memory db
def _is_non_in_memory_db_creation(node, ast_info):
if node.args:
arg = node.args[0]
variable = ast_info.get_variable_details(arg)
return not (AstVariable.is_string(variable) and variable.variable_value == ":memory:")
return True
serialization_reporter_output = (
"The following lines should be inspected during code review, `{}` could be used to "
"serialize and deserialize python object. Check if serialization result will be stored "
"outside of App dir."
)
persistence_reporter_output = (
"The following lines should be inspected during code review, `{}` could be used to "
"store memory data to disk. Check if in-memory data will be stored outside of App "
"dir."
)
serialization_modules = [
metadata_consts.ModuleNameConsts.PICKLE,
metadata_consts.ModuleNameConsts.CPICKLE,
metadata_consts.ModuleNameConsts.MARSHAL,
]
persistence_modules = [
metadata_consts.ModuleNameConsts.SHELVE,
metadata_consts.ModuleNameConsts.ANYDBM,
metadata_consts.ModuleNameConsts.DBM,
metadata_consts.ModuleNameConsts.GDBM,
metadata_consts.ModuleNameConsts.DUMBDBM,
]
serialization_functions = (
metadata_store.query()
.namespace_prefixes(serialization_modules)
.tag(metadata_consts.TagConsts.DATA_PERSISTENCE)
.python_compatible()
.functions()
)
persistence_functions = (
metadata_store.query()
.namespace_prefixes(persistence_modules)
.tag(metadata_consts.TagConsts.DATA_PERSISTENCE)
.python_compatible()
.functions()
)
searcher = AstSearcher(app.python_analyzer_client)
files_with_results = searcher.search(serialization_functions)
reporter.ast_manual_check(serialization_reporter_output, files_with_results)
files_with_results = searcher.search(persistence_functions)
reporter.ast_manual_check(persistence_reporter_output, files_with_results)
sqlite3_functions = (
metadata_store.query()
.namespace_prefix(metadata_consts.ModuleNameConsts.SQLITE3)
.tag(metadata_consts.TagConsts.DATA_PERSISTENCE)
.python_compatible()
.functions()
)
files_with_results = searcher.search(sqlite3_functions, node_filter=_is_non_in_memory_db_creation)
reporter.ast_manual_check(persistence_reporter_output, files_with_results)
@splunk_appinspect.tags("splunk_appinspect", "cloud", "security", "manual", "ast")
@splunk_appinspect.cert_version(min="1.7.0")
def check_for_plain_text_credentials_in_python(app, reporter):
"""check for plain text credentials disclosure in python files"""
sensitive_name_patterns = ["login", "community", "key", "pass", "pwd", "token", "secret", "code", "auth"]
def check_variable_name(variable_name):
return any(
map(
lambda pattern: re.search(pattern, variable_name, re.IGNORECASE),
sensitive_name_patterns,
)
)
client = app.python_analyzer_client
node_to_name_dict = {}
for filepath, ast_info in client.get_all_ast_infos():
all_ast_nodes = set()
for variable_name, ast_node_set in filter(
lambda tuple: check_variable_name(tuple[0]),
iter(ast_info.variable_usage.items()),
):
for ast_node in ast_node_set:
node_to_name_dict[ast_node] = variable_name
variable = ast_info.get_variable_details(ast_node)
if AstVariable.is_number(variable) or AstVariable.is_string(variable):
all_ast_nodes.add(ast_node)
all_assign_nodes = ast_info.query().propagate_nodes(ast.Assign).filter(Any(all_ast_nodes)).collect()
sensitive_ast_nodes = set()
for assign_node in all_assign_nodes:
sensitive_ast_nodes |= utilities.fetch_all_nodes_belonging_to_given_subtree(assign_node, all_ast_nodes)
for sensitive_node in sensitive_ast_nodes:
node_variable = ast_info.get_variable_details(sensitive_node)
reporter_output = (
"The following lines should be inspected during code review, "
"Possible plain text credentials disclosure here, "
f"`{node_to_name_dict[sensitive_node]} = {node_variable.variable_value}` "
f"File: {filepath} Line: {sensitive_node.lineno}"
)
reporter.manual_check(
reporter_output,
file_name=filepath,
line_number=sensitive_node.lineno,
)
@splunk_appinspect.tags("splunk_appinspect", "cloud", "security", "manual", "ast")
@splunk_appinspect.cert_version(min="1.7.0")
def check_for_interprocess_communication_and_networking(app, reporter):
"""check if networking or file manipulation exist in interprocess modules usage"""
inheritance_output_template = (
"The following lines should be inspected during code review. "
"Class inherited from `{}` could be used to communicate with outside services or "
"files. "
)
socket_connection_template = (
"The following lines should be inspected during code review. "
"`{}` could be used to open socket connection to outside service. "
)
classes = (
metadata_store.query()
.namespace_prefixes(
[
metadata_consts.ModuleNameConsts.ASYNCHAT,
metadata_consts.ModuleNameConsts.ASYNCORE,
]
)
.tag(metadata_consts.TagConsts.NETWORK_CONNECTION)
.python_compatible()
.classes()
)
searcher = AstSearcher(app.python_analyzer_client)
files_with_results = searcher.search(classes, node_filter=is_sub_class_def, search_module_usage=True)
reporter.ast_manual_check(inheritance_output_template, files_with_results)
socket_functions = (
metadata_store.query()
.namespace_prefixes([metadata_consts.ModuleNameConsts.SOCKET])
.tag(metadata_consts.TagConsts.NETWORK_CONNECTION)
.python_compatible()
.functions()
)
files_with_results = searcher.search(socket_functions)
reporter.ast_manual_check(socket_connection_template, files_with_results)
@splunk_appinspect.tags("splunk_appinspect", "cloud", "security", "manual", "ast")
@splunk_appinspect.cert_version(min="1.7.0")
def check_for_generic_operating_system_services(app, reporter):
"""check if generic operating system modules could be used to communicate with outside services, files or systems"""
message = (
"The following lines should be inspected during code review. `{}` could be used to receive data from "
"outside or log data to outside. "
)
query = (
metadata_store.query()
.namespace_prefixes(
[
metadata_consts.ModuleNameConsts.ARGPARSE,
metadata_consts.ModuleNameConsts.LOGGING,
metadata_consts.ModuleNameConsts.GETPASS,
]
)
.tag(metadata_consts.TagConsts.GENERIC_OPERATING_SYSTEM_SERVICES)
.python_compatible()
)
components = query.functions() + query.classes()
files_with_results = AstSearcher(app.python_analyzer_client).search(components)
reporter.ast_manual_check(message, files_with_results)
@splunk_appinspect.tags("splunk_appinspect", "cloud", "security", "manual", "ast")
@splunk_appinspect.cert_version(min="1.7.0")
def check_for_reverse_shell_and_backdoor(app, reporter):
"""check if possible reverse shell exist in python code"""
client = app.python_analyzer_client
for filepath, ast_info in client.get_all_ast_infos():
# subprocess function call usage(e.g. `subprocess.call`, `subprocess.check_output`)
subprocess_usages = set(ast_info.get_module_function_call_usage("subprocess", fuzzy=True))
# `socket.socket` usage
socket_usages = set(ast_info.get_module_function_call_usage("socket", "socket"))
# file descriptor manipulation(e.g. `os.dup`, `os.dup2`)
dup_usages = set(ast_info.get_module_function_call_usage("os", "dup"))
dup2_usages = set(ast_info.get_module_function_call_usage("os", "dup2"))
dup_all_usages = dup_usages | dup2_usages
candidate_subprocess_usage = set()
for dup_usage in dup_all_usages:
for subprocess_usage in subprocess_usages:
for socket_usage in socket_usages:
if ast_info.is_in_same_code_block([dup_usage, subprocess_usage, socket_usage]):
candidate_subprocess_usage.add(subprocess_usage)
for subprocess_usage in candidate_subprocess_usage:
reporter_output = (
"The following lines should be inspected during code review. "
"Possible reverse shell detected in this code block, "
f"`subprocess` module's usage is provided. File: {filepath}, Line: {subprocess_usage.lineno}"
)
reporter.manual_check(
reporter_output,
file_name=filepath,
line_number=subprocess_usage.lineno,
)
@splunk_appinspect.tags("splunk_appinspect", "cloud", "security", "manual", "ast")
@splunk_appinspect.cert_version(min="1.7.0")
def check_for_python_runtime_services(app, reporter):
"""Check if python runtime services could be used to manipulate system python objects"""
message = (
"The following lines should be inspected during code review, "
"Possible system python objects manipulation `{}` found here. "
)
functions = (
metadata_store.query()
.namespace_prefixes(
[
metadata_consts.ModuleNameConsts.GC,
metadata_consts.ModuleNameConsts.INSPECT,
]
)
.python_compatible()
.functions()
)
files_with_results = AstSearcher(app.python_analyzer_client).search(functions)
reporter.ast_manual_check(message, files_with_results)
@splunk_appinspect.tags("splunk_appinspect", "cloud", "security", "ast", "manual")
@splunk_appinspect.cert_version(min="1.7.0")
def check_for_custom_python_interpreters(app, reporter):
"""Check if custom python interpreters could be used in malicious code execution"""
message = "The following lines should be inspected during code review, custom python interpreters trying to run unknown code, usage is `{}`"
functions = (
metadata_store.query()
.namespace_prefixes([metadata_consts.ModuleNameConsts.CODE])
.tag(metadata_consts.TagConsts.STRING_EXECUTION)
.python_compatible()
.functions()
)
files_with_results = AstSearcher(app.python_analyzer_client).search(functions)
reporter.ast_manual_check(message, files_with_results)
@splunk_appinspect.tags("splunk_appinspect", "cloud", "security", "ast", "manual")
@splunk_appinspect.cert_version(min="1.7.0")
def check_for_python_multimedia_services(app, reporter):
"""Check if multimedia services module could be used to execute unknown-source multimedia files"""
message = (
"The following lines should be inspected during code review, multimedia service module usage `{}` detected."
)
multimedia_modules = [
metadata_consts.ModuleNameConsts.AIFC,
metadata_consts.ModuleNameConsts.SUNAU,
metadata_consts.ModuleNameConsts.WAVE,
metadata_consts.ModuleNameConsts.CHUNK,
]
query = (
metadata_store.query()
.namespace_prefixes(multimedia_modules)
.tag(metadata_consts.TagConsts.FILE_READ_AND_WRITE)
.python_compatible()
)
components = query.functions() + query.classes()
files_with_results = AstSearcher(app.python_analyzer_client).search(components)
reporter.ast_manual_check(message, files_with_results)
@splunk_appinspect.tags("splunk_appinspect", "cloud", "security", "manual", "ast")
@splunk_appinspect.cert_version(min="1.7.0")
def check_for_data_compression_and_archiving(app, reporter):
"""check if data compression and archiving libraries could be used to read & write files outside of app dir"""
message = (
"The following lines should be inspected during code review, "
"`{}` could be used to create an archiving object, it can read or write file "
"outside of app dir."
)
query = (
metadata_store.query()
.namespace_prefixes(
[
metadata_consts.ModuleNameConsts.GZIP,
metadata_consts.ModuleNameConsts.BZ2,
metadata_consts.ModuleNameConsts.ZIPFILE,
metadata_consts.ModuleNameConsts.TARFILE,
]
)
.tag(metadata_consts.TagConsts.DATA_COMPRESSION)
.python_compatible()
)
components = query.functions() + query.classes()
files_with_results = AstSearcher(app.python_analyzer_client).search(components)
reporter.ast_manual_check(message, files_with_results)
@splunk_appinspect.tags("splunk_appinspect", "security", "ast", "manual")
@splunk_appinspect.cert_version(min="1.7.0")
def check_for_ms_windows_specific_services(app, reporter):
"""Check if MS Windows specific service modules could be used to execute dangerous windows platform commands"""
client = app.python_analyzer_client
for file_path, ast_info in client.get_all_ast_infos():
for module_name in ["msilib", "msvcrt", "_winreg"]:
for lineno in ast_info.get_module_usage(module_name, lineno_only=True):
reporter.manual_check(
"The following lines should be inspected during code review, "
f"MS Windows specific services usage `{module_name}` has been detected",
file_name=file_path,
line_number=lineno,
)
@splunk_appinspect.tags("splunk_appinspect", "cloud", "security", "ast", "manual")
@splunk_appinspect.cert_version(min="1.7.0")
def check_for_optional_operating_system_services(app, reporter):
"""Check for operating system features that are available on selected operating systems only."""
objects = (
metadata_store.query()
.namespace_prefixes([metadata_consts.ModuleNameConsts.MMAP])
.tag(metadata_consts.TagConsts.MEMORY_MAPPING)
.python_compatible()
.collect()
)
message = (
"The following lines should be inspected during code review, operating system feature `{}` has been "
"detected. "
)
files_with_results = AstSearcher(app.python_analyzer_client).search(objects)
reporter.ast_manual_check(message, files_with_results)
@splunk_appinspect.tags("splunk_appinspect", "cloud", "security", "ast", "manual")
@splunk_appinspect.cert_version(min="1.7.0")
def check_for_restricted_execution(app, reporter):
"""Check if restricted execution exist in current app"""
client = app.python_analyzer_client
for file_path, ast_info in client.get_all_ast_infos():
for module_name in ["rexec", "Bastion"]:
for lineno in ast_info.get_module_usage(module_name, lineno_only=True):
reporter.manual_check(
"The following lines should be inspected during code review, "
f"restricted execution `{module_name}` has been detected",
file_name=file_path,
line_number=lineno,
)
@splunk_appinspect.tags("splunk_appinspect", "cloud", "security", "ast", "manual")
@splunk_appinspect.cert_version(min="1.7.0")
def check_for_internet_protocols_and_support(app, reporter):
"""Check for the use of web server classes, they could be used to start a internal server in current app"""
message = "The following lines should be inspected during code review, web server could be run with `{}`."
objects = (
metadata_store.query()
.namespace_prefixes(
[
metadata_consts.ModuleNameConsts.WSGIREF,
metadata_consts.ModuleNameConsts.SOCKET_SERVER,
metadata_consts.ModuleNameConsts.SOCKET_SERVER_PY3,
metadata_consts.ModuleNameConsts.SIMPLE_XML_RPC_SERVER,
metadata_consts.ModuleNameConsts.DOC_XML_RPC_SERVER,
metadata_consts.ModuleNameConsts.BASE_HTTP_SERVER,
metadata_consts.ModuleNameConsts.HTTP,
]
)
.tag(metadata_consts.TagConsts.WEB_SERVER)
.python_compatible()
.collect()
)
files_with_results = AstSearcher(app.python_analyzer_client).search(objects)
reporter.ast_manual_check(message, files_with_results)
@splunk_appinspect.tags(
"splunk_appinspect",
"cloud",
"security",
"ast",
"private_app",
"private_victoria",
"migration_victoria",
"private_classic",
)
@splunk_appinspect.cert_version(min="1.7.0")
def check_for_root_privilege_escalation(app, reporter):
"""Check possible root privilege escalation"""
def is_sudo_and_su_usage_exists(call_node, ast_info):
for arg in call_node.args:
for ast_node in ast.walk(arg):
variable = ast_info.get_variable_details(ast_node)
if AstVariable.is_string(variable):
# check exactly match and prefix match
if variable.variable_value in ["su", "sudo"]:
return True
if variable.variable_value.startswith("su ") or variable.variable_value.startswith("sudo "):
return True
return False
check_objects = (
metadata_store.query().tag(metadata_consts.TagConsts.EXTERNAL_COMMAND_EXECUTION).python_compatible().collect()
)
files_with_results = AstSearcher(app.python_analyzer_client).search(
check_objects, node_filter=is_sudo_and_su_usage_exists
)
reporter.ast_fail("Root privilege escalation detected using {}", files_with_results)
@splunk_appinspect.tags("splunk_appinspect", "cloud", "security", "ast", "manual")
@splunk_appinspect.cert_version(min="1.7.0")
def check_for_program_frameworks(app, reporter):
"""Check if program frameworks could be used to interface with web part"""
check_objects = (
metadata_store.query()
.namespace_prefix(metadata_consts.ModuleNameConsts.CMD)
.tag(metadata_consts.TagConsts.EXTERNAL_COMMAND_EXECUTION)
.python_compatible()
.collect()
)
message = "The following lines should be inspected during code review, {}'s derived class could be used to interface with other part of system. "
searcher = AstSearcher(app.python_analyzer_client)
files_with_results = searcher.search(check_objects, node_filter=is_sub_class_def, search_module_usage=True)
reporter.ast_manual_check(message, files_with_results)
@splunk_appinspect.tags("splunk_appinspect", "cloud", "security", "ast", "manual")
@splunk_appinspect.cert_version(min="1.7.0")
def check_for_importing_modules(app, reporter):
"""Check Python code for importing modules dynamically."""
components = metadata_store.query().tag(metadata_consts.TagConsts.MODULE_IMPORTING).python_compatible().collect()
message = (
"The following lines should be inspected during code review, Python code `{}` for dynamically importing "
"module has been detected."
)
files_with_results = AstSearcher(app.python_analyzer_client).search(components)
reporter.ast_manual_check(message, files_with_results)
@splunk_appinspect.tags("splunk_appinspect", "cloud", "security", "ast", "manual")
@splunk_appinspect.cert_version(min="1.7.0")
def check_for_debugging_and_profiling(app, reporter):
"""Check if debugging library could be used to execute arbitrary commands"""
check_objects = (
metadata_store.query()
.namespace_prefix(metadata_consts.ModuleNameConsts.TRACE)
.tag(metadata_consts.TagConsts.EXTERNAL_COMMAND_EXECUTION)
.python_compatible()
.collect()
)
message = (
"The following lines should be inspected during code review, `{}` could be used to execute arbitrary commands."
)
files_with_results = AstSearcher(app.python_analyzer_client).search(check_objects)
reporter.ast_manual_check(message, files_with_results)
@splunk_appinspect.tags("splunk_appinspect", "cloud", "security")
def check_python_httplib2_version(app, reporter):
"""Check python httplib2 version."""
min_ver = semver.VersionInfo.parse("0.19.1")
ver_rex = '__version__ = "([\d.]+)"'
httplib2_exists = False
py_files = list(app.iterate_files(types=[".py"]))
# Look for __version__ = "0.18" in httplib2/__init__.py
rexs = [RegexBundle(ver_rex)]
matcher = RegexMatcher(rexs)
if py_files:
for directory, file, _ in py_files:
if not (file == "__init__.py" and directory.endswith("httplib2" + os.sep)):
continue
file_path = os.path.join(directory, file)
full_file_path = app.get_filename(file_path)
match_result = matcher.match_file(filepath=full_file_path)
for lineno, result in match_result:
httplib2_exists = True
file_path = os.path.join(directory, file)
# Parse the found version into semver, correcting for
# bad versions like "0.1" without a patch version
try:
ver = re.search(ver_rex, result).groups()[0]
if len(ver.split(".")) == 2:
ver += ".0" # correct for versions without a patch
parsed_ver = semver.VersionInfo.parse(ver)
except Exception as err:
reporter_output = (
"Issue parsing version found in for the python httplib2"
f" ({ver}). File: {file_path}. Error: {err}."
)
reporter.warn(reporter_output, file_path)
continue
if parsed_ver < min_ver:
# Found httplib2 version is less than the minimum
reporter_output = (
"Detected an outdated version of the Python httplib2"
f" ({ver}). Please upgrade to version "
f"{min_ver[0]}.{min_ver[1]}.{min_ver[2]} or later. "
f"File: {file_path}."
)
reporter.warn(reporter_output, file_path)
# httplib2/__init__.py not found
if not httplib2_exists:
reporter_output = "Python httplib2 library not found."
reporter.not_applicable(reporter_output)
class CheckPythonSdkVersion(Check):
def __init__(self):
super().__init__(
config=CheckConfig(
name="check_python_sdk_version",
description="Check that Splunk SDK for Python is up-to-date.",
cert_min_version="2.11.1",
tags=("cloud", "private_app", "private_victoria", "migration_victoria", "private_classic", "future"),
)
)
MINIMUM_SDK_VERSION = semver.VersionInfo.parse("1.6.16")
LATEST_SDK_VERSION = semver.VersionInfo.parse("1.7.3")
@Check.depends_on_matching_files(
patterns=[r"\"User-Agent\": \"splunk-sdk-python/([\d.]+.*)\""],
names=["binding.py"],
not_applicable_message="Splunk SDK for Python not found.",
)
def check_python_sdk(self, app, path_in_app, line_number, result):
# Parse the found version into semver, correcting for
# bad versions like "0.1" without a patch version
ver = result.groups()[0]
try:
if len(ver.split(".")) == 2:
ver += ".0" # correct for versions without a patch
parsed_ver = semver.VersionInfo.parse(ver)
except Exception as err:
yield WarningMessage(
"Issue parsing version found in for the Splunk SDK for " f"Python ({ver}). Error: {err}.",
file_name=path_in_app,
line_number=line_number,
)
return
if parsed_ver < self.MINIMUM_SDK_VERSION:
# Found splunklib version is less than the minimum
yield FailMessage(
"Detected an outdated version of the Splunk SDK for " f"Python ({ver}).",
file_name=path_in_app,
line_number=line_number,
remediation=f"Upgrade to {self.LATEST_SDK_VERSION} or later.",
)
elif self.MINIMUM_SDK_VERSION <= parsed_ver < self.LATEST_SDK_VERSION:
yield WarningMessage(
"Detected an outdated version of the Splunk SDK for " f"Python ({ver}).",
file_name=path_in_app,
line_number=line_number,
remediation=f"Upgrade to {self.LATEST_SDK_VERSION} or later.",
)
else:
yield WarningMessage(
f"Splunk SDK for Python detected (version {ver}).",
file_name=path_in_app,
line_number=line_number,
remediation="No action required at this time.",
)
|
PypiClean
|
/mroylib_min-2.2.5.tar.gz/mroylib_min-2.2.5/qlib/io/tracepoint.py
|
import functools
import requests
import time
import argparse
class TracePoint:
classes = []
funcs = []
flow = []
@staticmethod
def clear():
TracePoint.classes = []
TracePoint.funcs = []
TracePoint.flow = []
def __init__(self, cls, func, t):
if cls not in TracePoint.classes:
TracePoint.classes.append(cls)
if cls not in TracePoint.funcs:
TracePoint.funcs.append(func)
TracePoint.flow.append(",".join([cls,t, func]))
def render_flow(self):
first = TracePoint.flow[0]
recods = set()
for no,i in enumerate(TracePoint.flow[1:]):
cls,t, func = i.split(',',2)
fcls,ft, ffunc = first.split(',', 2)
fn = func.split("(")[0]
ffn = ffunc.split("(")[0]
label = "{l} -> {c}".format(l=ffn, c=fn)
if label in recods:
continue
recods.add(label)
lc,_ = self.get_color(cls, func)
yield """{l} -> {c} [label="<span style='color:gray;'>{t}</span>|<span style='font-size:18px;color:red'>{no}</span>" labelType="html" lineInterpolate=basis arrowheadStyle="fill: {lc}" style="stroke: {lc}; stroke-width: 1px;"];""".format(no=no,l=ffn, c=fn, t=time.ctime(float(t)), lc=lc)
first = i
def render_var(self, one):
cls,t, func = one.strip().split(",", 2)
color, color_f = self.get_color(cls, func)
fn = func.split("(")[0]
tmp = """{func_name} [labelType="html" label="<span style='font-size:28px;color:{color_f}'>{func}</span><span style='color:{color};'>class:{cls}</span>"];""".format(func_name=fn, color=color,color_f=color_f,cls=cls, func=func)
return tmp
def get_color(self, cls, func):
base = 4096 // len(TracePoint.classes)
base_f = 4096 // len(TracePoint.funcs)
c = hex(base * TracePoint.classes.index(cls)).replace("0x", "#")
c_f = hex(base_f * TracePoint.funcs.index(func)).replace("0x", "#")
if len(c) < 4:
c = c + '0'* (4- len(c))
if len(c_f) < 4:
c_f = c_f + '0'* (4- len(c_f))
return c,c_f
def __repr__(self):
TEMP = """
digraph {
/* Note: HTML labels do not work in IE, which lacks support for <foreignObject> tags. */
node [rx=7 ry=7 labelStyle="font: 300 14px 'Helvetica Neue', Helvetica"]
edge [labelStyle="font: 300 14px 'Helvetica Neue', Helvetica"]
%s
}
"""
fcon = "\n\t".join([self.render_var(i) for i in TracePoint.flow])
lcon = "\n\t".join(self.render_flow())
return TEMP % (fcon + lcon)
def trace(cls):
def _func(func):
@functools.wraps(func)
def __run(*args, **kargs):
print(func.__name__, args,"|" ,kargs)
return func(*args, **kargs)
return __run
return _func
def trace_cls(method):
def _trace_cls(cls):
# Get the original implementation
orig_getattribute = cls.__getattribute__
# Make a new definition
def new_getattribute(self, name):
if name in cls.__dict__:
f = getattr(cls, name)
args = "(%s)" % ', '.join(f.__code__.co_varnames)
t = str(time.time())
if "http://" in method:
requests.post("http://localhost:12222/", data={
'class':cls.__name__,
'fun':name + args,
'time':t,
})
else:
with open(method, "a+") as fp:
s = ",".join([cls.__name__,t,name + args])
fp.write(s + "\n")
return orig_getattribute(self, name)
# Attach to the class and return
cls.__getattribute__ = new_getattribute
return cls
return _trace_cls
def main():
parser = argparse.ArgumentParser()
parser.add_argument("-l","--load",default=None,help="loadfile")
parser.add_argument("--url", default='http://localhost:12222',help="debug server")
args = parser.parse_args()
with open(args.load) as fp:
for l in fp:
cls, t, func = l.strip().split(',', 2)
requests.post(args.url, data={
'class':cls,
'fun':func,
'time':t,
})
if __name__ == '__main__':
main()
|
PypiClean
|
/zerospeech-benchmarks-0.9.4.tar.gz/zerospeech-benchmarks-0.9.4/zerospeech/leaderboards/exporters/tde17.py
|
import argparse
import functools
import sys
from io import StringIO
from pathlib import Path
from typing import Optional, List, Dict
import pandas as pd
from rich.console import Console
from zerospeech.leaderboards.tde17 import TDE17Leaderboard, TDE17Entry
from zerospeech.leaderboards.utils import open_json, clean_label, format_score
from .base import LeaderboardExporter, CSVExporter
console = Console()
void_console = Console(file=StringIO())
def restrict_entry(e: TDE17Entry) -> Dict:
_format_score = functools.partial(format_score, percent=False)
return dict(
label=clean_label(e.publication.author_short),
model_id=e.model_id,
submission_di=e.submission_id,
# EN
en_ned=_format_score(e.scores.english.nlp.ned),
en_cov=_format_score(e.scores.english.nlp.coverage),
en_wrds=e.scores.english.nlp.nwords,
# FR
fr_ned=_format_score(e.scores.french.nlp.ned),
fr_cov=_format_score(e.scores.french.nlp.coverage),
fr_wrds=e.scores.french.nlp.nwords,
# Mandarin
cmn_ned=_format_score(e.scores.mandarin.nlp.ned),
cmn_cov=_format_score(e.scores.mandarin.nlp.coverage),
cmn_wrds=e.scores.mandarin.nlp.nwords,
# Wolof
wol_ned=_format_score(e.scores.wolof.nlp.ned),
wol_cov=_format_score(e.scores.wolof.nlp.coverage),
wol_wrds=e.scores.wolof.nlp.nwords,
)
class TDE17Exporter(LeaderboardExporter, CSVExporter):
leaderboard: TDE17Leaderboard
quiet: bool = False
@property
def console(self):
if not self.quiet:
return console
return void_console
def restricted_entries(self):
return [
restrict_entry(e)
for e in self.leaderboard.data
]
@classmethod
def from_cmd(cls, argv: Optional[List[str]] = None):
argv = argv if argv is not None else sys.argv[1:]
parser = argparse.ArgumentParser("ABXLS leaderboard to CSV")
parser.add_argument('location', help='Location of leaderboard (url/path)')
parser.add_argument('-q', '--quiet', action='store_true')
parser.add_argument('-o', '--output-file', default="tde17.csv", help="File to output results")
args = parser.parse_args(argv)
if not args.quiet:
console.print("Loading...", style="bold orange3")
ld_data = open_json(args.location)
# return ld_data
return cls(
leaderboard=ld_data,
quiet=args.quiet,
output_file=Path(args.output_file)
)
def to_csv(self):
df = pd.DataFrame(self.restricted_entries())
self.console.print(f"Writing {self.output_file}...")
df.to_csv(self.output_file)
def cmd():
""" Command line entrypoint """
exp = TDE17Exporter.from_cmd()
# for entry in exp['data']:
# try:
# _ = TDE17Entry.parse_obj(entry)
# except ValidationError as e:
# print(f"failed with: {entry['model_id']}")
exp.export()
exp.console.print("Leaderboard exported successfully", style="bold green")
|
PypiClean
|
/cw_testtool-0.1.41.tar.gz/cw_testtool-0.1.41/cw_testtool/RSAtool.py
|
import os,sys
import base64
import requests
# from Crypto import Random
# from Crypto.Hash import SHA
# from Crypto.Cipher import PKCS1_v1_5 as Cipher_pkcs1_v1_5
# from Crypto.Signature import PKCS1_v1_5 as Signature_pkcs1_v1_5
# from Crypto.PublicKey import RSA
import rsa
# https://www.cnblogs.com/hhh5460/p/5243410.html
class prpcrypt():
def __init__(self,mURL,mkeyPth,gkeyPth,isCreateGkey = True,isTest = False):
self.mkeyPth = mkeyPth #master公钥保存地址
self.gkeyPth = gkeyPth #本地密钥保存地址
self.mURL = mURL #master公钥请求地址,
self.gprivate_pem = None
self.gpublic_pem = None
self.mpublic_pem = None
self.isTest = isTest
if isCreateGkey:
#生成新的公私钥对
self.createGhostKey()
# self.saveGhostKeyToFile()
else:
self.readGhostKeyFromFile()
if self.gprivate_pem == None or self.gpublic_pem == None:
self.createGhostKey()
self.saveGhostKeyToFile()
self.masterKeyFromFile()
def saveGhostKeyToFile(self):
f = open(self.gkeyPth + '/gprivate.pem','w')
if sys.version_info > (3,0):
f.write(self.gprivate_pem.save_pkcs1().decode())
else:
f.write(self.gprivate_pem.save_pkcs1())
f.close()
f = open(self.gkeyPth + '/gpublic.pem','w')
if sys.version_info > (3,0):
f.write(self.gpublic_pem.save_pkcs1().decode())
else:
f.write(self.gpublic_pem.save_pkcs1())
f.close()
def saveMPubKeyToFile(self,fname,pemkeypkcs1):
f = open(self.mkeyPth + os.sep + fname + '.pem','w')
if sys.version_info > (3,0):
f.write(pemkeypkcs1)
else:
f.write(pemkeypkcs1)
f.close()
def getGPubkey(self):
if sys.version_info > (3,0):
return self.gpublic_pem.save_pkcs1().decode()
else:
return self.gpublic_pem.save_pkcs1()
def getKeyWithPKCS1(self,pemkeypkcs1):
if sys.version_info > (3,0):
return rsa.PrivateKey.load_pkcs1(pemkeypkcs1.encode())
else:
return rsa.PrivateKey.load_pkcs1(pemkeypkcs1)
def readGhostKeyFromFile(self):
pripth = self.gkeyPth + '/gprivate.pem'
pubpth = self.gkeyPth + '/gpublic.pem'
if os.path.exists(pripth) and os.path.exists(pubpth):
f = open(pripth,'r')
if sys.version_info > (3,0):
self.gprivate_pem = rsa.PrivateKey.load_pkcs1(f.read().encode())
else:
self.gprivate_pem = rsa.PrivateKey.load_pkcs1(f.read())
f.close()
f = open(pubpth,'r')
if sys.version_info > (3,0):
self.gpublic_pem = rsa.PublicKey.load_pkcs1(f.read().encode())
else:
self.gpublic_pem = rsa.PublicKey.load_pkcs1(f.read())
f.close()
else:
print('本地RSA密钥文件不存在,可以文件丢失,请重新生成...')
#从文件读取交互对方的公钥
def masterKeyFromFile(self):
pubpth = self.mkeyPth + '/mpublic.pem'
if os.path.exists(pubpth):
f = open(pubpth,'r')
if sys.version_info > (3,0):
self.mpublic_pem = rsa.PublicKey.load_pkcs1(f.read().encode())
else:
self.mpublic_pem = rsa.PublicKey.load_pkcs1(f.read())
f.close()
else:
self.mpublic_pem = self.requestMasterPubkey()
if self.mpublic_pem != None:
self.saveMasterPubkeyToFile()
if self.mpublic_pem == None:
print('未获取到交互对方公钥...')
def saveMasterPubkeyToFile(self):
pubpth = self.mkeyPth + '/mpublic.pem'
f = open(pubpth,'w')
if sys.version_info > (3,0):
f.write(self.mpublic_pem.save_pkcs1().decode())
else:
f.write(self.mpublic_pem.save_pkcs1())
f.close()
#向服务器请求pubkey
def requestMasterPubkey(self):
if self.mURL[0:4] == 'http':
rurl = self.mURL
try:
res = requests.get(self.mURL, verify=False)
print(res.text)
return res.text
except Exception as e:
print(e)
return None
#生成自已的公私钥对
def createGhostKey(self):
# 伪随机数生成器
# random_generator = Random.new().read
# rsa算法生成实例
# rsa = RSA.generate(1024, random_generator)
# # ghost的秘钥对的生成
# #私钥
# self.gprivate_pem = rsa.exportKey()
# #公钥
# self.gpublic_pem = rsa.publickey().exportKey()
(self.gpublic_pem, self.gprivate_pem) = rsa.newkeys(1024)
#使用交互方公钥加密消息数据
def encryptWithMasterPubKey(self,msg,isBase64Out = True):
tmpmsg = msg
if sys.version_info > (3,0):
tmpmsg = msg.encode()
if isBase64Out:
cipher_text = base64.b64encode(rsa.encrypt(tmpmsg, self.mpublic_pem))
return cipher_text
else:
dmsg = rsa.encrypt(tmpmsg, self.mpublic_pem)
return dmsg
def encryptWithPubKey(self,msg,pubpemkey,isBase64Out = True):
tmpmsg = msg
if sys.version_info > (3,0):
tmpmsg = msg.encode()
if isBase64Out:
cipher_text = base64.b64encode(rsa.encrypt(tmpmsg, pubpemkey))
return cipher_text
else:
dmsg = rsa.encrypt(tmpmsg, pubpemkey)
return dmsg
#使用交互方公钥验证签名
def verifyWithMasterPubKey(self,msg,sign,isBase64In = True):
tmpmsg = msg
if sys.version_info > (3,0):
tmpmsg = msg.encode()
if isBase64In:
dmsg = base64.b64decode(tmpmsg)
vermsg = rsa.verify(dmsg, sign, self.mpublic_pem)
return vermsg
#使用本地公钥加密消息数据
def encryptWithGhostPubKey(self,msg,isBase64Out = True):
tmpmsg = msg
if sys.version_info > (3,0):
tmpmsg = msg.encode()
if isBase64Out:
cipher_text = base64.b64encode(rsa.encrypt(tmpmsg, self.gpublic_pem))
return cipher_text
else:
dmsg = rsa.encrypt(tmpmsg, self.gpublic_pem)
return dmsg
#使用本地私钥解密消息
def decryptWithGhostPriKey(self,msg,isBase64In = True):
dmsg = msg
if isBase64In:
dmsg = base64.b64decode(msg)
text = rsa.decrypt(dmsg, self.gprivate_pem)
return text
def enbase64(self,msg):
dmsg = base64.b64encode(msg)
return dmsg
def debase64(self,msg):
dmsg = base64.b64decode(msg)
return dmsg
#使用本地私钥签名消息
def signWithGhostPriKey(self,msg):
tmpmsg = msg
if sys.version_info > (3,0):
tmpmsg = msg.encode()
signature = rsa.sign(tmpmsg, self.gprivate_pem, 'SHA-1')
return signature
#使用本地公钥验证签名
def verifyWithGhostPubKey(self,msg,sign,isBase64In = True):
tmpmsg = msg
if sys.version_info > (3,0):
tmpmsg = msg.encode()
dmsg = tmpmsg
try:
if isBase64In:
dmsg = base64.b64decode(msg)
vermsg = rsa.verify(dmsg, sign, self.gpublic_pem)
return vermsg
except Exception as e:
return False
#使用本地公钥验证签名
def verifyWithPubKey(self,msg,sign,pubpemkey,isBase64In = True):
tmpmsg = msg
if sys.version_info > (3,0):
tmpmsg = msg.encode()
dmsg = tmpmsg
if isBase64In:
dmsg = base64.b64decode(msg)
vermsg = rsa.verify(dmsg, sign, pubpemkey)
return vermsg
if __name__ == '__main__':
pc = prpcrypt(mURL='', mkeyPth = '.', gkeyPth = '.',isCreateGkey = True)
msg = 'abcdefg---001'
pmsg = pc.encryptWithGhostPubKey(msg)
omsg = pc.decryptWithGhostPriKey(pmsg)
smsg = pc.signWithGhostPriKey(msg)
b64msg = pc.enbase64(msg)
vmsg = pc.verifyWithGhostPubKey(b64msg, smsg)
print('msg--->',msg)
print('pmsg-->',pmsg)
print('omsg-->',omsg)
print('smg-->',smsg)
print('vmsg-->',vmsg)
|
PypiClean
|
/NehorayRapid-0.0.1-py3-none-any.whl/mmedit/datasets/sr_vimeo90k_multiple_gt_dataset.py
|
import os.path as osp
from .base_sr_dataset import BaseSRDataset
from .registry import DATASETS
crf_test = {
0 : "BD_start_1",
15: "BD_start_1_crf_15",
25: "BD_start_1_crf_25",
35: "BD_start_1_crf_35",
}
@DATASETS.register_module()
class SRVimeo90KMultipleGTDataset(BaseSRDataset):
"""Vimeo90K dataset for video super resolution for recurrent networks.
The dataset loads several LQ (Low-Quality) frames and GT (Ground-Truth)
frames. Then it applies specified transforms and finally returns a dict
containing paired data and other information.
It reads Vimeo90K keys from the txt file. Each line contains:
1. video frame folder
2. image shape
Examples:
::
00001/0266 (256,448,3)
00001/0268 (256,448,3)
Args:
lq_folder (str | :obj:`Path`): Path to a lq folder.
gt_folder (str | :obj:`Path`): Path to a gt folder.
ann_file (str | :obj:`Path`): Path to the annotation file.
pipeline (list[dict | callable]): A sequence of data transformations.
scale (int): Upsampling scale ratio.
num_input_frames (int): Number of frames in each training sequence.
Default: 7.
test_mode (bool): Store `True` when building test dataset.
Default: `False`.
"""
def __init__(self,
lq_folder,
gt_folder,
ann_file,
pipeline,
scale,
num_input_frames=7,
test_mode=False,
crf=-1):
super().__init__(pipeline, scale, test_mode)
self.lq_folder = str(lq_folder)
self.gt_folder = str(gt_folder)
self.ann_file = str(ann_file)
self.num_input_frames = num_input_frames
self.crf = crf
if test_mode == True and crf in crf_test:
self.lq_folder = self.lq_folder.replace(crf_test[0], crf_test[self.crf])
print(self.crf, self.lq_folder)
self.data_infos = self.load_annotations()
def load_annotations(self):
"""Load annoations for Vimeo-90K dataset.
Returns:
dict: Returned dict for LQ and GT pairs.
"""
# get keys
with open(self.ann_file, 'r') as fin:
keys = [line.strip().split(' ')[0] for line in fin]
data_infos = []
for key in keys:
lq_paths = [
osp.join(self.lq_folder, key, f'im{i}.png')
for i in range(1, self.num_input_frames + 1)
]
gt_paths = [
osp.join(self.gt_folder, key, f'im{i}.png')
for i in range(1, self.num_input_frames + 1)
]
data_infos.append(
dict(lq_path=lq_paths, gt_path=gt_paths, key=key))
return data_infos
|
PypiClean
|
/amundsen_databuilder-7.4.4-py3-none-any.whl/databuilder/extractor/base_postgres_metadata_extractor.py
|
import abc
import logging
from collections import namedtuple
from itertools import groupby
from typing import (
Any, Dict, Iterator, Union,
)
from pyhocon import ConfigFactory, ConfigTree
from databuilder import Scoped
from databuilder.extractor.base_extractor import Extractor
from databuilder.extractor.sql_alchemy_extractor import SQLAlchemyExtractor
from databuilder.models.table_metadata import ColumnMetadata, TableMetadata
TableKey = namedtuple('TableKey', ['schema', 'table_name'])
LOGGER = logging.getLogger(__name__)
class BasePostgresMetadataExtractor(Extractor):
"""
Extracts Postgres table and column metadata from underlying meta store database using SQLAlchemyExtractor
"""
# CONFIG KEYS
WHERE_CLAUSE_SUFFIX_KEY = 'where_clause_suffix'
CLUSTER_KEY = 'cluster_key'
USE_CATALOG_AS_CLUSTER_NAME = 'use_catalog_as_cluster_name'
DATABASE_KEY = 'database_key'
# Default values
DEFAULT_CLUSTER_NAME = 'master'
DEFAULT_CONFIG = ConfigFactory.from_dict(
{WHERE_CLAUSE_SUFFIX_KEY: 'true', CLUSTER_KEY: DEFAULT_CLUSTER_NAME, USE_CATALOG_AS_CLUSTER_NAME: True}
)
@abc.abstractmethod
def get_sql_statement(self, use_catalog_as_cluster_name: bool, where_clause_suffix: str) -> Any:
"""
:return: Provides a record or None if no more to extract
"""
return None
def init(self, conf: ConfigTree) -> None:
conf = conf.with_fallback(BasePostgresMetadataExtractor.DEFAULT_CONFIG)
self._cluster = conf.get_string(BasePostgresMetadataExtractor.CLUSTER_KEY)
self._database = conf.get_string(BasePostgresMetadataExtractor.DATABASE_KEY, default='postgres')
self.sql_stmt = self.get_sql_statement(
use_catalog_as_cluster_name=conf.get_bool(BasePostgresMetadataExtractor.USE_CATALOG_AS_CLUSTER_NAME),
where_clause_suffix=conf.get_string(BasePostgresMetadataExtractor.WHERE_CLAUSE_SUFFIX_KEY),
)
self._alchemy_extractor = SQLAlchemyExtractor()
sql_alch_conf = Scoped.get_scoped_conf(conf, self._alchemy_extractor.get_scope())\
.with_fallback(ConfigFactory.from_dict({SQLAlchemyExtractor.EXTRACT_SQL: self.sql_stmt}))
self.sql_stmt = sql_alch_conf.get_string(SQLAlchemyExtractor.EXTRACT_SQL)
LOGGER.info('SQL for postgres metadata: %s', self.sql_stmt)
self._alchemy_extractor.init(sql_alch_conf)
self._extract_iter: Union[None, Iterator] = None
def extract(self) -> Union[TableMetadata, None]:
if not self._extract_iter:
self._extract_iter = self._get_extract_iter()
try:
return next(self._extract_iter)
except StopIteration:
return None
def _get_extract_iter(self) -> Iterator[TableMetadata]:
"""
Using itertools.groupby and raw level iterator, it groups to table and yields TableMetadata
:return:
"""
for key, group in groupby(self._get_raw_extract_iter(), self._get_table_key):
columns = []
for row in group:
last_row = row
columns.append(ColumnMetadata(row['col_name'], row['col_description'],
row['col_type'], row['col_sort_order']))
yield TableMetadata(self._database, last_row['cluster'],
last_row['schema'],
last_row['name'],
last_row['description'],
columns)
def _get_raw_extract_iter(self) -> Iterator[Dict[str, Any]]:
"""
Provides iterator of result row from SQLAlchemy extractor
:return:
"""
row = self._alchemy_extractor.extract()
while row:
yield row
row = self._alchemy_extractor.extract()
def _get_table_key(self, row: Dict[str, Any]) -> Union[TableKey, None]:
"""
Table key consists of schema and table name
:param row:
:return:
"""
if row:
return TableKey(schema=row['schema'], table_name=row['name'])
return None
|
PypiClean
|
/pulumi_azure_native-2.5.1a1693590910.tar.gz/pulumi_azure_native-2.5.1a1693590910/pulumi_azure_native/web/list_web_app_site_push_settings.py
|
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = [
'ListWebAppSitePushSettingsResult',
'AwaitableListWebAppSitePushSettingsResult',
'list_web_app_site_push_settings',
'list_web_app_site_push_settings_output',
]
@pulumi.output_type
class ListWebAppSitePushSettingsResult:
"""
Push settings for the App.
"""
def __init__(__self__, dynamic_tags_json=None, id=None, is_push_enabled=None, kind=None, name=None, tag_whitelist_json=None, tags_requiring_auth=None, type=None):
if dynamic_tags_json and not isinstance(dynamic_tags_json, str):
raise TypeError("Expected argument 'dynamic_tags_json' to be a str")
pulumi.set(__self__, "dynamic_tags_json", dynamic_tags_json)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if is_push_enabled and not isinstance(is_push_enabled, bool):
raise TypeError("Expected argument 'is_push_enabled' to be a bool")
pulumi.set(__self__, "is_push_enabled", is_push_enabled)
if kind and not isinstance(kind, str):
raise TypeError("Expected argument 'kind' to be a str")
pulumi.set(__self__, "kind", kind)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if tag_whitelist_json and not isinstance(tag_whitelist_json, str):
raise TypeError("Expected argument 'tag_whitelist_json' to be a str")
pulumi.set(__self__, "tag_whitelist_json", tag_whitelist_json)
if tags_requiring_auth and not isinstance(tags_requiring_auth, str):
raise TypeError("Expected argument 'tags_requiring_auth' to be a str")
pulumi.set(__self__, "tags_requiring_auth", tags_requiring_auth)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter(name="dynamicTagsJson")
def dynamic_tags_json(self) -> Optional[str]:
"""
Gets or sets a JSON string containing a list of dynamic tags that will be evaluated from user claims in the push registration endpoint.
"""
return pulumi.get(self, "dynamic_tags_json")
@property
@pulumi.getter
def id(self) -> str:
"""
Resource Id.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="isPushEnabled")
def is_push_enabled(self) -> bool:
"""
Gets or sets a flag indicating whether the Push endpoint is enabled.
"""
return pulumi.get(self, "is_push_enabled")
@property
@pulumi.getter
def kind(self) -> Optional[str]:
"""
Kind of resource.
"""
return pulumi.get(self, "kind")
@property
@pulumi.getter
def name(self) -> str:
"""
Resource Name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="tagWhitelistJson")
def tag_whitelist_json(self) -> Optional[str]:
"""
Gets or sets a JSON string containing a list of tags that are whitelisted for use by the push registration endpoint.
"""
return pulumi.get(self, "tag_whitelist_json")
@property
@pulumi.getter(name="tagsRequiringAuth")
def tags_requiring_auth(self) -> Optional[str]:
"""
Gets or sets a JSON string containing a list of tags that require user authentication to be used in the push registration endpoint.
Tags can consist of alphanumeric characters and the following:
'_', '@', '#', '.', ':', '-'.
Validation should be performed at the PushRequestHandler.
"""
return pulumi.get(self, "tags_requiring_auth")
@property
@pulumi.getter
def type(self) -> str:
"""
Resource type.
"""
return pulumi.get(self, "type")
class AwaitableListWebAppSitePushSettingsResult(ListWebAppSitePushSettingsResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return ListWebAppSitePushSettingsResult(
dynamic_tags_json=self.dynamic_tags_json,
id=self.id,
is_push_enabled=self.is_push_enabled,
kind=self.kind,
name=self.name,
tag_whitelist_json=self.tag_whitelist_json,
tags_requiring_auth=self.tags_requiring_auth,
type=self.type)
def list_web_app_site_push_settings(name: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableListWebAppSitePushSettingsResult:
"""
Description for Gets the Push settings associated with web app.
Azure REST API version: 2022-09-01.
:param str name: Name of web app.
:param str resource_group_name: Name of the resource group to which the resource belongs.
"""
__args__ = dict()
__args__['name'] = name
__args__['resourceGroupName'] = resource_group_name
opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
__ret__ = pulumi.runtime.invoke('azure-native:web:listWebAppSitePushSettings', __args__, opts=opts, typ=ListWebAppSitePushSettingsResult).value
return AwaitableListWebAppSitePushSettingsResult(
dynamic_tags_json=pulumi.get(__ret__, 'dynamic_tags_json'),
id=pulumi.get(__ret__, 'id'),
is_push_enabled=pulumi.get(__ret__, 'is_push_enabled'),
kind=pulumi.get(__ret__, 'kind'),
name=pulumi.get(__ret__, 'name'),
tag_whitelist_json=pulumi.get(__ret__, 'tag_whitelist_json'),
tags_requiring_auth=pulumi.get(__ret__, 'tags_requiring_auth'),
type=pulumi.get(__ret__, 'type'))
@_utilities.lift_output_func(list_web_app_site_push_settings)
def list_web_app_site_push_settings_output(name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[ListWebAppSitePushSettingsResult]:
"""
Description for Gets the Push settings associated with web app.
Azure REST API version: 2022-09-01.
:param str name: Name of web app.
:param str resource_group_name: Name of the resource group to which the resource belongs.
"""
...
|
PypiClean
|
/msgraph_beta_sdk-1.0.0a9-py3-none-any.whl/msgraph/generated/information_protection/sensitivity_labels/evaluate/evaluate_post_request_body.py
|
from __future__ import annotations
from kiota_abstractions.serialization import AdditionalDataHolder, Parsable, ParseNode, SerializationWriter
from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING, Union
if TYPE_CHECKING:
from ....models import current_label, discovered_sensitive_type
class EvaluatePostRequestBody(AdditionalDataHolder, Parsable):
def __init__(self,) -> None:
"""
Instantiates a new evaluatePostRequestBody and sets the default values.
"""
# Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
self._additional_data: Dict[str, Any] = {}
# The currentLabel property
self._current_label: Optional[current_label.CurrentLabel] = None
# The discoveredSensitiveTypes property
self._discovered_sensitive_types: Optional[List[discovered_sensitive_type.DiscoveredSensitiveType]] = None
@property
def additional_data(self,) -> Dict[str, Any]:
"""
Gets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
Returns: Dict[str, Any]
"""
return self._additional_data
@additional_data.setter
def additional_data(self,value: Dict[str, Any]) -> None:
"""
Sets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
Args:
value: Value to set for the AdditionalData property.
"""
self._additional_data = value
@staticmethod
def create_from_discriminator_value(parse_node: Optional[ParseNode] = None) -> EvaluatePostRequestBody:
"""
Creates a new instance of the appropriate class based on discriminator value
Args:
parseNode: The parse node to use to read the discriminator value and create the object
Returns: EvaluatePostRequestBody
"""
if parse_node is None:
raise Exception("parse_node cannot be undefined")
return EvaluatePostRequestBody()
@property
def current_label(self,) -> Optional[current_label.CurrentLabel]:
"""
Gets the currentLabel property value. The currentLabel property
Returns: Optional[current_label.CurrentLabel]
"""
return self._current_label
@current_label.setter
def current_label(self,value: Optional[current_label.CurrentLabel] = None) -> None:
"""
Sets the currentLabel property value. The currentLabel property
Args:
value: Value to set for the current_label property.
"""
self._current_label = value
@property
def discovered_sensitive_types(self,) -> Optional[List[discovered_sensitive_type.DiscoveredSensitiveType]]:
"""
Gets the discoveredSensitiveTypes property value. The discoveredSensitiveTypes property
Returns: Optional[List[discovered_sensitive_type.DiscoveredSensitiveType]]
"""
return self._discovered_sensitive_types
@discovered_sensitive_types.setter
def discovered_sensitive_types(self,value: Optional[List[discovered_sensitive_type.DiscoveredSensitiveType]] = None) -> None:
"""
Sets the discoveredSensitiveTypes property value. The discoveredSensitiveTypes property
Args:
value: Value to set for the discovered_sensitive_types property.
"""
self._discovered_sensitive_types = value
def get_field_deserializers(self,) -> Dict[str, Callable[[ParseNode], None]]:
"""
The deserialization information for the current model
Returns: Dict[str, Callable[[ParseNode], None]]
"""
from ....models import current_label, discovered_sensitive_type
fields: Dict[str, Callable[[Any], None]] = {
"currentLabel": lambda n : setattr(self, 'current_label', n.get_object_value(current_label.CurrentLabel)),
"discoveredSensitiveTypes": lambda n : setattr(self, 'discovered_sensitive_types', n.get_collection_of_object_values(discovered_sensitive_type.DiscoveredSensitiveType)),
}
return fields
def serialize(self,writer: SerializationWriter) -> None:
"""
Serializes information the current object
Args:
writer: Serialization writer to use to serialize this model
"""
if writer is None:
raise Exception("writer cannot be undefined")
writer.write_object_value("currentLabel", self.current_label)
writer.write_collection_of_object_values("discoveredSensitiveTypes", self.discovered_sensitive_types)
writer.write_additional_data_value(self.additional_data)
|
PypiClean
|
/rxn_chem_utils-1.3.0-py3-none-any.whl/rxn/chemutils/smiles_augmenter.py
|
import logging
import random
from typing import Callable, List
from .miscellaneous import apply_to_any_smiles, apply_to_smiles_groups
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
class SmilesAugmenter:
"""
Class to augment any kind of SMILES string with the help of randomization
and shuffling.
"""
def __init__(
self,
augmentation_fn: Callable[[str], str],
augmentation_probability: float = 1.0,
shuffle: bool = True,
ignore_exceptions: bool = True,
):
"""
Args:
augmentation_fn: Function for augmenting the individual SMILES strings,
such as the functions provided in smiles_randomization.py.
augmentation_probability: Probability with which to augment individual
SMILES strings.
shuffle: Whether to shuffle the order of the compounds.
ignore_exceptions: Whether to ignore the error (and return the
original string) when an augmentation fails. If False, exceptions
will be propagated.
"""
self.augmentation_fn = augmentation_fn
self.augmentation_probability = augmentation_probability
self.shuffle = shuffle
self.ignore_exceptions = ignore_exceptions
def augment(self, smiles: str, number_augmentations: int) -> List[str]:
"""
Augment one SMILES string (of any kind).
Args:
smiles: SMILES string to augment.
number_augmentations: how many times to do the augmentation.
"""
# augmentation of the individual compound SMILES
augmented = [
apply_to_any_smiles(
smiles, self._augment_with_probability, force_multicomponent=True
)
for _ in range(number_augmentations)
]
# shuffle the order of the compounds
if self.shuffle:
augmented = [
apply_to_smiles_groups(s, SmilesAugmenter._shuffle) for s in augmented
]
return augmented
def _augment_with_probability(self, smiles: str) -> str:
"""Augmentat a SMILES, with the probability given by the member variable."""
# Note: no need to call random.uniform if the augmentation probability is 1.0.
if (
self.augmentation_probability == 1.0
or random.uniform(0, 1) <= self.augmentation_probability
):
try:
return self.augmentation_fn(smiles)
except Exception as e:
if self.ignore_exceptions:
logger.warning(f"Augmentation failed for {smiles}: {e}")
return smiles
else:
raise
# no augmentation
return smiles
@staticmethod
def _shuffle(smiles_list: List[str]) -> List[str]:
smiles_list = smiles_list.copy()
random.shuffle(smiles_list)
return smiles_list
|
PypiClean
|
/text_animator-0.1.0-py3-none-any.whl/text_animator/animator.py
|
import random
from time import sleep
from enum import Enum
from ascii_animator import Animator, Animation, Speed
class Effect(Enum):
RANDOM = 1
LEFT_TO_RIGHT = 2
RIGHT_TO_LEFT = 3
class Border():
def __init__(self, tm=1, bm=1, lm=1, rm=1, tp=1, bp=1, lp=1, rp=1):
self.tm = tm
self.bm = bm
self.lm = lm
self.rm = rm
self.tp = tp
self.bp = bp
self.lp = lp
self.rp = rp
def get_border(self, text_length, text_height):
border = []
# right edge
re = self.lm + self.lp + 1 + text_length + self.rp # 1 is for the left border char
# top edge
te = self.tm
# bottom edge
be = self.tm + 1 + self.tp + text_height + self.bp # 1 is for the top border char
# right bottom edge
border.append(((be, re), 9499)) # ┛
# right bottom padding - right text height - right top padding
y = be
for _ in range(self.bp + text_height + self.tp):
y -= 1
border.append(((y, re), 9475)) # ┃
# right top edge
border.append(((self.tm, re), 9491)) # ┓
# upper border
for index in range(re, self.lm, -1): # ━ 9473
border.append(((self.tm, index), 9473))
# left top edge
border.append(((self.tm, self.lm), 9487)) # ┏ 9487
# left top padding - left text height - left bottom padding
y = te
for _ in range(self.tp + text_height + self.bp):
y += 1
border.append(((y, self.lm), 9475)) # ┃ 9475
# left bottom edge
border.append(((be, self.lm), 9495)) # ┗ 9495
# lower border
for index in range(self.lm, re):
border.append(((be, index), 9473)) # ━ 9473
return border
@property
def text_start_pos(self):
return((self.tm + 1 + self.tp, self.lm + 1 + self.lp))
@property
def height(self):
return(self.tm + 1 + self.tp + self.bp + 1 + self.bm)
@property
def length(self):
return(self.lm + 1 + self.lp + self.rp + 1 + self.rm)
class TextAnimation(Animation):
def __init__(self, text, effect=Effect.LEFT_TO_RIGHT, speed=Speed.FAST, border=None):
self.text = text
self.effect = effect
self.speed = speed
self.text_lines = [line.strip() for line in self.text.splitlines() if line]
text_length = len(max(self.text_lines, key=len))
text_height = len(self.text_lines)
length = text_length
height = text_height
self.border = border
if self.border:
height += self.border.height
length += self.border.length
self.y_size = height
self.x_size = length
self.clear_grid()
self._text = self._init_text()
self._border = None
if self.border:
self._border = border.get_border(text_length, text_height)
def _init_text(self):
def _add_random():
chars = []
for line_count, line in enumerate(self.text_lines):
for item in random.sample(list(enumerate(line)), len(line)):
index = item[0]
char = item[1]
chars.append(((y_start_pos + line_count, x_start_pos + index), ord(char)))
for item in random.sample(chars, len(chars)):
items.append(item)
def _add_left_to_right():
chars = []
for line_count, line in enumerate(self.text_lines):
for index, char in enumerate(line):
chars.append(((y_start_pos + line_count, x_start_pos + index), ord(char)))
for item in reversed(chars):
items.append(item)
def _add_right_to_left():
for line_count, line in enumerate(self.text_lines):
for index, char in enumerate(line):
items.append(((y_start_pos + line_count, x_start_pos + index), ord(char)))
y_start_pos = 0
x_start_pos = 0
if self.border:
(y_start_pos, x_start_pos) = self.border.text_start_pos
items = []
if self.effect == Effect.RANDOM:
_add_random()
elif self.effect == Effect.LEFT_TO_RIGHT:
_add_left_to_right()
elif self.effect == Effect.RIGHT_TO_LEFT:
_add_right_to_left()
return items
def _update_grid(self, items):
item = items.pop()
y_pos = item[0][0]
x_pos = item[0][1]
ucode = item[1]
self._grid[y_pos][x_pos] = chr(ucode)
@property
def grid(self):
return self._grid
def clear_grid(self):
self._grid = [[' ' for x in range(self.x_size)] for y in range(self.y_size)]
def cycle(self):
if self._text:
self._update_grid(self._text)
else:
if self._border:
self._update_grid(self._border)
else:
return True
def __call__(self):
Animator(animation=self, speed=self.speed, show_axis=False, max_loops=1)
|
PypiClean
|
/yt-dlp-2023.7.6.tar.gz/yt-dlp-2023.7.6/yt_dlp/extractor/alsace20tv.py
|
from .common import InfoExtractor
from ..utils import (
clean_html,
dict_get,
get_element_by_class,
int_or_none,
unified_strdate,
url_or_none,
)
class Alsace20TVBaseIE(InfoExtractor):
def _extract_video(self, video_id, url=None):
info = self._download_json(
'https://www.alsace20.tv/visionneuse/visio_v9_js.php?key=%s&habillage=0&mode=html' % (video_id, ),
video_id) or {}
title = info.get('titre')
formats = []
for res, fmt_url in (info.get('files') or {}).items():
formats.extend(
self._extract_smil_formats(fmt_url, video_id, fatal=False)
if '/smil:_' in fmt_url
else self._extract_mpd_formats(fmt_url, video_id, mpd_id=res, fatal=False))
webpage = (url and self._download_webpage(url, video_id, fatal=False)) or ''
thumbnail = url_or_none(dict_get(info, ('image', 'preview', )) or self._og_search_thumbnail(webpage))
upload_date = self._search_regex(r'/(\d{6})_', thumbnail, 'upload_date', default=None)
upload_date = unified_strdate('20%s-%s-%s' % (upload_date[:2], upload_date[2:4], upload_date[4:])) if upload_date else None
return {
'id': video_id,
'title': title,
'formats': formats,
'description': clean_html(get_element_by_class('wysiwyg', webpage)),
'upload_date': upload_date,
'thumbnail': thumbnail,
'duration': int_or_none(self._og_search_property('video:duration', webpage) if webpage else None),
'view_count': int_or_none(info.get('nb_vues')),
}
class Alsace20TVIE(Alsace20TVBaseIE):
_VALID_URL = r'https?://(?:www\.)?alsace20\.tv/(?:[\w-]+/)+[\w-]+-(?P<id>[\w]+)'
_TESTS = [{
'url': 'https://www.alsace20.tv/VOD/Actu/JT/Votre-JT-jeudi-3-fevrier-lyNHCXpYJh.html',
'info_dict': {
'id': 'lyNHCXpYJh',
'ext': 'mp4',
'description': 'md5:fc0bc4a0692d3d2dba4524053de4c7b7',
'title': 'Votre JT du jeudi 3 février',
'upload_date': '20220203',
'thumbnail': r're:https?://.+\.jpg',
'duration': 1073,
'view_count': int,
},
}]
def _real_extract(self, url):
video_id = self._match_id(url)
return self._extract_video(video_id, url)
class Alsace20TVEmbedIE(Alsace20TVBaseIE):
_VALID_URL = r'https?://(?:www\.)?alsace20\.tv/emb/(?P<id>[\w]+)'
_TESTS = [{
'url': 'https://www.alsace20.tv/emb/lyNHCXpYJh',
# 'md5': 'd91851bf9af73c0ad9b2cdf76c127fbb',
'info_dict': {
'id': 'lyNHCXpYJh',
'ext': 'mp4',
'title': 'Votre JT du jeudi 3 février',
'upload_date': '20220203',
'thumbnail': r're:https?://.+\.jpg',
'view_count': int,
},
'params': {
'format': 'bestvideo',
},
}]
def _real_extract(self, url):
video_id = self._match_id(url)
return self._extract_video(video_id)
|
PypiClean
|
/nni_daily-1.5.2005180104-py3-none-manylinux1_x86_64.whl/nni_daily-1.5.2005180104.data/data/nni/node_modules/ssh2/README.md
|
# Description
SSH2 client and server modules written in pure JavaScript for [node.js](http://nodejs.org/).
Development/testing is done against OpenSSH (7.1 currently).
[Changes from v0.4.x-v0.5.x](https://github.com/mscdex/ssh2/wiki/Changes-from-0.4.x-to-0.5.x)
[](https://travis-ci.org/mscdex/ssh2)
# Table of Contents
* [Requirements](#requirements)
* [Installation](#installation)
* [Client Examples](#client-examples)
* [Execute `uptime` on a server](#execute-uptime-on-a-server)
* [Start an interactive shell session](#start-an-interactive-shell-session)
* [Send a raw HTTP request to port 80 on the server](#send-a-raw-http-request-to-port-80-on-the-server)
* [Forward local connections to port 8000 on the server to us](#forward-local-connections-to-port-8000-on-the-server-to-us)
* [Get a directory listing via SFTP](#get-a-directory-listing-via-sftp)
* [Connection hopping](#connection-hopping)
* [Forward remote X11 connections](#forward-remote-x11-connections)
* [Dynamic (1:1) port forwarding using a SOCKSv5 proxy (using `socksv5`)](#dynamic-11-port-forwarding-using-a-socksv5-proxy-using-socksv5)
* [Invoke an arbitrary subsystem (e.g. netconf)](#invoke-an-arbitrary-subsystem)
* [Server Examples](#server-examples)
* [Password and public key authentication and non-interactive (exec) command execution](#password-and-public-key-authentication-and-non-interactive-exec-command-execution)
* [SFTP-only server](#sftp-only-server)
* [API](#api)
* [Client](#client)
* [Client events](#client-events)
* [Client methods](#client-methods)
* [Server](#server)
* [Server events](#server-events)
* [Server methods](#server-methods)
* [Connection events](#connection-events)
* [Connection methods](#connection-methods)
* [Session events](#session-events)
* [Channel](#channel)
* [Pseudo-TTY settings](#pseudo-tty-settings)
* [Terminal modes](#terminal-modes)
## Requirements
* [node.js](http://nodejs.org/) -- v4.5.0 or newer
## Installation
npm install ssh2
## Client Examples
### Execute `uptime` on a server
```js
var Client = require('ssh2').Client;
var conn = new Client();
conn.on('ready', function() {
console.log('Client :: ready');
conn.exec('uptime', function(err, stream) {
if (err) throw err;
stream.on('close', function(code, signal) {
console.log('Stream :: close :: code: ' + code + ', signal: ' + signal);
conn.end();
}).on('data', function(data) {
console.log('STDOUT: ' + data);
}).stderr.on('data', function(data) {
console.log('STDERR: ' + data);
});
});
}).connect({
host: '192.168.100.100',
port: 22,
username: 'frylock',
privateKey: require('fs').readFileSync('/here/is/my/key')
});
// example output:
// Client :: ready
// STDOUT: 17:41:15 up 22 days, 18:09, 1 user, load average: 0.00, 0.01, 0.05
//
// Stream :: exit :: code: 0, signal: undefined
// Stream :: close
```
### Start an interactive shell session
```js
var Client = require('ssh2').Client;
var conn = new Client();
conn.on('ready', function() {
console.log('Client :: ready');
conn.shell(function(err, stream) {
if (err) throw err;
stream.on('close', function() {
console.log('Stream :: close');
conn.end();
}).on('data', function(data) {
console.log('STDOUT: ' + data);
}).stderr.on('data', function(data) {
console.log('STDERR: ' + data);
});
stream.end('ls -l\nexit\n');
});
}).connect({
host: '192.168.100.100',
port: 22,
username: 'frylock',
privateKey: require('fs').readFileSync('/here/is/my/key')
});
// example output:
// Client :: ready
// STDOUT: Last login: Sun Jun 15 09:37:21 2014 from 192.168.100.100
//
// STDOUT: ls -l
// exit
//
// STDOUT: frylock@athf:~$ ls -l
//
// STDOUT: total 8
//
// STDOUT: drwxr-xr-x 2 frylock frylock 4096 Nov 18 2012 mydir
//
// STDOUT: -rw-r--r-- 1 frylock frylock 25 Apr 11 2013 test.txt
//
// STDOUT: frylock@athf:~$ exit
//
// STDOUT: logout
//
// Stream :: close
```
### Send a raw HTTP request to port 80 on the server
```js
var Client = require('ssh2').Client;
var conn = new Client();
conn.on('ready', function() {
console.log('Client :: ready');
conn.forwardOut('192.168.100.102', 8000, '127.0.0.1', 80, function(err, stream) {
if (err) throw err;
stream.on('close', function() {
console.log('TCP :: CLOSED');
conn.end();
}).on('data', function(data) {
console.log('TCP :: DATA: ' + data);
}).end([
'HEAD / HTTP/1.1',
'User-Agent: curl/7.27.0',
'Host: 127.0.0.1',
'Accept: */*',
'Connection: close',
'',
''
].join('\r\n'));
});
}).connect({
host: '192.168.100.100',
port: 22,
username: 'frylock',
password: 'nodejsrules'
});
// example output:
// Client :: ready
// TCP :: DATA: HTTP/1.1 200 OK
// Date: Thu, 15 Nov 2012 13:52:58 GMT
// Server: Apache/2.2.22 (Ubuntu)
// X-Powered-By: PHP/5.4.6-1ubuntu1
// Last-Modified: Thu, 01 Jan 1970 00:00:00 GMT
// Content-Encoding: gzip
// Vary: Accept-Encoding
// Connection: close
// Content-Type: text/html; charset=UTF-8
//
//
// TCP :: CLOSED
```
### Forward local connections to port 8000 on the server to us
```js
var Client = require('ssh2').Client;
var conn = new Client();
conn.on('ready', function() {
console.log('Client :: ready');
conn.forwardIn('127.0.0.1', 8000, function(err) {
if (err) throw err;
console.log('Listening for connections on server on port 8000!');
});
}).on('tcp connection', function(info, accept, reject) {
console.log('TCP :: INCOMING CONNECTION:');
console.dir(info);
accept().on('close', function() {
console.log('TCP :: CLOSED');
}).on('data', function(data) {
console.log('TCP :: DATA: ' + data);
}).end([
'HTTP/1.1 404 Not Found',
'Date: Thu, 15 Nov 2012 02:07:58 GMT',
'Server: ForwardedConnection',
'Content-Length: 0',
'Connection: close',
'',
''
].join('\r\n'));
}).connect({
host: '192.168.100.100',
port: 22,
username: 'frylock',
password: 'nodejsrules'
});
// example output:
// Client :: ready
// Listening for connections on server on port 8000!
// (.... then from another terminal on the server: `curl -I http://127.0.0.1:8000`)
// TCP :: INCOMING CONNECTION: { destIP: '127.0.0.1',
// destPort: 8000,
// srcIP: '127.0.0.1',
// srcPort: 41969 }
// TCP DATA: HEAD / HTTP/1.1
// User-Agent: curl/7.27.0
// Host: 127.0.0.1:8000
// Accept: */*
//
//
// TCP :: CLOSED
```
### Get a directory listing via SFTP
```js
var Client = require('ssh2').Client;
var conn = new Client();
conn.on('ready', function() {
console.log('Client :: ready');
conn.sftp(function(err, sftp) {
if (err) throw err;
sftp.readdir('foo', function(err, list) {
if (err) throw err;
console.dir(list);
conn.end();
});
});
}).connect({
host: '192.168.100.100',
port: 22,
username: 'frylock',
password: 'nodejsrules'
});
// example output:
// Client :: ready
// [ { filename: 'test.txt',
// longname: '-rw-r--r-- 1 frylock frylock 12 Nov 18 11:05 test.txt',
// attrs:
// { size: 12,
// uid: 1000,
// gid: 1000,
// mode: 33188,
// atime: 1353254750,
// mtime: 1353254744 } },
// { filename: 'mydir',
// longname: 'drwxr-xr-x 2 frylock frylock 4096 Nov 18 15:03 mydir',
// attrs:
// { size: 1048576,
// uid: 1000,
// gid: 1000,
// mode: 16877,
// atime: 1353269007,
// mtime: 1353269007 } } ]
```
### Connection hopping
```js
var Client = require('ssh2').Client;
var conn1 = new Client();
var conn2 = new Client();
conn1.on('ready', function() {
console.log('FIRST :: connection ready');
conn1.exec('nc 192.168.1.2 22', function(err, stream) {
if (err) {
console.log('FIRST :: exec error: ' + err);
return conn1.end();
}
conn2.connect({
sock: stream,
username: 'user2',
password: 'password2',
});
});
}).connect({
host: '192.168.1.1',
username: 'user1',
password: 'password1',
});
conn2.on('ready', function() {
console.log('SECOND :: connection ready');
conn2.exec('uptime', function(err, stream) {
if (err) {
console.log('SECOND :: exec error: ' + err);
return conn1.end();
}
stream.on('end', function() {
conn1.end(); // close parent (and this) connection
}).on('data', function(data) {
console.log(data.toString());
});
});
});
```
### Forward remote X11 connections
```js
var net = require('net');
var Client = require('ssh2').Client;
var conn = new Client();
conn.on('x11', function(info, accept, reject) {
var xserversock = new net.Socket();
xserversock.on('connect', function() {
var xclientsock = accept();
xclientsock.pipe(xserversock).pipe(xclientsock);
});
// connects to localhost:0.0
xserversock.connect(6000, 'localhost');
});
conn.on('ready', function() {
conn.exec('xeyes', { x11: true }, function(err, stream) {
if (err) throw err;
var code = 0;
stream.on('end', function() {
if (code !== 0)
console.log('Do you have X11 forwarding enabled on your SSH server?');
conn.end();
}).on('exit', function(exitcode) {
code = exitcode;
});
});
}).connect({
host: '192.168.1.1',
username: 'foo',
password: 'bar'
});
```
### Dynamic (1:1) port forwarding using a SOCKSv5 proxy (using [socksv5](https://github.com/mscdex/socksv5))
```js
var socks = require('socksv5');
var Client = require('ssh2').Client;
var ssh_config = {
host: '192.168.100.1',
port: 22,
username: 'nodejs',
password: 'rules'
};
socks.createServer(function(info, accept, deny) {
// NOTE: you could just use one ssh2 client connection for all forwards, but
// you could run into server-imposed limits if you have too many forwards open
// at any given time
var conn = new Client();
conn.on('ready', function() {
conn.forwardOut(info.srcAddr,
info.srcPort,
info.dstAddr,
info.dstPort,
function(err, stream) {
if (err) {
conn.end();
return deny();
}
var clientSocket;
if (clientSocket = accept(true)) {
stream.pipe(clientSocket).pipe(stream).on('close', function() {
conn.end();
});
} else
conn.end();
});
}).on('error', function(err) {
deny();
}).connect(ssh_config);
}).listen(1080, 'localhost', function() {
console.log('SOCKSv5 proxy server started on port 1080');
}).useAuth(socks.auth.None());
// test with cURL:
// curl -i --socks5 localhost:1080 google.com
```
### Invoke an arbitrary subsystem
```js
var Client = require('ssh2').Client;
var xmlhello = '<?xml version="1.0" encoding="UTF-8"?>' +
'<hello xmlns="urn:ietf:params:xml:ns:netconf:base:1.0">' +
' <capabilities>' +
' <capability>urn:ietf:params:netconf:base:1.0</capability>' +
' </capabilities>' +
'</hello>]]>]]>';
var conn = new Client();
conn.on('ready', function() {
console.log('Client :: ready');
conn.subsys('netconf', function(err, stream) {
if (err) throw err;
stream.on('data', function(data) {
console.log(data);
}).write(xmlhello);
});
}).connect({
host: '1.2.3.4',
port: 22,
username: 'blargh',
password: 'honk'
});
```
## Server Examples
### Password and public key authentication and non-interactive (exec) command execution
```js
var fs = require('fs');
var crypto = require('crypto');
var inspect = require('util').inspect;
var buffersEqual = require('buffer-equal-constant-time');
var ssh2 = require('ssh2');
var utils = ssh2.utils;
var pubKey = utils.genPublicKey(utils.parseKey(fs.readFileSync('user.pub')));
new ssh2.Server({
hostKeys: [fs.readFileSync('host.key')]
}, function(client) {
console.log('Client connected!');
client.on('authentication', function(ctx) {
if (ctx.method === 'password'
// Note: Don't do this in production code, see
// https://www.brendanlong.com/timing-attacks-and-usernames.html
// In node v6.0.0+, you can use `crypto.timingSafeEqual()` to safely
// compare two values.
&& ctx.username === 'foo'
&& ctx.password === 'bar')
ctx.accept();
else if (ctx.method === 'publickey'
&& ctx.key.algo === pubKey.fulltype
&& buffersEqual(ctx.key.data, pubKey.public)) {
if (ctx.signature) {
var verifier = crypto.createVerify(ctx.sigAlgo);
verifier.update(ctx.blob);
if (verifier.verify(pubKey.publicOrig, ctx.signature))
ctx.accept();
else
ctx.reject();
} else {
// if no signature present, that means the client is just checking
// the validity of the given public key
ctx.accept();
}
} else
ctx.reject();
}).on('ready', function() {
console.log('Client authenticated!');
client.on('session', function(accept, reject) {
var session = accept();
session.once('exec', function(accept, reject, info) {
console.log('Client wants to execute: ' + inspect(info.command));
var stream = accept();
stream.stderr.write('Oh no, the dreaded errors!\n');
stream.write('Just kidding about the errors!\n');
stream.exit(0);
stream.end();
});
});
}).on('end', function() {
console.log('Client disconnected');
});
}).listen(0, '127.0.0.1', function() {
console.log('Listening on port ' + this.address().port);
});
```
### SFTP-only server
```js
var fs = require('fs');
var ssh2 = require('ssh2');
var OPEN_MODE = ssh2.SFTP_OPEN_MODE;
var STATUS_CODE = ssh2.SFTP_STATUS_CODE;
new ssh2.Server({
hostKeys: [fs.readFileSync('host.key')]
}, function(client) {
console.log('Client connected!');
client.on('authentication', function(ctx) {
if (ctx.method === 'password'
// Note: Don't do this in production code, see
// https://www.brendanlong.com/timing-attacks-and-usernames.html
// In node v6.0.0+, you can use `crypto.timingSafeEqual()` to safely
// compare two values.
&& ctx.username === 'foo'
&& ctx.password === 'bar')
ctx.accept();
else
ctx.reject();
}).on('ready', function() {
console.log('Client authenticated!');
client.on('session', function(accept, reject) {
var session = accept();
session.on('sftp', function(accept, reject) {
console.log('Client SFTP session');
var openFiles = {};
var handleCount = 0;
// `sftpStream` is an `SFTPStream` instance in server mode
// see: https://github.com/mscdex/ssh2-streams/blob/master/SFTPStream.md
var sftpStream = accept();
sftpStream.on('OPEN', function(reqid, filename, flags, attrs) {
// only allow opening /tmp/foo.txt for writing
if (filename !== '/tmp/foo.txt' || !(flags & OPEN_MODE.WRITE))
return sftpStream.status(reqid, STATUS_CODE.FAILURE);
// create a fake handle to return to the client, this could easily
// be a real file descriptor number for example if actually opening
// the file on the disk
var handle = new Buffer(4);
openFiles[handleCount] = true;
handle.writeUInt32BE(handleCount++, 0, true);
sftpStream.handle(reqid, handle);
console.log('Opening file for write')
}).on('WRITE', function(reqid, handle, offset, data) {
if (handle.length !== 4 || !openFiles[handle.readUInt32BE(0, true)])
return sftpStream.status(reqid, STATUS_CODE.FAILURE);
// fake the write
sftpStream.status(reqid, STATUS_CODE.OK);
var inspected = require('util').inspect(data);
console.log('Write to file at offset %d: %s', offset, inspected);
}).on('CLOSE', function(reqid, handle) {
var fnum;
if (handle.length !== 4 || !openFiles[(fnum = handle.readUInt32BE(0, true))])
return sftpStream.status(reqid, STATUS_CODE.FAILURE);
delete openFiles[fnum];
sftpStream.status(reqid, STATUS_CODE.OK);
console.log('Closing file');
});
});
});
}).on('end', function() {
console.log('Client disconnected');
});
}).listen(0, '127.0.0.1', function() {
console.log('Listening on port ' + this.address().port);
});
```
You can find more examples in the `examples` directory of this repository.
## API
`require('ssh2').Client` returns a **_Client_** constructor.
`require('ssh2').Server` returns a **_Server_** constructor.
`require('ssh2').utils` returns the [utility methods from `ssh2-streams`](https://github.com/mscdex/ssh2-streams#utility-methods).
`require('ssh2').SFTP_STATUS_CODE` returns the [`SFTPStream.STATUS_CODE` from `ssh2-streams`](https://github.com/mscdex/ssh2-streams/blob/master/SFTPStream.md#sftpstream-static-constants).
`require('ssh2').SFTP_OPEN_MODE` returns the [`SFTPStream.OPEN_MODE` from `ssh2-streams`](https://github.com/mscdex/ssh2-streams/blob/master/SFTPStream.md#sftpstream-static-constants).
### Client
#### Client events
* **banner**(< _string_ >message, < _string_ >language) - A notice was sent by the server upon connection.
* **ready**() - Authentication was successful.
* **tcp connection**(< _object_ >details, < _function_ >accept, < _function_ >reject) - An incoming forwarded TCP connection is being requested. Calling `accept` accepts the connection and returns a `Channel` object. Calling `reject` rejects the connection and no further action is needed. `details` contains:
* **srcIP** - _string_ - The originating IP of the connection.
* **srcPort** - _integer_ - The originating port of the connection.
* **destIP** - _string_ - The remote IP the connection was received on (given in earlier call to `forwardIn()`).
* **destPort** - _integer_ - The remote port the connection was received on (given in earlier call to `forwardIn()`).
* **x11**(< _object_ >details, < _function_ >accept, < _function_ >reject) - An incoming X11 connection is being requested. Calling `accept` accepts the connection and returns a `Channel` object. Calling `reject` rejects the connection and no further action is needed. `details` contains:
* **srcIP** - _string_ - The originating IP of the connection.
* **srcPort** - _integer_ - The originating port of the connection.
* **keyboard-interactive**(< _string_ >name, < _string_ >instructions, < _string_ >instructionsLang, < _array_ >prompts, < _function_ >finish) - The server is asking for replies to the given `prompts` for keyboard-interactive user authentication. `name` is generally what you'd use as a window title (for GUI apps). `prompts` is an array of `{ prompt: 'Password: ', echo: false }` style objects (here `echo` indicates whether user input should be displayed on the screen). The answers for all prompts must be provided as an array of strings and passed to `finish` when you are ready to continue. Note: It's possible for the server to come back and ask more questions.
* **unix connection**(< _object_ >details, < _function_ >accept, < _function_ >reject) - An incoming forwarded UNIX socket connection is being requested. Calling `accept` accepts the connection and returns a `Channel` object. Calling `reject` rejects the connection and no further action is needed. `details` contains:
* **socketPath** - _string_ - The originating UNIX socket path of the connection.
* **change password**(< _string_ >message, < _string_ >language, < _function_ >done) - If using password-based user authentication, the server has requested that the user's password be changed. Call `done` with the new password.
* **continue**() - Emitted when more requests/data can be sent to the server (after a `Client` method returned `false`).
* **error**(< _Error_ >err) - An error occurred. A 'level' property indicates 'client-socket' for socket-level errors and 'client-ssh' for SSH disconnection messages. In the case of 'client-ssh' messages, there may be a 'description' property that provides more detail.
* **end**() - The socket was disconnected.
* **close**(< _boolean_ >hadError) - The socket was closed. `hadError` is set to `true` if this was due to error.
#### Client methods
* **(constructor)**() - Creates and returns a new Client instance.
* **connect**(< _object_ >config) - _(void)_ - Attempts a connection to a server using the information given in `config`:
* **host** - _string_ - Hostname or IP address of the server. **Default:** `'localhost'`
* **port** - _integer_ - Port number of the server. **Default:** `22`
* **forceIPv4** - _boolean_ - Only connect via resolved IPv4 address for `host`. **Default:** `false`
* **forceIPv6** - _boolean_ - Only connect via resolved IPv6 address for `host`. **Default:** `false`
* **hostHash** - _string_ - 'md5' or 'sha1'. The host's key is hashed using this method and passed to the **hostVerifier** function. **Default:** (none)
* **hostVerifier** - _function_ - Function with parameters `(hashedKey[, callback])` where `hashedKey` is a string hex hash of the host's key for verification purposes. Return `true` to continue with the handshake or `false` to reject and disconnect, or call `callback()` with `true` or `false` if you need to perform asynchronous verification. **Default:** (auto-accept if `hostVerifier` is not set)
* **username** - _string_ - Username for authentication. **Default:** (none)
* **password** - _string_ - Password for password-based user authentication. **Default:** (none)
* **agent** - _string_ - Path to ssh-agent's UNIX socket for ssh-agent-based user authentication. **Windows users: set to 'pageant' for authenticating with Pageant or (actual) path to a cygwin "UNIX socket."** **Default:** (none)
* **agentForward** - _boolean_ - Set to `true` to use OpenSSH agent forwarding (`[email protected]`) for the life of the connection. `agent` must also be set to use this feature. **Default:** `false`
* **privateKey** - _mixed_ - _Buffer_ or _string_ that contains a private key for either key-based or hostbased user authentication (OpenSSH format). **Default:** (none)
* **passphrase** - _string_ - For an encrypted private key, this is the passphrase used to decrypt it. **Default:** (none)
* **localHostname** - _string_ - Along with **localUsername** and **privateKey**, set this to a non-empty string for hostbased user authentication. **Default:** (none)
* **localUsername** - _string_ - Along with **localHostname** and **privateKey**, set this to a non-empty string for hostbased user authentication. **Default:** (none)
* **tryKeyboard** - _boolean_ - Try keyboard-interactive user authentication if primary user authentication method fails. If you set this to `true`, you need to handle the `keyboard-interactive` event. **Default:** `false`
* **keepaliveInterval** - _integer_ - How often (in milliseconds) to send SSH-level keepalive packets to the server (in a similar way as OpenSSH's ServerAliveInterval config option). Set to 0 to disable. **Default:** `0`
* **keepaliveCountMax** - _integer_ - How many consecutive, unanswered SSH-level keepalive packets that can be sent to the server before disconnection (similar to OpenSSH's ServerAliveCountMax config option). **Default:** `3`
* **readyTimeout** - _integer_ - How long (in milliseconds) to wait for the SSH handshake to complete. **Default:** `20000`
* **sock** - _ReadableStream_ - A _ReadableStream_ to use for communicating with the server instead of creating and using a new TCP connection (useful for connection hopping).
* **strictVendor** - _boolean_ - Performs a strict server vendor check before sending vendor-specific requests, etc. (e.g. check for OpenSSH server when using `openssh_noMoreSessions()`) **Default:** `true`
* **algorithms** - _object_ - This option allows you to explicitly override the default transport layer algorithms used for the connection. Each value must be an array of valid algorithms for that category. The order of the algorithms in the arrays are important, with the most favorable being first. For a list of valid and default algorithm names, please review the documentation for the version of `ssh2-streams` used by this module. Valid keys:
* **kex** - _array_ - Key exchange algorithms.
* **cipher** - _array_ - Ciphers.
* **serverHostKey** - _array_ - Server host key formats.
* **hmac** - _array_ - (H)MAC algorithms.
* **compress** - _array_ - Compression algorithms.
* **compress** - _mixed_ - Set to `true` to enable compression if server supports it, `'force'` to force compression (disconnecting if server does not support it), or `false` to explicitly opt out of compression all of the time. Note: this setting is overridden when explicitly setting a compression algorithm in the `algorithms` configuration option. **Default:** (only use compression if that is only what the server supports)
* **debug** - _function_ - Set this to a function that receives a single string argument to get detailed (local) debug information. **Default:** (none)
**Authentication method priorities:** None -> Password -> Private Key -> Agent (-> keyboard-interactive if `tryKeyboard` is `true`) -> Hostbased
* **exec**(< _string_ >command[, < _object_ >options], < _function_ >callback) - _boolean_ - Executes `command` on the server. Returns `false` if you should wait for the `continue` event before sending any more traffic. `callback` has 2 parameters: < _Error_ >err, < _Channel_ >stream. Valid `options` properties are:
* **env** - _object_ - An environment to use for the execution of the command.
* **pty** - _mixed_ - Set to `true` to allocate a pseudo-tty with defaults, or an object containing specific pseudo-tty settings (see 'Pseudo-TTY settings'). Setting up a pseudo-tty can be useful when working with remote processes that expect input from an actual terminal (e.g. sudo's password prompt).
* **x11** - _mixed_ - Set to `true` to use defaults below, set to a number to specify a specific screen number, or an object with the following valid properties:
* **single** - _boolean_ - Allow just a single connection? **Default:** `false`
* **screen** - _number_ - Screen number to use **Default:** `0`
* **shell**([[< _mixed_ >window,] < _object_ >options]< _function_ >callback) - _boolean_ - Starts an interactive shell session on the server, with an optional `window` object containing pseudo-tty settings (see 'Pseudo-TTY settings'). If `window === false`, then no pseudo-tty is allocated. `options` supports the `x11` and `env` options as described in `exec()`. `callback` has 2 parameters: < _Error_ >err, < _Channel_ >stream. Returns `false` if you should wait for the `continue` event before sending any more traffic.
* **forwardIn**(< _string_ >remoteAddr, < _integer_ >remotePort, < _function_ >callback) - _boolean_ - Bind to `remoteAddr` on `remotePort` on the server and forward incoming TCP connections. `callback` has 2 parameters: < _Error_ >err, < _integer_ >port (`port` is the assigned port number if `remotePort` was 0). Returns `false` if you should wait for the `continue` event before sending any more traffic. Here are some special values for `remoteAddr` and their associated binding behaviors:
* '' - Connections are to be accepted on all protocol families supported by the server.
* '0.0.0.0' - Listen on all IPv4 addresses.
* '::' - Listen on all IPv6 addresses.
* 'localhost' - Listen on all protocol families supported by the server on loopback addresses only.
* '127.0.0.1' and '::1' - Listen on the loopback interfaces for IPv4 and IPv6, respectively.
* **unforwardIn**(< _string_ >remoteAddr, < _integer_ >remotePort, < _function_ >callback) - _boolean_ - Unbind from `remoteAddr` on `remotePort` on the server and stop forwarding incoming TCP connections. Until `callback` is called, more connections may still come in. `callback` has 1 parameter: < _Error_ >err. Returns `false` if you should wait for the `continue` event before sending any more traffic.
* **forwardOut**(< _string_ >srcIP, < _integer_ >srcPort, < _string_ >dstIP, < _integer_ >dstPort, < _function_ >callback) - _boolean_ - Open a connection with `srcIP` and `srcPort` as the originating address and port and `dstIP` and `dstPort` as the remote destination address and port. `callback` has 2 parameters: < _Error_ >err, < _Channel_ >stream. Returns `false` if you should wait for the `continue` event before sending any more traffic.
* **sftp**(< _function_ >callback) - _boolean_ - Starts an SFTP session. `callback` has 2 parameters: < _Error_ >err, < _SFTPStream_ >sftp. For methods available on `sftp`, see the [`SFTPStream` client documentation](https://github.com/mscdex/ssh2-streams/blob/master/SFTPStream.md) (except `read()` and `write()` are used instead of `readData()` and `writeData()` respectively, for convenience). Returns `false` if you should wait for the `continue` event before sending any more traffic.
* **subsys**(< _string_ >subsystem, < _function_ >callback) - _boolean_ - Invokes `subsystem` on the server. `callback` has 2 parameters: < _Error_ >err, < _Channel_ >stream. Returns `false` if you should wait for the `continue` event before sending any more traffic.
* **end**() - _(void)_ - Disconnects the socket.
* **openssh_noMoreSessions**(< _function_ >callback) - _boolean_ - OpenSSH extension that sends a request to reject any new sessions (e.g. exec, shell, sftp, subsys) for this connection. `callback` has 1 parameter: < _Error_ >err. Returns `false` if you should wait for the `continue` event before sending any more traffic.
* **openssh_forwardInStreamLocal**(< _string_ >socketPath, < _function_ >callback) - _boolean_ - OpenSSH extension that binds to a UNIX domain socket at `socketPath` on the server and forwards incoming connections. `callback` has 1 parameter: < _Error_ >err. Returns `false` if you should wait for the `continue` event before sending any more traffic.
* **openssh_unforwardInStreamLocal**(< _string_ >socketPath, < _function_ >callback) - _boolean_ - OpenSSH extension that unbinds from a UNIX domain socket at `socketPath` on the server and stops forwarding incoming connections. `callback` has 1 parameter: < _Error_ >err. Returns `false` if you should wait for the `continue` event before sending any more traffic.
* **openssh_forwardOutStreamLocal**(< _string_ >socketPath, < _function_ >callback) - _boolean_ - OpenSSH extension that opens a connection to a UNIX domain socket at `socketPath` on the server. `callback` has 2 parameters: < _Error_ >err, < _Channel_ >stream. Returns `false` if you should wait for the `continue` event before sending any more traffic.
### Server
#### Server events
* **connection**(< _Connection_ >client, < _object_ >info) - A new client has connected. `info` contains the following properties:
* **ip** - _string_ - The remoteAddress of the connection.
* **header** - _object_ - Information about the client's header:
* **identRaw** - _string_ - The raw client identification string.
* **versions** - _object_ - Various version information:
* **protocol** - _string_ - The SSH protocol version (always `1.99` or `2.0`).
* **software** - _string_ - The software name and version of the client.
* **comments** - _string_ - Any text that comes after the software name/version.
Example: the identification string `SSH-2.0-OpenSSH_6.6.1p1 Ubuntu-2ubuntu2` would be parsed as:
```js
{ identRaw: 'SSH-2.0-OpenSSH_6.6.1p1 Ubuntu-2ubuntu2',
version: {
protocol: '2.0',
software: 'OpenSSH_6.6.1p1'
},
comments: 'Ubuntu-2ubuntu2' }
```
#### Server methods
* **(constructor)**(< _object_ >config[, < _function_ >connectionListener]) - Creates and returns a new Server instance. Server instances also have the same methods/properties/events as [`net.Server`](http://nodejs.org/docs/latest/api/net.html#net_class_net_server). `connectionListener` if supplied, is added as a `connection` listener. Valid `config` properties:
* **hostKeys** - _array_ - An array of either Buffers/strings that contain host private keys or objects in the format of `{ key: <Buffer/string>, passphrase: <string> }` for encrypted private keys. (**Required**) **Default:** (none)
* **algorithms** - _object_ - This option allows you to explicitly override the default transport layer algorithms used for incoming client connections. Each value must be an array of valid algorithms for that category. The order of the algorithms in the arrays are important, with the most favorable being first. For a list of valid and default algorithm names, please review the documentation for the version of `ssh2-streams` used by this module. Valid keys:
* **kex** - _array_ - Key exchange algorithms.
* **cipher** - _array_ - Ciphers.
* **serverHostKey** - _array_ - Server host key formats.
* **hmac** - _array_ - (H)MAC algorithms.
* **compress** - _array_ - Compression algorithms.
* **greeting** - _string_ - A message that is sent to clients immediately upon connection, before handshaking begins. **Note:** Most clients usually ignore this. **Default:** (none)
* **banner** - _string_ - A message that is sent to clients once, right before authentication begins. **Default:** (none)
* **ident** - _string_ - A custom server software name/version identifier. **Default:** `'ssh2js' + moduleVersion + 'srv'`
* **highWaterMark** - _integer_ - This is the `highWaterMark` to use for the parser stream. **Default:** `32 * 1024`
* **debug** - _function_ - Set this to a function that receives a single string argument to get detailed (local) debug information. **Default:** (none)
#### Connection events
* **authentication**(< _AuthContext_ >ctx) - The client has requested authentication. `ctx.username` contains the client username, `ctx.method` contains the requested authentication method, and `ctx.accept()` and `ctx.reject([< Array >authMethodsLeft[, < Boolean >isPartialSuccess]])` are used to accept or reject the authentication request respectively. `abort` is emitted if the client aborts the authentication request. Other properties/methods available on `ctx` depends on the `ctx.method` of authentication the client has requested:
* `password`:
* **password** - _string_ - This is the password sent by the client.
* `publickey`:
* **key** - _object_ - Contains information about the public key sent by the client:
* **algo** - _string_ - The name of the key algorithm (e.g. `ssh-rsa`).
* **data** - _Buffer_ - The actual key data.
* **sigAlgo** - _mixed_ - If the value is `undefined`, the client is only checking the validity of the `key`. If the value is a _string_, then this contains the signature algorithm that is passed to [`crypto.createVerify()`](http://nodejs.org/docs/latest/api/crypto.html#crypto_crypto_createverify_algorithm).
* **blob** - _mixed_ - If the value is `undefined`, the client is only checking the validity of the `key`. If the value is a _Buffer_, then this contains the data that is passed to [`verifier.update()`](http://nodejs.org/docs/latest/api/crypto.html#crypto_verifier_update_data).
* **signature** - _mixed_ - If the value is `undefined`, the client is only checking the validity of the `key`. If the value is a _Buffer_, then this contains a signature that is passed to [`verifier.verify()`](http://nodejs.org/docs/latest/api/crypto.html#crypto_verifier_verify_object_signature_signature_format).
* `keyboard-interactive`:
* **submethods** - _array_ - A list of preferred authentication "sub-methods" sent by the client. This may be used to determine what (if any) prompts to send to the client.
* **prompt**(< _array_ >prompts[, < _string_ >title[, < _string_ >instructions]], < _function_ >callback) - _boolean_ - Send prompts to the client. `prompts` is an array of `{ prompt: 'Prompt text', echo: true }` objects (`prompt` being the prompt text and `echo` indicating whether the client's response to the prompt should be echoed to their display). `callback` is called with `(err, responses)`, where `responses` is an array of string responses matching up to the `prompts`.
* **ready**() - Emitted when the client has been successfully authenticated.
* **session**(< _function_ >accept, < _function_ >reject) - Emitted when the client has requested a new session. Sessions are used to start interactive shells, execute commands, request X11 forwarding, etc. `accept()` returns a new _Session_ instance. `reject()` Returns `false` if you should wait for the `continue` event before sending any more traffic.
* **tcpip**(< _function_ >accept, < _function_ >reject, < _object_ >info) - Emitted when the client has requested an outbound (TCP) connection. `accept()` returns a new _Channel_ instance representing the connection. `reject()` Returns `false` if you should wait for the `continue` event before sending any more traffic. `info` contains:
* **srcIP** - _string_ - Source IP address of outgoing connection.
* **srcPort** - _string_ - Source port of outgoing connection.
* **destIP** - _string_ - Destination IP address of outgoing connection.
* **destPort** - _string_ - Destination port of outgoing connection.
* **openssh.streamlocal**(< _function_ >accept, < _function_ >reject, < _object_ >info) - Emitted when the client has requested a connection to a UNIX domain socket. `accept()` returns a new _Channel_ instance representing the connection. `reject()` Returns `false` if you should wait for the `continue` event before sending any more traffic. `info` contains:
* **socketPath** - _string_ - Destination socket path of outgoing connection.
* **request**(< _mixed_ >accept, < _mixed_ >reject, < _string_ >name, < _object_ >info) - Emitted when the client has sent a global request for `name` (e.g. `tcpip-forward` or `cancel-tcpip-forward`). `accept` and `reject` are functions if the client requested a response. If `bindPort === 0`, you should pass the chosen port to `accept()` so that the client will know what port was bound. `info` contains additional details about the request:
* `tcpip-forward` and `cancel-tcpip-forward`:
* **bindAddr** - _string_ - The IP address to start/stop binding to.
* **bindPort** - _integer_ - The port to start/stop binding to.
* `[email protected]` and `[email protected]`:
* **socketPath** - _string_ - The socket path to start/stop binding to.
* **rekey**() - Emitted when the client has finished rekeying (either client or server initiated).
* **continue**() - Emitted when more requests/data can be sent to the client (after a `Connection` method returned `false`).
* **error**(< _Error_ >err) - An error occurred.
* **end**() - The client socket disconnected.
* **close**(< _boolean_ >hadError) - The client socket was closed. `hadError` is set to `true` if this was due to error.
#### Connection methods
* **end**() - _boolean_ - Closes the client connection. Returns `false` if you should wait for the `continue` event before sending any more traffic.
* **x11**(< _string_ >originAddr, < _integer_ >originPort, < _function_ >callback) - _boolean_ - Alert the client of an incoming X11 client connection from `originAddr` on port `originPort`. `callback` has 2 parameters: < _Error_ >err, < _Channel_ >stream. Returns `false` if you should wait for the `continue` event before sending any more traffic.
* **forwardOut**(< _string_ >boundAddr, < _integer_ >boundPort, < _string_ >remoteAddr, < _integer_ >remotePort, < _function_ >callback) - _boolean_ - Alert the client of an incoming TCP connection on `boundAddr` on port `boundPort` from `remoteAddr` on port `remotePort`. `callback` has 2 parameters: < _Error_ >err, < _Channel_ >stream. Returns `false` if you should wait for the `continue` event before sending any more traffic.
* **openssh_forwardOutStreamLocal**(< _string_ >socketPath, < _function_ >callback) - _boolean_ - Alert the client of an incoming UNIX domain socket connection on `socketPath`. `callback` has 2 parameters: < _Error_ >err, < _Channel_ >stream. Returns `false` if you should wait for the `continue` event before sending any more traffic.
* **rekey**([< _function_ >callback]) - _boolean_ - Initiates a rekeying with the client. If `callback` is supplied, it is added as a one-time handler for the `rekey` event. Returns `false` if you should wait for the `continue` event before sending any more traffic.
#### Session events
* **pty**(< _mixed_ >accept, < _mixed_ >reject, < _object_ >info) - The client requested allocation of a pseudo-TTY for this session. `accept` and `reject` are functions if the client requested a response and return `false` if you should wait for the `continue` event before sending any more traffic. `info` has these properties:
* **cols** - _integer_ - The number of columns for the pseudo-TTY.
* **rows** - _integer_ - The number of rows for the pseudo-TTY.
* **width** - _integer_ - The width of the pseudo-TTY in pixels.
* **height** - _integer_ - The height of the pseudo-TTY in pixels.
* **modes** - _object_ - Contains the requested terminal modes of the pseudo-TTY keyed on the mode name with the value being the mode argument. (See the table at the end for valid names).
* **window-change**(< _mixed_ >accept, < _mixed_ >reject, < _object_ >info) - The client reported a change in window dimensions during this session. `accept` and `reject` are functions if the client requested a response and return `false` if you should wait for the `continue` event before sending any more traffic. `info` has these properties:
* **cols** - _integer_ - The new number of columns for the client window.
* **rows** - _integer_ - The new number of rows for the client window.
* **width** - _integer_ - The new width of the client window in pixels.
* **height** - _integer_ - The new height of the client window in pixels.
* **x11**(< _mixed_ >accept, < _mixed_ >reject, < _object_ >info) - The client requested X11 forwarding. `accept` and `reject` are functions if the client requested a response and return `false` if you should wait for the `continue` event before sending any more traffic. `info` has these properties:
* **single** - _boolean_ - `true` if only a single connection should be forwarded.
* **protocol** - _string_ - The name of the X11 authentication method used (e.g. `MIT-MAGIC-COOKIE-1`).
* **cookie** - _string_ - The X11 authentication cookie encoded in hexadecimal.
* **screen** - _integer_ - The screen number to forward X11 connections for.
* **env**(< _mixed_ >accept, < _mixed_ >reject, < _object_ >info) - The client requested an environment variable to be set for this session. `accept` and `reject` are functions if the client requested a response and return `false` if you should wait for the `continue` event before sending any more traffic. `info` has these properties:
* **key** - _string_ - The environment variable's name.
* **value** - _string_ - The environment variable's value.
* **signal**(< _mixed_ >accept, < _mixed_ >reject, < _object_ >info) - The client has sent a signal. `accept` and `reject` are functions if the client requested a response and return `false` if you should wait for the `continue` event before sending any more traffic. `info` has these properties:
* **name** - _string_ - The signal name (e.g. `SIGUSR1`).
* **auth-agent**(< _mixed_ >accept, < _mixed_ >reject) - The client has requested incoming ssh-agent requests be forwarded to them. `accept` and `reject` are functions if the client requested a response and return `false` if you should wait for the `continue` event before sending any more traffic.
* **shell**(< _mixed_ >accept, < _mixed_ >reject) - The client has requested an interactive shell. `accept` and `reject` are functions if the client requested a response. `accept()` returns a _Channel_ for the interactive shell. `reject()` Returns `false` if you should wait for the `continue` event before sending any more traffic.
* **exec**(< _mixed_ >accept, < _mixed_ >reject, < _object_ >info) - The client has requested execution of a command string. `accept` and `reject` are functions if the client requested a response. `accept()` returns a _Channel_ for the command execution. `reject()` Returns `false` if you should wait for the `continue` event before sending any more traffic. `info` has these properties:
* **command** - _string_ - The command line to be executed.
* **sftp**(< _mixed_ >accept, < _mixed_ >reject) - The client has requested the SFTP subsystem. `accept` and `reject` are functions if the client requested a response. `accept()` returns an _SFTPStream_ in server mode (see the [`SFTPStream` documentation](https://github.com/mscdex/ssh2-streams/blob/master/SFTPStream.md) for details). `reject()` Returns `false` if you should wait for the `continue` event before sending any more traffic. `info` has these properties:
* **subsystem**(< _mixed_ >accept, < _mixed_ >reject, < _object_ >info) - The client has requested an arbitrary subsystem. `accept` and `reject` are functions if the client requested a response. `accept()` returns a _Channel_ for the subsystem. `reject()` Returns `false` if you should wait for the `continue` event before sending any more traffic. `info` has these properties:
* **name** - _string_ - The name of the subsystem.
* **close**() - The session was closed.
### Channel
This is a normal **streams2** Duplex Stream (used both by clients and servers), with the following changes:
* A boolean property `allowHalfOpen` exists and behaves similarly to the property of the same name for `net.Socket`. When the stream's end() is called, if `allowHalfOpen` is `true`, only EOF will be sent (the server can still send data if they have not already sent EOF). The default value for this property is `true`.
* A `close` event is emitted once the channel is completely closed on both the client and server.
* Client-specific:
* For exec():
* An `exit` event *may* (the SSH2 spec says it is optional) be emitted when the process finishes. If the process finished normally, the process's return value is passed to the `exit` callback. If the process was interrupted by a signal, the following are passed to the `exit` callback: null, < _string_ >signalName, < _boolean_ >didCoreDump, < _string_ >description.
* If there was an `exit` event, the `close` event will be passed the same arguments for convenience.
* For shell() and exec():
* The readable side represents stdout and the writable side represents stdin.
* A `stderr` property contains a Readable stream that represents output from stderr.
* **signal**(< _string_ >signalName) - _boolean_ - Sends a POSIX signal to the current process on the server. Valid signal names are: 'ABRT', 'ALRM', 'FPE', 'HUP', 'ILL', 'INT', 'KILL', 'PIPE', 'QUIT', 'SEGV', 'TERM', 'USR1', and 'USR2'. Some server implementations may ignore this request if they do not support signals. Note: If you are trying to send SIGINT and you find `signal()` doesn't work, try writing `'\x03'` to the Channel stream instead. Returns `false` if you should wait for the `continue` event before sending any more traffic.
* **setWindow**(< _integer_ >rows, < _integer_ >cols, < _integer_ >height, < _integer_ >width) - _boolean_ - Lets the server know that the local terminal window has been resized. The meaning of these arguments are described in the 'Pseudo-TTY settings' section. Returns `false` if you should wait for the `continue` event before sending any more traffic.
* Server-specific:
* For exec-enabled channel instances there is an additional method available that may be called right before you close the channel. It has two different signatures:
* **exit**(< _integer_ >exitCode) - _boolean_ - Sends an exit status code to the client. Returns `false` if you should wait for the `continue` event before sending any more traffic.
* **exit**(< _string_ >signalName[, < _boolean_ >coreDumped[, < _string_ >errorMsg]]) - _boolean_ - Sends an exit status code to the client. Returns `false` if you should wait for the `continue` event before sending any more traffic.
* For exec and shell-enabled channel instances, `channel.stderr` is a writable stream.
### Pseudo-TTY settings
* **rows** - < _integer_ > - Number of rows. **Default:** `24`
* **cols** - < _integer_ > - Number of columns. **Default:** `80`
* **height** - < _integer_ > - Height in pixels. **Default:** `480`
* **width** - < _integer_ > - Width in pixels. **Default:** `640`
* **term** - < _string_ > - The value to use for $TERM. **Default:** `'vt100'`
`rows` and `cols` override `width` and `height` when `rows` and `cols` are non-zero.
Pixel dimensions refer to the drawable area of the window.
Zero dimension parameters are ignored.
### Terminal modes
Name | Description
-------------- | ------------
VINTR | Interrupt character; 255 if none. Similarly for the other characters. Not all of these characters are supported on all systems.
VQUIT | The quit character (sends SIGQUIT signal on POSIX systems).
VERASE | Erase the character to left of the cursor.
VKILL | Kill the current input line.
VEOF | End-of-file character (sends EOF from the terminal).
VEOL | End-of-line character in addition to carriage return and/or linefeed.
VEOL2 | Additional end-of-line character.
VSTART | Continues paused output (normally control-Q).
VSTOP | Pauses output (normally control-S).
VSUSP | Suspends the current program.
VDSUSP | Another suspend character.
VREPRINT | Reprints the current input line.
VWERASE | Erases a word left of cursor.
VLNEXT | Enter the next character typed literally, even if it is a special character
VFLUSH | Character to flush output.
VSWTCH | Switch to a different shell layer.
VSTATUS | Prints system status line (load, command, pid, etc).
VDISCARD | Toggles the flushing of terminal output.
IGNPAR | The ignore parity flag. The parameter SHOULD be 0 if this flag is FALSE, and 1 if it is TRUE.
PARMRK | Mark parity and framing errors.
INPCK | Enable checking of parity errors.
ISTRIP | Strip 8th bit off characters.
INLCR | Map NL into CR on input.
IGNCR | Ignore CR on input.
ICRNL | Map CR to NL on input.
IUCLC | Translate uppercase characters to lowercase.
IXON | Enable output flow control.
IXANY | Any char will restart after stop.
IXOFF | Enable input flow control.
IMAXBEL | Ring bell on input queue full.
ISIG | Enable signals INTR, QUIT, [D]SUSP.
ICANON | Canonicalize input lines.
XCASE | Enable input and output of uppercase characters by preceding their lowercase equivalents with "\".
ECHO | Enable echoing.
ECHOE | Visually erase chars.
ECHOK | Kill character discards current line.
ECHONL | Echo NL even if ECHO is off.
NOFLSH | Don't flush after interrupt.
TOSTOP | Stop background jobs from output.
IEXTEN | Enable extensions.
ECHOCTL | Echo control characters as ^(Char).
ECHOKE | Visual erase for line kill.
PENDIN | Retype pending input.
OPOST | Enable output processing.
OLCUC | Convert lowercase to uppercase.
ONLCR | Map NL to CR-NL.
OCRNL | Translate carriage return to newline (output).
ONOCR | Translate newline to carriage return-newline (output).
ONLRET | Newline performs a carriage return (output).
CS7 | 7 bit mode.
CS8 | 8 bit mode.
PARENB | Parity enable.
PARODD | Odd parity, else even.
TTY_OP_ISPEED | Specifies the input baud rate in bits per second.
TTY_OP_OSPEED | Specifies the output baud rate in bits per second.
|
PypiClean
|
/fastlane_bot-2.7.19-py3-none-any.whl/fastlane_bot/modes/triangle_multi.py
|
from typing import List, Any, Tuple, Union
from fastlane_bot.modes.base_triangle import ArbitrageFinderTriangleBase
from fastlane_bot.tools.cpc import CPCContainer
from fastlane_bot.tools.optimizer import MargPOptimizer
class ArbitrageFinderTriangleMulti(ArbitrageFinderTriangleBase):
"""
Triangular arbitrage finder mode
"""
arb_mode = "multi_triangle"
def find_arbitrage(self, candidates: List[Any] = None, ops: Tuple = None, best_profit: float = 0, profit_src: float = 0) -> Union[List, Tuple]:
"""
see base.py
"""
if self.base_exchange != "carbon_v1":
raise ValueError("base_exchange must be carbon_v1 for `multi` mode")
if candidates is None:
candidates = []
combos = self.get_combos(
self.flashloan_tokens, self.CCm, arb_mode=self.arb_mode
)
for src_token, miniverse in combos:
r = None
CC_cc = CPCContainer(miniverse)
O = MargPOptimizer(CC_cc)
try:
r = O.margp_optimizer(src_token)
trade_instructions_df = r.trade_instructions(O.TIF_DFAGGR)
trade_instructions_dic = r.trade_instructions(O.TIF_DICTS)
trade_instructions = r.trade_instructions()
"""
The following handles an edge case until parallel execution is available:
1 Determine correct direction - opposite of non-Carbon pool
2 Get cids of wrong-direction Carbon pools
3 Create new CPCContainer with correct pools
4 Rerun optimizer
5 Resume normal flow
"""
non_carbon_cids = [
curve.cid
for curve in miniverse
if curve.params.get("exchange") != "carbon_v1"
]
non_carbon_row = trade_instructions_df.loc[non_carbon_cids[0]]
tkn0_into_carbon = non_carbon_row[0] < 0
wrong_direction_cids = [
idx
for idx, row in trade_instructions_df.iterrows()
if (
(tkn0_into_carbon and row[0] < 0)
or (not tkn0_into_carbon and row[0] > 0)
)
and ("-0" in idx or "-1" in idx)
]
if non_carbon_cids and len(wrong_direction_cids) > 0:
self.ConfigObj.logger.debug(
f"\n\nRemoving wrong direction pools & rerunning optimizer\ntrade_instructions_df before: {trade_instructions_df.to_string()}"
)
new_curves = [
curve
for curve in miniverse
if curve.cid not in wrong_direction_cids
]
# Rerun main flow with the new set of curves
CC_cc = CPCContainer(new_curves)
O = MargPOptimizer(CC_cc)
r = O.margp_optimizer(src_token)
profit_src = -r.result
trade_instructions_df = r.trade_instructions(O.TIF_DFAGGR)
trade_instructions_dic = r.trade_instructions(O.TIF_DICTS)
trade_instructions = r.trade_instructions()
except Exception as e:
continue
# Get the cids
cids = [ti["cid"] for ti in trade_instructions_dic]
# Calculate the profit
profit = self.calculate_profit(src_token, profit_src, self.CCm, cids)
if str(profit) == "nan":
self.ConfigObj.logger.debug("profit is nan, skipping")
continue
# Handle candidates based on conditions
candidates += self.handle_candidates(
best_profit,
profit,
trade_instructions_df,
trade_instructions_dic,
src_token,
trade_instructions,
)
# Find the best operations
best_profit, ops = self.find_best_operations(
best_profit,
ops,
profit,
trade_instructions_df,
trade_instructions_dic,
src_token,
trade_instructions,
)
return candidates if self.result == self.AO_CANDIDATES else ops
|
PypiClean
|
/cmus-notify-1.4.1.tar.gz/cmus-notify-1.4.1/cmus_notify/options.py
|
from argparse import (RawDescriptionHelpFormatter,
ArgumentParser,
SUPPRESS)
from .constants import DEFAULT_CONFIGURATION_FILE
def parse_arguments():
"""Create an :class:`argparse.ArgumentParser` and parse the command-line arguments."""
parser = ArgumentParser(formatter_class=RawDescriptionHelpFormatter,
prog='cmus_notify',
description='Display a notification about Cmus\'s '
'current status',
epilog='Format String Parameters\n'
'========================\n\n'
'The available arguments to the format '
'strings are the following:\n\n'
' - album: The song\'s album\n'
' - artist: The song\'s artist\n'
' - date: The song\'s release date\n'
' - discnumber: The song\'s disc\'s number\n'
' - duration: The song\'s duration\n'
' - file: The song\'s file\'s path\n'
' - status: Cmus current status\n'
' - title: The song\'s title\n'
' - tracknumber: The song\'s track number')
parser.add_argument('parse',
type=str,
metavar='INFORMATION',
help='Parse the given information')
parser.add_argument('-a',
'--application_name',
default=SUPPRESS,
type=str,
help='The name of the application')
parser.add_argument('-b',
'--body',
required=False,
metavar='BODY_FORMAT_STRING',
default=SUPPRESS,
type=str,
help='A format string that can be specified to tell the'
' software how to format the body. The syntax is '
'the same as Python\'s. The available options are '
'specified at the end of this help message. (i.e. '
'\'Artist: {artist}\')')
parser.add_argument('-t',
'--title',
required=False,
metavar='TITLE_FORMAT_STRING',
default=SUPPRESS,
type=str,
help='A format string that can be specified to tell the'
' software how to format the title. The syntax is '
'the same as Python\'s. The available options are '
'specified at the end of this help message. (i.e. '
'\'Now playing: {title}\')')
parser.add_argument('-f',
'--configuration_file',
required=False,
default=DEFAULT_CONFIGURATION_FILE,
type=str,
help='The path to the configuration file. If it is not '
'specified, the program will use the default '
'values of the other options.')
parser.add_argument('-c',
'--custom_notification',
required=False,
default=SUPPRESS,
type=str,
help='The path to a custom implementation of the '
'notification class. If it is not specified, the '
'standard implementation will be used (the one using '
'notify2).')
return parser.parse_args()
|
PypiClean
|
/wavestate.model-0.1.1.tar.gz/wavestate.model-0.1.1/src/wavestate/model/system/algo_alm/mm_overlapper.py
|
import numpy as np
from .plot_alm import OverlapperPlotter
from wavestate.bunch import Bunch
from . import mm_annotate
from wavestate.utilities.strings import table
from ... import optics
class ModeMatchingOverlapper(object):
plotter = OverlapperPlotter()
def __init__(
self,
algo_pa,
algo_mm,
targetsB_to,
targetsB_fr,
oLp_path_center,
Wk,
branching,
pbg=None,
shifts_use=False,
):
self.pa = algo_pa
self.mm = algo_mm
self.targetsB_to = targetsB_to
self.targetsB_fr = targetsB_fr
self.oLp_path_center = oLp_path_center
self.target1 = None
self.target2 = None
self.Wk = Wk
self.branching = branching
self.shifts_use = shifts_use
if pbg is None:
self.pbg = self.mm.pbg
else:
self.pbg = pbg
self.trans_center = self.mm._path_transporters(self.oLp_path_center, Wk=Wk, shifts_use=shifts_use)
def setup_refer_to_start(tB, direction):
tB_new = Bunch()
tB_new.type = tB.tspecB
if tB.inv_start:
tB_new.targB = self.mm._target_complete(tB.tspecB, ol=tB.oLp_path[-1], shifts_use=shifts_use)
else:
tB_new.targB = self.mm._target_complete(tB.tspecB, ol=tB.oLp_path[0], shifts_use=shifts_use)
if not tB.inv_start and tB_new.type == 'cavity':
shiftsX = tB_new.targB.cav_shiftX
shiftsY = tB_new.targB.cav_shiftY
else:
# TODO, allow reverse shifts
shiftsX = {}
shiftsY = {}
tB_new.trans = self.mm._path_transporters(tB.oLp_path, Wk=Wk, shifts_use=shifts_use)
matXfr = tB_new.trans.X.full_trip_mat
matYfr = tB_new.trans.Y.full_trip_mat
if tB.inv_start:
matXfr = np.linalg.inv(matXfr)
matYfr = np.linalg.inv(matYfr)
else:
shiftsX = tB_new.trans.X.shifts_out(shiftsX)
shiftsY = tB_new.trans.Y.shifts_out(shiftsY)
tB_new.inv_start = tB.inv_start
tB_new.type = direction
if direction == "from":
pass
elif direction == "to":
matXfr = np.linalg.inv(self.trans_center.X.full_trip_mat) @ matXfr
matYfr = np.linalg.inv(self.trans_center.Y.full_trip_mat) @ matYfr
else:
raise RuntimeError("Bad Direction")
tB_new.qX = tB_new.targB.qX.propagate_matrix(matXfr)
tB_new.qY = tB_new.targB.qY.propagate_matrix(matYfr)
tB_new.qXend = tB_new.qX.propagate_matrix(self.trans_center.X.full_trip_mat)
tB_new.qYend = tB_new.qY.propagate_matrix(self.trans_center.Y.full_trip_mat)
if not tB.inv_start:
shiftsX = self.trans_center.X.shifts_out(shiftsX)
shiftsY = self.trans_center.Y.shifts_out(shiftsY)
tB_new.shiftsXend = shiftsX
tB_new.shiftsYend = shiftsY
return tB_new
transB_fr = dict()
# overlap is referred to the start of the path
# TODO, collapse the logic of both of these to a single function call
for t_fr, frB in targetsB_fr.items():
transB_fr[t_fr] = setup_refer_to_start(frB, direction="from")
transB_to = dict()
for t_to, toB in targetsB_to.items():
transB_to[t_to] = setup_refer_to_start(toB, direction="to")
self.transB_to = transB_to
self.transB_fr = transB_fr
return
def rebuild(self, pbg=None, overrides=None):
if pbg is None:
pbg = self.pbg
if overrides is not None:
pbg = pbg.copy()
for override_param, override_val in overrides.items():
pbg.override_value(override_param, override_val)
olap = self.__class__(
algo_pa=self.pa,
algo_mm=self.mm,
targetsB_to=self.targetsB_to,
targetsB_fr=self.targetsB_fr,
oLp_path_center=self.oLp_path_center,
Wk=self.Wk,
branching=self.branching,
pbg=pbg,
)
olap.target1 = self.target1
olap.target2 = self.target2
return olap
def propagate_reference(self, target, name=None):
raise NotImplementedError("Not sure this is working as expected")
if name is None:
name = target + " reference"
if target in self.transB_fr:
self.target1 = target
self.target2 = name
frB = self.transB_fr[target]
toB_new = Bunch()
# makes a null transfer
toB_new.trans = self.mm._path_transporters([], Wk=self.Wk, shifts_use=self.shifts_use)
qX = frB.qX.propagate_matrix(self.trans_center.X.full_trip_mat)
qY = frB.qY.propagate_matrix(self.trans_center.Y.full_trip_mat)
toB_new.qX = qX
toB_new.qY = qY
toB_new.targB = Bunch()
toB_new.targB.type = "specified"
toB_new.targB.qX = qX
toB_new.targB.qY = qY
toB_new.type = "to"
toB_new.inv_start = not frB.inv_start
self.transB_to[name] = toB_new
else:
toB = self.transB_to[target]
self.target2 = target
self.target1 = name
frB_new = Bunch()
# makes a null transfer
frB_new.trans = self.mm._path_transporters([], Wk=self.Wk, shifts_use=self.shifts_use)
qX = toB.qX
qY = toB.qY
frB_new.qX = qX
frB_new.qY = qY
frB_new.targB = Bunch()
frB_new.targB.type = "specified"
frB_new.targB.qX = qX
frB_new.targB.qY = qY
frB_new.inv_start = not toB.inv_start
frB_new.type = "from"
self.transB_fr[name] = frB_new
return
def set_targets(self, target1, target2):
self.target1 = target1
self.target2 = target2
return
def target_list(self):
targets_set = set(self.targetsB_fr.keys())
targets_set.update(self.targetsB_to.keys())
targets = []
if self.target1 is not None:
targets_set.remove(self.target1)
targets.append(self.target1)
if self.target2 is not None:
targets_set.remove(self.target2)
targets.append(self.target2)
targets.extend(sorted(targets_set))
return targets
def object_z(self, ref, obj=None):
# TODO
lset = self.mm.bg.rAp2oLp_set(ref, obj=obj)
print("lset", lset)
# TODO, specify X or Y or check that it doesn't matter
d = self.trans_center.X.ol2z(lset)
return min(d.values())
@property
def length_m(self):
return self.trans_center.X.full_trip_length
def __getitem__(self, tname):
"""
Indexing the overlapper by a target name
returns the bunch containing the target information
"""
ret = self.transB_fr.get(tname, None)
if ret is not None:
return ret
return self.transB_to[tname]
def z2target_qY(self, tname, Z):
tB = self[tname]
mat = self.trans_center.Y.z2mat(Z)
return tB.qY.propagate_matrix(mat)
def z2target_qX(self, tname, Z):
tB = self[tname]
mat = self.trans_center.X.z2mat(Z)
return tB.qX.propagate_matrix(mat)
@property
def overlapX_field(self):
frB = self.transB_fr[self.target1]
toB = self.transB_to[self.target2]
return frB.qX.overlap_HG(toB.qX)
@property
def overlapY_field(self):
frB = self.transB_fr[self.target1]
toB = self.transB_to[self.target2]
return frB.qY.overlap_HG(toB.qY)
@property
def overlap_field(self):
try:
frB = self.transB_fr[self.target1]
except KeyError:
frB = self.transB_to[self.target1]
try:
toB = self.transB_to[self.target2]
except KeyError:
toB = self.transB_fr[self.target2]
return frB.qX.overlap_HG(toB.qX) * frB.qY.overlap_HG(toB.qY)
@property
def overlap(self):
return abs(self.overlap_field) ** 2
def plot(self, fname=None, *args, **kwargs):
return self.plotter.plot(*args, overlapper=self, fname=fname, **kwargs)
def plot_scan(self, fname=None, *args, **kwargs):
return self.plotter.plot_scan(*args, overlapper=self, fname=fname, **kwargs)
def plot_descriptions(
self,
axis="Y",
waists_target=None,
reverse=False,
tags=[],
):
"""
tags is a list of dictionaries that look like
dict(
obj = '/path/to/obj' or obj,
to = distance[m], or
fr = distance[m],
label = "label W={W}, Gouy={gouy}deg ..."
)
"""
descriptions = self.object_descriptions(
axis=axis,
waists_target=waists_target,
)
descriptions = mm_annotate.annotate_tags(self.pbg, descriptions, tags)
descriptions = mm_annotate.annotate_waists(descriptions)
descriptions = mm_annotate.annotate_qspaces(descriptions, reverse=reverse)
descriptions = mm_annotate.annotate_clearspaces(descriptions)
self._plot_descriptions = descriptions
return self._plot_descriptions
def annotation_target(self, waists_target=None):
if waists_target is None:
if self.target1 is not None:
waists_target = 1
else:
waists_target = 2
if waists_target == 1:
waists_target = self.target1
elif waists_target == 2:
waists_target = self.target2
return waists_target
def object_descriptions(
self,
axis="Y",
waists_target=None,
):
waists_target = self.annotation_target(waists_target)
tB = self[waists_target]
if axis.lower() == "y":
descriptions = mm_annotate.transporter2objects(
self.mm,
tB.qY,
self.trans_center.Y,
)
elif axis.lower() == "x":
descriptions = mm_annotate.transporter2objects(
self.mm,
tB.qX,
self.trans_center.X,
)
else:
raise RuntimeError("Unrecognized Axis")
return descriptions
def overlap_field_between(self, target1, target2):
try:
frB = self.transB_fr[target1]
except KeyError:
frB = self.transB_to[target1]
try:
toB = self.transB_to[target2]
except KeyError:
toB = self.transB_fr[target2]
return frB.qX.overlap_HG(toB.qX) * frB.qY.overlap_HG(toB.qY)
def overlap_table(self, first=(), last=()):
"""
First and last modify the ordering
"""
targets = list(self.transB_fr.keys()) + list(self.transB_to.keys())
firsts = []
mids = []
lasts = []
for idx, targ in enumerate(targets):
for f_ in first:
if f_ in targ:
firsts.append(targ)
break
else:
for f_ in last:
if f_ in targ:
lasts.append(targ)
break
else:
mids.append(targ)
targets = firsts[::-1] + mids + lasts
olaps = []
for t1 in targets:
olaps2 = []
for t2 in targets:
olaps2.append(abs(self.overlap_field_between(t1, t2)) ** 2)
olaps.append(olaps2)
alpha = "ABCDEFGHIJKLMNOPQRSTUV"
headers = []
labels = []
for idx, t1 in enumerate(targets):
labels.append("({}) {}".format(alpha[idx], t1))
headers.append("({})".format(alpha[idx]))
return table(
olaps,
headers=headers,
labels=labels,
# headers_modify = headers_modify,
diag=None,
)
def gouy_table(self, objects=None, **kwargs):
descriptions = self.object_descriptions(**kwargs)
q_start = descriptions[0].q_start
descB_list = []
for descB in descriptions:
if isinstance(descB.object, optics.Space):
continue
if objects is not None:
if descB.object in objects:
pass
elif descB.desc in objects:
pass
else:
continue
descB_list.append(descB)
obj_list = []
desc_list = []
gouy_table = []
gouy_list = []
diameters_list = []
q_list = []
q2_list = []
length_table = []
M_table = []
A_table = []
B_table = []
C_table = []
D_table = []
for idx_to, descB_to in enumerate(descB_list):
obj_list.append(descB_to.object)
desc_list.append(descB_to.desc)
q_list.append(descB_to.q_start)
q2_list.append(descB_to.q_end)
gouy_diff = []
length_diff = []
gouy_list.append(
np.angle(descB_to.q_start.gouy_phasor / q_start.gouy_phasor, deg=True)
)
diameters_list.append(2 * descB_to.q_start.W)
M_list = []
A_list = []
B_list = []
C_list = []
D_list = []
for idx_fr, descB_fr in enumerate(descB_list):
if idx_fr < idx_to:
gouy_diff.append(
np.angle(
(
descB_to.q_start.gouy_phasor
/ descB_fr.q_start.gouy_phasor
),
deg=True,
)
)
length_diff.append(descB_to.z1_m - descB_fr.z2_m)
M = descB_to.mat1 @ np.linalg.inv(descB_fr.mat1)
elif idx_fr == idx_to:
gouy_diff.append(
np.angle(
(descB_to.q_end.gouy_phasor / descB_fr.q_start.gouy_phasor),
deg=True,
)
)
length_diff.append(descB_to.z2_m - descB_to.z1_m)
M = descB_to.mat2 @ np.linalg.inv(descB_fr.mat1)
else:
gouy_diff.append(
np.angle(
(descB_to.q_end.gouy_phasor / descB_fr.q_end.gouy_phasor),
deg=True,
)
)
length_diff.append(descB_fr.z1_m - descB_to.z2_m)
M = descB_to.mat2 @ np.linalg.inv(descB_fr.mat2)
M_list.append(M)
A_list.append(M[0, 0])
B_list.append(M[0, 1])
C_list.append(M[1, 0])
D_list.append(M[1, 1])
gouy_table.append(gouy_diff)
length_table.append(length_diff)
M_table.append(M_list)
A_table.append(A_list)
B_table.append(B_list)
C_table.append(C_list)
D_table.append(D_list)
gouy_table = np.array(gouy_table)
gouy_list = np.array(gouy_list)
length_table = np.array(length_table)
diameters_list = np.array(diameters_list)
A_table = np.array(A_table)
B_table = np.array(B_table)
C_table = np.array(C_table)
D_table = np.array(D_table)
def gouy_table_str(
headers=desc_list,
labels=desc_list,
headers_modify="bind",
units="deg",
diag=None,
**kwargs
):
if units == "deg":
umult = 1
elif units == "rad":
umult = np.pi / 180
else:
raise RuntimeError("Unrecognized units")
if diag is None:
diag = "Gouy separations [{}]".format(units)
return table(
umult * gouy_table,
headers=headers,
labels=labels,
headers_modify=headers_modify,
diag=diag,
**kwargs
)
def length_table_str(
headers=desc_list,
labels=desc_list,
headers_modify="bind",
units="in",
diag=None,
**kwargs
):
if units == "in":
umult = 1 / 0.0254
elif units == "m":
umult = 1
else:
raise RuntimeError("Unrecognized units")
if diag is None:
diag = "Length separations [{}]".format(units)
return table(
umult * length_table,
headers=headers,
labels=labels,
headers_modify=headers_modify,
diag=diag,
**kwargs
)
def diameters_str(
headers=["diameter [um]"], labels=desc_list, units="um", **kwargs
):
if units == "um":
umult = 1e6
else:
raise RuntimeError("Unrecognized units")
return table(
umult * diameters_list.reshape(-1, 1),
headers=headers,
labels=labels,
**kwargs
)
def Qs_str(headers=None, labels=desc_list, side="input", **kwargs):
if side == "input":
if headers is None:
headers = ["Beam Q's", "incoming side"]
qs = np.array([q.string() for q in q_list])
return table(
qs.reshape(-1, 1), headers=headers, labels=list(labels), **kwargs
)
elif side == "output":
if headers is None:
headers = ["Beam Q's", "outgoing side"]
qs = np.array([q.string() for q in q2_list])
return table(
qs.reshape(-1, 1),
headers=headers,
labels=list(labels) + list(labels),
**kwargs
)
def A_table_str(
headers=desc_list,
labels=desc_list,
headers_modify="bind",
diag=None,
**kwargs
):
if diag is None:
diag = "Displacement Gain (Abcd) [m/m]"
return table(
A_table,
headers=headers,
labels=labels,
headers_modify=headers_modify,
diag=diag,
**kwargs
)
def D_table_str(
headers=desc_list,
labels=desc_list,
headers_modify="bind",
diag=None,
**kwargs
):
if diag is None:
diag = "Angle Gain (abcD) [rad/rad]"
return table(
D_table,
headers=headers,
labels=labels,
headers_modify=headers_modify,
diag=diag,
**kwargs
)
def B_table_str(
headers=desc_list,
labels=desc_list,
headers_modify="bind",
diag=None,
units="mm/mrad",
**kwargs
):
if units == "in":
umult = 1 / 0.0254
elif units == "m/rad":
umult = 1
elif units == "mm/mrad":
umult = 1
elif units == "mm/rad":
umult = 1e3
else:
raise RuntimeError("Unrecognized units")
if diag is None:
diag = "Deflection (aBcd) [{}]".format(units)
return table(
umult * B_table,
headers=headers,
labels=labels,
headers_modify=headers_modify,
diag=diag,
**kwargs
)
def C_table_str(
headers=desc_list,
labels=desc_list,
headers_modify="bind",
diag=None,
units="mrad/mm",
**kwargs
):
if units == "in":
umult = 1 / 0.0254
elif units == "rad/m":
umult = 1
elif units == "mrad/mm":
umult = 1
elif units == "rad/mm":
umult = 1e-3
else:
raise RuntimeError("Unrecognized units")
if diag is None:
diag = "Deflection (abCd) [{}]".format(units)
return table(
umult * C_table,
headers=headers,
labels=labels,
headers_modify=headers_modify,
diag=diag,
**kwargs
)
return Bunch(
A_table_str=A_table_str,
B_table_str=B_table_str,
C_table_str=C_table_str,
D_table_str=D_table_str,
A_table=A_table,
B_table=B_table,
Qs_str=Qs_str,
q_list=q_list,
q2_list=q2_list,
obj_list=obj_list,
desc_list=desc_list,
descB_list=descB_list,
gouy_table=gouy_table,
gouy_table_str=gouy_table_str,
gouy_list=gouy_list,
length_table=length_table,
length_table_str=length_table_str,
diameters_list=diameters_list,
diameters_str=diameters_str,
)
def shifts_table(
self,
axis="y",
waists_target=None,
):
waists_target = self.annotation_target(waists_target)
tB = self[waists_target]
shifts_pos = {}
shifts_ang = {}
if axis.lower() == "y":
trans_center = self.trans_center.Y
elif axis.lower() == "x":
trans_center = self.trans_center.X
# use the ol2idx mappings to determine which matrices to use based on the object locations within the path
# to use it, it must first be inverted
reverse_ol2idx = {}
for k, v in trans_center.inc_ol2idx.items():
l = reverse_ol2idx.setdefault(v, [])
l.append(k)
idxs = []
keys = []
for idx, key in sorted(reverse_ol2idx.items()):
idxs.append(idx)
keys.append(key)
if axis.lower() == "y":
mats = np.array([self.trans_center.Y.inc_build_mat[i] for i in idxs]) @ np.linalg.inv(self.trans_center.Y.inc_build_mat[-1])
for shift_key, shift in tB.shiftsYend.items():
shifts = mats @ shift
shifts_pos[shift_key] = shifts[..., 0, 0]
shifts_ang[shift_key] = shifts[..., 1, 0]
elif axis.lower() == "x":
mats = np.array([self.trans_center.X.inc_build_mat[i] for i in idxs]) @ np.linalg.inv(self.trans_center.X.inc_build_mat[-1])
for shift_key, shift in tB.shiftsXend.items():
shifts = mats @ shift
shifts_pos[shift_key] = shifts[..., 0, 0]
shifts_ang[shift_key] = shifts[..., 1, 0]
else:
raise RuntimeError("Unrecognized Axis")
return Bunch(
shifts_keys = keys,
shifts_pos = shifts_pos,
shifts_ang = shifts_ang
)
def shifts_table_str(
self,
axis="y",
var="pos",
waists_target=None,
):
"""
Creates a table of the shifts from optic motions to and from cavities.
Currently it drops spaces in favor of non-space optics when forming the labels
TODO: make the optic naming better for the labels and headers of the table.
"""
axis = axis.lower()
var = var.lower()
assert(axis in ['x', 'y'])
assert(var in ['pos', 'ang'])
sB = self.shifts_table(
axis = axis,
waists_target=waists_target,
)
if var == 'pos':
shifts = sB.shifts_pos
elif var == 'ang':
shifts = sB.shifts_ang
keys = sorted(shifts.keys())
vals = np.array([shifts[k] for k in keys]).T
self.trans_center.X
labels = []
for shift_key_list in sB.shifts_keys:
for sk in shift_key_list:
if isinstance(sk[0], optics.Space):
continue
else:
break
labels.append(str(sk))
return table(
vals,
headers=[str(k) for k in keys],
labels=labels,
diag=var,
)
|
PypiClean
|
/102003712-0.0.6-py3-none-any.whl/topsisLibrary/topsis.py
|
import pandas as pd
import sys
import math
# read_file.to_csv("102003712-data.csv",
# index = None,
# header=True)
def main():
try:
read_file = pd.read_csv(sys.argv[1])
df = pd.DataFrame(read_file)
df1 = df.drop(df.columns[0], axis=1)
w = sys.argv[2]
weight = w.split(",")
weight = [eval(i) for i in weight]
i = sys.argv[3]
impact1 = i.split(",")
impact = []
for i in impact1:
if i == '+':
impact.append(1)
elif (i == '-'):
impact.append(0)
# print(impact)
rows = df1.shape[0]
cols = df1.shape[1]
ss = []
for j in range(0, cols):
sum = 0
for i in range(0, rows):
sum = sum+(df1.iloc[i, j]*df1.iloc[i, j])
sum = math.sqrt(sum)
ss.append(sum)
# print(ss)
for j in range(0, cols):
for i in range(0, rows):
df1.iloc[i, j] = (df1.iloc[i, j]/ss[j])*weight[j]
best = []
worst = []
for j in range(0, cols):
max = -1
min = 10000
for i in range(0, rows):
if (df1.iloc[i, j] > max):
max = df1.iloc[i, j]
if (df1.iloc[i, j] < min):
min2 = df1.iloc[i, j]
if (impact[j] == 1):
best.append(max)
worst.append(min)
elif (impact[j] == 0):
best.append(min)
worst.append(max)
ed_b = []
ed_w = []
for i in range(0, rows):
sum_b = 0
sum_w = 0
for j in range(0, cols):
sum_b = sum_b+((df1.iloc[i, j]-best[j])
* (df1.iloc[i, j]-best[j]))
sum_w = sum_w+((df1.iloc[i, j]-worst[j])
* (df1.iloc[i, j]-worst[j]))
ed_b.append(math.sqrt(sum_b))
ed_w.append(math.sqrt(sum_w))
p = []
for i in range(0, rows):
p.append(ed_w[i]/(ed_b[i]+ed_w[i]))
df["score"] = p
df["Rank"] = df["score"].rank()
df.to_csv(sys.argv[4], index=False)
except FileNotFoundError:
print('file not found')
except:
if (len(sys.argv) != 5):
print('ERROR: Please provide four arguments')
elif (len(weight) != len(impact) or len(weight) != cols or len(impact) != cols):
print('ERROR: incorrect arguments')
else:
print('ERROR')
if __name__ == '__main__':
main()
|
PypiClean
|
/bert_multitask_server-0.1.2-py3-none-any.whl/bert_multitask_serving/server/result_parser.py
|
import numpy as np
def remove_special_tokens(l1, l2):
l2 = l2[1:]
l2 = l2[:len(l1)]
return l1, l2
def merge_entity(tokens, labels):
merged_tokens = []
merged_labels = []
for token, label in zip(tokens, labels):
if label == 'O':
merged_tokens.append(token)
merged_labels.append(label)
elif label[0] == 'B':
merged_tokens.append(token)
merged_labels.append(label[2:])
elif label[0] in ['I', 'M', 'E']:
try:
merged_tokens[-1] += token
except IndexError:
merged_tokens.append(token)
merged_labels.append(label)
else:
# strange label capture
merged_tokens.append(token)
merged_labels.append('O')
# merged_labels[-1] += label
return merged_tokens, merged_labels
def get_model_index(in_array):
if len(in_array.shape) == 3:
in_array = np.argmax(in_array, axis=-1)
return in_array
def ner(pred, label_encoder, tokenizer, problem, extract_ent=True):
result_list = []
pred[problem] = get_model_index(pred[problem])
for input_ids, ner_pred in zip(pred['raw_text'].tolist(), pred[problem].tolist()):
# tokens = tokenizer.convert_ids_to_tokens(input_ids)
# tokens = [t.replace('[unused1]', ' ') for t in tokens]
tokens = list(input_ids.decode('utf8'))
labels = label_encoder.inverse_transform(ner_pred)
tokens, labels = remove_special_tokens(tokens, labels)
tokens, labels = merge_entity(tokens, labels)
if extract_ent:
result_list.append([(ent, ent_type) for ent, ent_type in zip(
tokens, labels) if ent_type != 'O'])
else:
result_list.append(
list(zip(tokens, labels)))
return result_list
def cws(pred, label_encoder, tokenizer, problem):
result_list = []
pred[problem] = get_model_index(pred[problem])
for input_ids, ner_pred in zip(pred['raw_text'].tolist(), pred[problem].tolist()):
# tokens = tokenizer.convert_ids_to_tokens(input_ids)
# tokens = [t.replace('[unused1]', ' ') for t in tokens]
tokens = list(input_ids.decode('utf8'))
labels = label_encoder.inverse_transform(ner_pred)
tokens, labels = remove_special_tokens(tokens, labels)
output_str = ''
for char, char_label in zip(tokens, labels):
if char_label.lower() in ['s', 'e', 'o']:
output_str += char + ' '
else:
output_str += char
result_list.append(output_str)
return result_list
def seq_tag(pred, label_encoder, tokenizer, problem):
result_list = []
pred[problem] = get_model_index(pred[problem])
for input_ids, ner_pred in zip(pred['raw_text'].tolist(), pred[problem].tolist()):
# tokens = tokenizer.convert_ids_to_tokens(input_ids)
# tokens = [t.replace('[unused1]', ' ') for t in tokens]
tokens = list(input_ids.decode('utf8'))
labels = label_encoder.inverse_transform(ner_pred)
tokens, labels = remove_special_tokens(tokens, labels)
tokens, labels = merge_entity(tokens, labels)
result_list.append(
list(zip(tokens, labels)))
return result_list
def cls(pred, label_encoder, tokenizer, problem):
result_list = []
for pred in pred[problem].tolist():
label = label_encoder.inverse_transform([np.argmax(pred)])
result_list.append(label[0])
return result_list
def consolidate_ner(pred: dict, del_origin=True):
new_pred = {'ner': []}
for input_ind in range(len(pred['boson_ner'])):
new_pred['ner'].append([])
for ent, ent_type in pred['weibo_ner'][input_ind]:
if ent_type in ['LOC', 'GPE']:
new_pred['ner'][-1].append([ent, ent_type])
for ent, ent_type in pred['boson_ner'][input_ind]:
if ent_type != 'LOC':
new_pred['ner'][-1].append([ent, ent_type])
if del_origin:
del pred['boson_ner'], pred['weibo_ner']
pred.update(new_pred)
return pred
def text_generation(pred, label_encoder, tokenizer, problem):
result_list = []
pred[problem] = get_model_index(pred[problem])
for text_gen_pred in pred[problem].tolist():
labels = label_encoder.convert_ids_to_tokens(text_gen_pred)
result_list.append(''.join(labels).replace('##', ''))
return result_list
def tag_generation(pred, label_encoder, tokenizer, problem):
result_list = []
pred[problem] = get_model_index(pred[problem])
for text_gen_pred in pred[problem].tolist():
labels = label_encoder.inverse_transform(text_gen_pred)
return result_list
def parse_prediction(pred, label_encoder_dict, tokenizer, params):
for problem in label_encoder_dict:
if 'NER' == problem.split('_')[-1].upper():
pred[problem] = np.array(ner(
pred,
label_encoder_dict[problem],
tokenizer,
problem,
extract_ent=False))
elif 'CWS' == problem.split('_')[-1].upper():
pred[problem] = np.array(cws(
pred,
label_encoder_dict[problem],
tokenizer,
problem))
elif params.problem_type[problem] == 'seq_tag':
pred[problem] = np.array(seq_tag(
pred,
label_encoder_dict[problem],
tokenizer,
problem))
elif params.problem_type[problem] == 'seq2seq_text':
pred[problem] = np.array(text_generation(
pred,
label_encoder_dict[problem],
tokenizer,
problem
))
elif params.problem_type[problem] == 'seq2seq_tag':
pred[problem] = np.array(tag_generation(
pred,
label_encoder_dict[problem],
tokenizer,
problem
))
else:
try:
pred[problem] = np.array(cls(
pred,
label_encoder_dict[problem],
tokenizer,
problem
))
except:
pass
# pred = consolidate_ner(pred, del_origin=False)
return pred
|
PypiClean
|
/invz_package-0.0.9.tar.gz/invz_package-0.0.9/innerverz_package/face_color_transfer/sub_nets/warp_net.py
|
import sys
import torch
import torch.nn as nn
import torch.nn.functional as F
class WTA_scale(torch.autograd.Function):
"""
We can implement our own custom autograd Functions by subclassing
torch.autograd.Function and implementing the forward and backward passes
which operate on Tensors.
"""
@staticmethod
def forward(ctx, input, scale=1e-4):
"""
In the forward pass we receive a Tensor containing the input and return a
Tensor containing the output. You can cache arbitrary Tensors for use in the
backward pass using the save_for_backward method.
"""
activation_max, index_max = torch.max(input, -1, keepdim=True)
input_scale = input * scale # default: 1e-4
# input_scale = input * scale # default: 1e-4
output_max_scale = torch.where(input == activation_max, input, input_scale)
mask = (input == activation_max).type(torch.float)
ctx.save_for_backward(input, mask)
return output_max_scale
@staticmethod
def backward(ctx, grad_output):
"""
In the backward pass we receive a Tensor containing the gradient of the loss
with respect to the output, and we need to compute the gradient of the loss
with respect to the input.
"""
# import pdb
# pdb.set_trace()
input, mask = ctx.saved_tensors
mask_ones = torch.ones_like(mask)
mask_small_ones = torch.ones_like(mask) * 1e-4
# mask_small_ones = torch.ones_like(mask) * 1e-4
grad_scale = torch.where(mask == 1, mask_ones, mask_small_ones)
grad_input = grad_output.clone() * grad_scale
return grad_input, None
class ResidualBlock(nn.Module):
def __init__(self, in_channels, out_channels, kernel_size=3, padding=1, stride=1):
super(ResidualBlock, self).__init__()
self.padding1 = nn.ReflectionPad2d(padding)
self.conv1 = nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size, padding=0, stride=stride)
self.bn1 = nn.InstanceNorm2d(out_channels)
self.prelu = nn.PReLU()
self.padding2 = nn.ReflectionPad2d(padding)
self.conv2 = nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size, padding=0, stride=stride)
self.bn2 = nn.InstanceNorm2d(out_channels)
def forward(self, x):
residual = x
out = self.padding1(x)
out = self.conv1(out)
out = self.bn1(out)
out = self.prelu(out)
out = self.padding2(out)
out = self.conv2(out)
out = self.bn2(out)
out += residual
out = self.prelu(out)
return out
class Warp_Net(nn.Module):
""" input is Al, Bl, channel = 1, range~[0,255] """
def __init__(self):
super(Warp_Net, self).__init__()
self.feature_channel = 64
self.in_channels = self.feature_channel * 4
self.inter_channels = 256
# 44*44
self.layer2_1 = nn.Sequential(
nn.ReflectionPad2d(1),
nn.Conv2d(128, 128, kernel_size=3, padding=0, stride=1),
nn.InstanceNorm2d(128),
nn.PReLU(),
nn.ReflectionPad2d(1),
nn.Conv2d(128, self.feature_channel, kernel_size=3, padding=0, stride=1),
nn.InstanceNorm2d(self.feature_channel),
nn.PReLU(),
)
self.layer3_1 = nn.Sequential(
nn.ReflectionPad2d(1),
nn.Conv2d(256, 128, kernel_size=3, padding=0, stride=1),
nn.InstanceNorm2d(128),
nn.PReLU(),
nn.ReflectionPad2d(1),
nn.Conv2d(128, self.feature_channel, kernel_size=3, padding=0, stride=1),
nn.InstanceNorm2d(self.feature_channel),
nn.PReLU(),
nn.Upsample(scale_factor=2),
)
# 22*22->44*44
self.layer4_1 = nn.Sequential(
nn.ReflectionPad2d(1),
nn.Conv2d(512, 256, kernel_size=3, padding=0, stride=1),
nn.InstanceNorm2d(256),
nn.PReLU(),
nn.ReflectionPad2d(1),
nn.Conv2d(256, 256, kernel_size=3, padding=0, stride=1),
nn.InstanceNorm2d(self.feature_channel),
nn.PReLU(),
nn.Upsample(scale_factor=2),
nn.ReflectionPad2d(1),
nn.Conv2d(256, self.feature_channel, kernel_size=3, padding=0, stride=1),
nn.InstanceNorm2d(self.feature_channel),
nn.PReLU(),
nn.Upsample(scale_factor=2),
)
# 11*11->44*44
self.layer5_1 = nn.Sequential(
nn.ReflectionPad2d(1),
nn.Conv2d(512, 256, kernel_size=3, padding=0, stride=1),
nn.InstanceNorm2d(256),
nn.PReLU(),
nn.Upsample(scale_factor=2),
nn.ReflectionPad2d(1),
nn.Conv2d(256, 256, kernel_size=3, padding=0, stride=1),
nn.InstanceNorm2d(256),
nn.PReLU(),
nn.Upsample(scale_factor=2),
nn.ReflectionPad2d(1),
nn.Conv2d(256, self.feature_channel, kernel_size=3, padding=0, stride=1),
nn.InstanceNorm2d(self.feature_channel),
nn.PReLU(),
nn.Upsample(scale_factor=2),
)
self.layer = nn.Sequential(
ResidualBlock(self.feature_channel * 4, self.feature_channel * 4, kernel_size=3, padding=1, stride=1),
ResidualBlock(self.feature_channel * 4, self.feature_channel * 4, kernel_size=3, padding=1, stride=1),
ResidualBlock(self.feature_channel * 4, self.feature_channel * 4, kernel_size=3, padding=1, stride=1),
)
self.theta = nn.Conv2d(
in_channels=self.in_channels, out_channels=self.inter_channels, kernel_size=1, stride=1, padding=0
)
self.phi = nn.Conv2d(
in_channels=self.in_channels, out_channels=self.inter_channels, kernel_size=1, stride=1, padding=0
)
self.upsampling = nn.Upsample(scale_factor=2)
def forward(
self, # A -> target(structure) / B -> source(color)
A_relu2_1,
A_relu3_1,
A_relu4_1,
A_relu5_1,
B_relu2_1,
B_relu3_1,
B_relu4_1,
B_relu5_1,
):
# scale feature size to 44*44
A_feature2_1 = self.layer2_1(A_relu2_1) # (1, 128, 108, 192) -> (1, 64, 54, 96)
B_feature2_1 = self.layer2_1(B_relu2_1) #
A_feature3_1 = self.layer3_1(A_relu3_1) # (1, 256, 54, 96) -> (1, 64, 54, 96)
B_feature3_1 = self.layer3_1(B_relu3_1)
A_feature4_1 = self.layer4_1(A_relu4_1) # (1, 512, 27, 48) -> (1, 64, 54, 96)
B_feature4_1 = self.layer4_1(B_relu4_1)
A_feature5_1 = self.layer5_1(A_relu5_1) # (1, 512, 13, 24) -> (1, 64, 52, 96)
B_feature5_1 = self.layer5_1(B_relu5_1)
# concatenate features
if A_feature5_1.shape[2] != A_feature2_1.shape[2] or A_feature5_1.shape[3] != A_feature2_1.shape[3]:
A_feature5_1 = F.pad(A_feature5_1, (0, 0, 1, 1), "replicate")
B_feature5_1 = F.pad(B_feature5_1, (0, 0, 1, 1), "replicate")
A_features = self.layer(torch.cat((A_feature2_1, A_feature3_1, A_feature4_1, A_feature5_1), 1))
B_features = self.layer(torch.cat((B_feature2_1, B_feature3_1, B_feature4_1, B_feature5_1), 1))
A_features = self.theta(A_features)
B_features = self.phi(B_features)
return A_features, B_features
"""
# pairwise cosine similarity
theta = self.theta(A_features).view(batch_size, self.inter_channels, -1) # 2*256*(feature_height*feature_width)
theta = theta - theta.mean(dim=-1, keepdim=True) # center the feature
theta_norm = torch.norm(theta, 2, 1, keepdim=True) + sys.float_info.epsilon
theta = torch.div(theta, theta_norm)
theta_permute = theta.permute(0, 2, 1) # 2*(feature_height*feature_width)*256
phi = self.phi(B_features).view(batch_size, self.inter_channels, -1) # 2*256*(feature_height*feature_width)
phi = phi - phi.mean(dim=-1, keepdim=True) # center the feature
phi_norm = torch.norm(phi, 2, 1, keepdim=True) + sys.float_info.epsilon
phi = torch.div(phi, phi_norm)
f = torch.matmul(theta_permute, phi) # 2*(feature_height*feature_width)*(feature_height*feature_width)
if detach_flag:
f = f.detach()
f_similarity = f.unsqueeze_(dim=1)
similarity_map = torch.max(f_similarity, -1, keepdim=True)[0]
similarity_map = similarity_map.view(batch_size, 1, feature_height, feature_width)
# f can be negative
f_WTA = f if WTA_scale_weight == 1 else WTA_scale.apply(f, WTA_scale_weight)
f_WTA = f_WTA / temperature
f_div_C = F.softmax(f_WTA.squeeze_(), dim=-1) # 2*1936*1936;
# downsample the reference color
B_lab = F.avg_pool2d(B_lab_map, 4)
B_lab = B_lab.view(batch_size, channel, -1)
B_lab = B_lab.permute(0, 2, 1) # 2*1936*channel
# multiply the corr map with color
y = torch.matmul(f_div_C, B_lab) # 2*1936*channel
y = y.permute(0, 2, 1).contiguous()
y = y.view(batch_size, channel, feature_height, feature_width) # 2*3*44*44
y = self.upsampling(y)
similarity_map = self.upsampling(similarity_map)
return y, similarity_map
"""
|
PypiClean
|
/brawlstats-4.1.1.tar.gz/brawlstats-4.1.1/README.rst
|
.. image:: https://i.imgur.com/5uUkTrn.png
:alt: Brawl Stats
Brawl Stats
===========
.. image:: https://img.shields.io/pypi/v/brawlstats.svg
:target: https://pypi.org/project/brawlstats/
:alt: PyPi
.. image:: https://travis-ci.com/SharpBit/brawlstats.svg?branch=master
:target: https://travis-ci.com/SharpBit/brawlstats
:alt: Travis-CI build
.. image:: https://img.shields.io/pypi/pyversions/brawlstats.svg
:target: https://pypi.org/project/brawlstats/
:alt: Supported Versions
.. image:: https://img.shields.io/github/license/SharpBit/brawlstats.svg
:target: https://github.com/SharpBit/brawlstats/blob/master/LICENSE
:alt: MIT License
- This library is a sync and async wrapper for the Brawl Stars API.
- Python 3.5.3 or later is required.
Features
~~~~~~~~
- Easy to use with an object oriented design.
- Use the same client for sync and async usage.
- Get a player profile and battlelog.
- Get a club and its members.
- Get the top 200 rankings for players, clubs, or a specific brawler.
- Get information about maps, brawlers, and more!
Installation
~~~~~~~~~~~~
Install the latest stable build:
::
pip install brawlstats
Install the development build:
::
pip install git+https://github.com/SharpBit/brawlstats@development
Documentation
~~~~~~~~~~~~~
Documentation is being hosted on `Read the Docs`_.
Examples
~~~~~~~~
Examples are in the `examples folder`_.
- ``sync.py`` shows you basic sync usage
- ``async.py`` shows you basic async usage
- ``discord_cog.py`` shows an example Discord Bot cog using `discord.py`_
Misc
~~~~
- If you are currently using this wrapper, please star this repository :)
- If you come across an issue in the wrapper, please `create an issue`_.
- If you need an API key, visit https://developer.brawlstars.com
Contributing
~~~~~~~~~~~~
Special thanks to this project's contributors ❤️
- `4JR`_
- `golbu`_
- `kawaii banana`_
- `kjkui`_
- `Kyber`_
- `Papiersnipper`_
- `Pollen`_
- `OrangutanGaming`_
- `Stitch`_
If you want to contribute, whether it be a bug fix or new feature, make sure to follow the `contributing guidelines`_.
This project is no longer actively maintained. No new features will be added, only bugfixes and security fixes will be accepted.
.. _create an issue: https://github.com/SharpBit/brawlstats/issues
.. _Read the Docs: https://brawlstats.rtfd.io/
.. _examples folder: https://github.com/SharpBit/brawlstats/tree/master/examples
.. _discord.py: https://github.com/rapptz/discord.py
.. _contributing guidelines: https://github.com/SharpBit/brawlstats/blob/master/CONTRIBUTING.md
.. _4JR: https://github.com/fourjr
.. _OrangutanGaming: https://github.com/OrangutanGaming
.. _Stitch: https://github.com/Soumil07
.. _kjkui: https://github.com/kjkui
.. _Kyber: https://github.com/kyb3r
.. _Papiersnipper: https://github.com/robinmahieu
.. _Pollen: https://github.com/pollen5
.. _kawaii banana: https://github.com/bananaboy21
.. _golbu: https://github.com/0dminnimda
|
PypiClean
|
/aio_openapi-3.2.1.tar.gz/aio_openapi-3.2.1/openapi/pagination/offset.py
|
from dataclasses import dataclass
from typing import Dict, NamedTuple, Optional, Type
from multidict import MultiDict
from yarl import URL
from openapi.data.fields import Choice, integer_field, str_field
from openapi.utils import docjoin
from .pagination import (
DEF_PAGINATION_LIMIT,
MAX_PAGINATION_LIMIT,
Pagination,
PaginationVisitor,
from_filters_and_dataclass,
)
def offsetPagination(
*order_by_fields: str,
default_limit: int = DEF_PAGINATION_LIMIT,
max_limit: int = MAX_PAGINATION_LIMIT,
) -> Type[Pagination]:
"""Crate a limit/offset :class:`.Pagination` dataclass"""
if len(order_by_fields) == 0:
raise ValueError("orderable_fields must be specified")
@dataclass
class OffsetPagination(Pagination):
limit: int = integer_field(
min_value=1,
max_value=max_limit,
default=default_limit,
description="Limit the number of objects returned from the endpoint",
)
offset: int = integer_field(
min_value=0,
default=0,
description=(
"Number of objects to exclude. "
"Use in conjunction with limit to paginate results"
),
)
order_by: str = str_field(
validator=Choice(order_by_fields),
default=order_by_fields[0],
description=(
"Order results by given column (default ascending order). "
f"Possible values are {docjoin(order_by_fields)}"
),
)
def apply(self, visitor: PaginationVisitor) -> None:
visitor.apply_offset_pagination(
limit=self.limit, offset=self.offset, order_by=self.order_by
)
@classmethod
def create_pagination(cls, data: dict) -> "OffsetPagination":
return from_filters_and_dataclass(OffsetPagination, data)
def links(
self, url: URL, data: list, total: Optional[int] = None
) -> Dict[str, str]:
"""Return links for paginated data"""
return Links(url=url, query=MultiDict(url.query)).links(
total, self.limit, self.offset
)
return OffsetPagination
class Links(NamedTuple):
url: URL
query: MultiDict
def first_link(self, total, limit, offset):
n = self._count_part(offset, limit, 0)
if n:
offset -= n * limit
if offset > 0:
return self.link(0, min(limit, offset))
def prev_link(self, total, limit, offset):
if offset:
olimit = min(limit, offset)
prev_offset = offset - olimit
return self.link(prev_offset, olimit)
def next_link(self, total, limit, offset):
next_offset = offset + limit
if total > next_offset:
return self.link(next_offset, limit)
def last_link(self, total, limit, offset):
n = self._count_part(total, limit, offset)
if n > 0:
return self.link(offset + n * limit, limit)
def link(self, offset, limit):
query = self.query.copy()
query.update({"offset": offset, "limit": limit})
return self.url.with_query(query)
def _count_part(self, total, limit, offset):
n = (total - offset) // limit
# make sure we account for perfect matching
if n * limit + offset == total:
n -= 1
return max(0, n)
def links(self, total, limit, offset):
links = {}
first = self.first_link(total, limit, offset)
if first:
links["first"] = first
links["prev"] = self.prev_link(total, limit, offset)
next_ = self.next_link(total, limit, offset)
if next_:
links["next"] = next_
links["last"] = self.last_link(total, limit, offset)
return links
|
PypiClean
|
/fuzzy_rough_learn-0.2.2-py3-none-any.whl/frlearn/base.py
|
from __future__ import annotations
from abc import ABC, abstractmethod
from inspect import signature
import numpy as np
from sklearn.base import BaseEstimator, ClassifierMixin
class SoftMachine(ABC):
"""
Abstract base class for machine learning algorithms.
Once initialised with hyperparameters, effectively a function
that takes construction data and returns a model,
which is another function that takes query data and returns some result.
"""
def __init__(self, preprocessors=()):
self.preprocessors = preprocessors
@abstractmethod
def __call__(self, X, **kwargs) -> SoftMachine.Model:
preprocessing_models = []
for preprocessor in self.preprocessors:
extra_kwargs = {k: v for k, v in kwargs.items() if k in signature(preprocessor.__call__).parameters}
preprocessing_model = preprocessor(X, **extra_kwargs)
X = preprocessing_model(X)
preprocessing_models.append(preprocessing_model)
model = self._construct(X, **kwargs)
model.preprocessing_models = preprocessing_models
return model
@property
def construct(self):
return self.__call__
@abstractmethod
def _construct(self, X, **kwargs) -> SoftMachine.Model:
model = self.Model.__new__(self.Model)
model.n, model.m = model.shape = X.shape
return model
class Model(ABC):
n: int
m: int
shape: tuple[int, ...]
preprocessing_models: list
def __len__(self):
return self.n
@abstractmethod
def __call__(self, X, *args, **kwargs):
for preprocessing_model in self.preprocessing_models:
X = preprocessing_model(X)
return self._query(X, *args, **kwargs)
@abstractmethod
def _query(self, X, *args, **kwargs):
pass
class Unsupervised(SoftMachine):
def __call__(self, X) -> Unsupervised.Model:
return super().__call__(X, )
def _construct(self, X) -> Unsupervised.Model:
model = super()._construct(X)
return model
class Model(SoftMachine.Model):
pass
class ClassSupervised(SoftMachine):
def __call__(self, X, y) -> ClassSupervised.Model:
return super().__call__(X, y=y)
def _construct(self, X, y) -> ClassSupervised.Model:
model = super()._construct(X, y=y)
model.classes = np.unique(y)
model.n_classes = len(model.classes)
return model
class Model(SoftMachine.Model):
classes: np.array
n_classes: int
class LabelSupervised(SoftMachine):
def __call__(self, X, Y) -> LabelSupervised.Model:
return super().__call__(X, Y=Y)
def _construct(self, X, Y) -> LabelSupervised.Model:
model = super()._construct(X, Y=Y)
model.n_labels = Y.shape[1]
return model
class Model(SoftMachine.Model):
n_labels: int
class Classifier(SoftMachine):
class Model(SoftMachine.Model):
def __call__(self, X):
return super().__call__(X)
@property
def query(self):
return self.__call__
@abstractmethod
def _query(self, X):
pass
class DataDescriptor(Unsupervised, Classifier):
class Model(Unsupervised.Model, Classifier.Model):
pass
class MultiClassClassifier(ClassSupervised, Classifier):
class Model(ClassSupervised.Model, Classifier.Model):
pass
class MultiLabelClassifier(LabelSupervised, Classifier):
class Model(LabelSupervised.Model, Classifier.Model):
pass
class Regressor(SoftMachine):
def __call__(self, X, y) -> Regressor.Model:
return super().__call__(X, y=y)
def _construct(self, X, y) -> Regressor.Model:
model = super()._construct(X, y=y)
return model
class Model(SoftMachine.Model):
def __call__(self, X):
return super().__call__(X)
@property
def query(self):
return self.__call__
@abstractmethod
def _query(self, X):
pass
class FeaturePreprocessor(SoftMachine):
class Model(SoftMachine.Model):
def __call__(self, X):
return super().__call__(X)
@property
def transform(self):
return self.__call__
class FeatureSelector(FeaturePreprocessor):
class Model(FeaturePreprocessor.Model):
selection: np.array
def _query(self, X):
return X[:, self.selection]
class SupervisedInstancePreprocessor(ABC):
@abstractmethod
def __init__(self):
pass
@abstractmethod
def __call__(self, X, y):
pass
def select_class(scores, abstention_threshold: float = -1, labels=None):
"""
Convert an array of class scores into class predictions, by selecting the class with the highest score.
If none of the scores is greater than `abstention_threshold`, a generic `other` class will be predicted.
Parameters
----------
scores : array shape=(n, n_classes, )
Array of class scores. Scores should be values in `[0, 1]`
abstention_threshold : float=-1
Threshold to use for predicting one of the classes.
labels : array shape={(n_classes, ), (n_classes + 1, )}, default=None
Labels of the classes in `scores` to be used in the return array. The first label is used for abstention,
it may be omitted if `abstention_threshold == 0`. If `None`, positions are used instead, with 0 used
for abstention if `abstention_threshold >= 0`.
Returns
-------
predictions : array shape=(n, )
Class label for each query instance.
"""
if abstention_threshold >= 0:
scores = np.concatenate([np.broadcast_to(abstention_threshold, (len(scores), 1)), scores], axis=-1)
predictions = np.argmax(scores, axis=-1)
if labels is not None:
predictions = labels[predictions]
return predictions
def discretise(scores, threshold: float = 0.5, ):
"""
Discretise an array of label scores in `[0, 1]` into discrete predictions in `{0, 1}`,
by selecting all labels that score higher than `threshold`.
Parameters
----------
scores : array shape=(n, n_classes, )
Array of class scores. Scores should be values in `[0, 1]`
threshold : float=0.5
Threshold to use for selecting labels.
Returns
-------
predictions : array shape=(n, )
Class label for each query instance.
"""
return scores >= threshold
def probabilities_from_scores(scores):
"""
Rescale an array of class scores into probabilities that sum to 1, by dividing each score by the total sum.
If all scores are zero, probabilities are assigned equally (`1/n_classes`).
Parameters
----------
scores : array shape=(n, n_classes, )
Array of class scores.
Returns
-------
probabilities : array shape=(n, n_classes, )
Array of class probabilities.
"""
scores = scores.copy()
rows_0 = ~np.any(scores, axis=1)
scores[rows_0] = 1
return scores/np.sum(scores, axis=1, keepdims=True)
class FitPredictClassifier(BaseEstimator, ClassifierMixin, ):
"""
Convenience class for using any classifier as a scikit-learn-style classifier with fit and predict methods.
Parameters
----------
classifier_or_class : type or MultiClassClassifier or MultiLabelClassifier}
Either an initialised classifier, or a classifier class. If a class, will be initialised with
all remaining positional and keyword arguments.
"""
def __init__(
self,
classifier_or_class: type[MultiClassClassifier] | type[MultiLabelClassifier] | MultiClassClassifier | MultiLabelClassifier,
*args, **kwargs):
super().__init__()
if isinstance(classifier_or_class, SoftMachine):
self.classifier = classifier_or_class
else:
self.classifier = classifier_or_class(*args, **kwargs)
def fit(self, X, y):
"""
Fit the model using X as training data and y as target values
Parameters
----------
X : array shape=(n_instances, n_features, )
Training data. If array or matrix, shape [n_samples, n_features],
or [n_samples, n_samples] if metric='precomputed'.
y : {array-like, sparse matrix}
Target values of shape = [n_samples] or [n_samples, n_outputs]
"""
self.model_ = self.classifier(X, y)
return self
def predict(self, X):
"""
Predict the class labels for the instances in X.
Parameters
----------
X : array shape=(n_instances, n_features, )
Query instances.
Returns
-------
y : array shape=(n_instances, )
Class label for each query instance.
"""
scores = self.model_(X)
if isinstance(self.classifier, MultiClassClassifier):
return select_class(scores, labels=self.model_.classes)
else:
return discretise(scores)
def predict_proba(self, X):
"""
Calculate probability estimates for the instances in X.
Parameters
----------
X : array shape=(n_instances, n_features, )
Query instances.
Returns
-------
p : array shape=(n_instances, n_classes, )
The class probabilities of the query instances. Classes are ordered
by lexicographic order.
"""
# normalise membership degrees into confidence scores
scores = self.model_(X)
return probabilities_from_scores(scores)
|
PypiClean
|
/xpublish-host-1.1.5.tar.gz/xpublish-host-1.1.5/xpublish_host/config.py
|
import ipaddress as ip
import logging
import os
import typing as t
from pprint import pformat
from goodconf import GoodConf
from pydantic import (
BaseModel,
PositiveInt,
PyObject,
)
import xpublish
L = logging.getLogger(__name__)
class ClusterConfig(BaseModel):
module: PyObject
# Plugin arguments
args: set[str] = ()
# Plugin named arguments
kwargs: dict[str, t.Any] = {}
class PluginConfig(BaseModel):
module: PyObject
# Plugin arguments
args: set[str] = ()
# Plugin named arguments
kwargs: dict[str, t.Any] = {}
class RestConfig(GoodConf):
publish_host: ip.IPv4Address = '0.0.0.0'
publish_port: PositiveInt = 9000
log_level: str = 'debug'
plugins_load_defaults: bool = True
plugins_config: dict[str, PluginConfig] = {}
"""
docs_url="/api"
openapi_url="/api.json"
"""
app_config: dict[str, t.Any] = {
'docs_url': '/api',
'openapi_url': '/api.json',
}
"""
available_bytes=1e11
"""
cache_config: dict[str, t.Any] = {
'available_bytes': 1e11
}
"""
{
'processes': True,
'n_workers': 8,
'threads_per_worker': 1,
'memory_limit': '4GiB',
}
{} = don't load a cluster, parallel=False must be set
on dataset load or this will cause errors
None = use default cluster
"""
cluster_config: ClusterConfig | None = None
class Config:
file_env_file = os.environ.get('XPUB_CONFIG_FILE', 'config.yaml')
env_file = os.environ.get('XPUB_ENV_FILES', '.env')
env_file_encoding = 'utf-8'
env_prefix = 'XPUB_'
env_nested_delimiter = '__'
def setup_rest(self):
load_defaults = None
if self.plugins_load_defaults is False:
load_defaults = {}
plugs = self.setup_plugins()
# Start with no datasets, they are all loaded
# using the DatasetConfigPlugin
rest = xpublish.Rest(
None,
plugins=load_defaults,
app_kws=dict(self.app_config),
cache_kws=dict(self.cache_config),
)
for p in plugs.values():
rest.register_plugin(p, overwrite=True)
config = {
'app': dict(self.app_config),
'cache': dict(self.cache_config),
'plugins': plugs,
}
config_out = pformat(config)
L.info(config_out)
return rest
def setup_cluster(self):
"""
Load the cluster config we should use for serving this dataset.
Some integration with Dask Gateway here would be sweet!
To use no cluster, set to an empty dict. To use the default, set to None.
This is similar to how plugins work.
"""
# None or empty - don't load a cluster
if not self.cluster_config:
return None
# Only spawn a cluster if distributed is installed
try:
from dask.distributed import Client # noqa
except ImportError:
L.warning("The dask 'distributed' library is not installed, no cluster support")
return None
cluster = self.cluster_config.module(
*self.cluster_config.args,
**self.cluster_config.kwargs
)
L.info(f'Created cluster: {cluster}')
return cluster
def setup_plugins(self):
plugins = {}
for p in self.plugins_config.values():
try:
plug = p.module(
*p.args,
**p.kwargs
)
plugins[plug.name] = plug
except BaseException as e:
L.error(f"Could not load the {p} plugin: {e}")
return plugins
def setup(self, create_cluster_client=True):
"""
_summary_
Args:
create_cluster_client (bool, optional): When run outside of
gunicorn this needs to be True for the dask client object
to be created. Defaults to True.
"""
if create_cluster_client is True:
cluster = self.setup_cluster()
if cluster:
from dask.distributed import Client
client = Client(cluster)
L.info(f'Using cluster: {client.cluster}')
L.info(f'Dashboard: {client.cluster.dashboard_link}')
rest = self.setup_rest()
return rest
def serve_kwargs(self):
return dict(
host=str(self.publish_host),
port=self.publish_port,
log_level=self.log_level,
)
|
PypiClean
|